text stringlengths 38 1.54M |
|---|
x1 = int(input('сколько человек в 1 классе: '))
x2 = int(input('сколько человек во 2 классе: '))
x3 = int(input('сколько человек в 3 классе: '))
import math
a = math.ceil(x1 / 2)
b = math.ceil(x2 / 2)
c = math.ceil(x3 / 2)
x = (a + b + c)
print(x)
|
def check_if_valid():
    """Prompt until the user types something float() accepts; return that number."""
    while True:
        raw = input("Please enter a number: ")
        try:
            return float(raw)
        except ValueError:
            print("That is not a number. Please try again.")
def f_to_c(a):
    """Convert a temperature *a* from degrees Fahrenheit to Celsius."""
    # Keep the exact (a - 32) * (5 / 9) evaluation order so float results
    # are bit-identical to the original.
    ratio = 5 / 9
    return (a - 32) * ratio
# Read a Fahrenheit value from the user and report its Celsius equivalent.
x = check_if_valid()
celsius = f_to_c(x)
print("That temperature in Celsius is: " + str(celsius))
import numpy as np
import pprint as pp

# Demo: how ndarray .shape changes with reshape and nesting depth.
aaa = np.array([1,2,3,4,5])
print(aaa.shape)    # (5,) -- a 1-D vector
print(aaa)
aaa = aaa.reshape(5,1)
print(aaa.shape)    # (5, 1) -- now a 2-D column vector
print(aaa)
bbb = np.array([[1,2,3], [4,5,6]])
print(bbb.shape)    # (2, 3) -- two rows, three columns
bbb = np.array([[[[1,2,3],[4,5,6]]]])
print(bbb.shape)    # (1, 1, 2, 3) -- extra list nesting adds leading axes
ccc = np.array([[1,2],[3,4],[5,6]])
print(ccc.shape)    # (3, 2)
ddd = ccc.reshape(3,2,1,1)
print(ddd.shape)    # (3, 2, 1, 1) -- reshape can add trailing singleton axes
pp.pprint(ddd)
#!/usr/bin/env python3
######################################################
## Python implementation of the following examples ##
## https://github.com/IntelRealSense/librealsense/blob/master/doc/post-processing-filters.md
## https://github.com/IntelRealSense/librealsense/tree/master/wrappers/opencv/depth-filter
## https://github.com/IntelRealSense/librealsense/tree/master/examples/C/depth
## https://github.com/IntelRealSense/librealsense/blob/master/wrappers/python/examples/python-rs400-advanced-mode-example.py
######################################################
# Install required packages:
# pip3 install pyrealsense2
# pip3 install opencv-python
# First import the libraries
import sys
import pyrealsense2 as rs # Intel RealSense cross-platform open-source API
import time
import numpy as np # fundamental package for scientific computing
import json
# in order to import cv2 under python3 when you also have ROS installed
import os
# Make cv2 importable under Python 3 when ROS (Python 2) is also installed:
# drop ROS's python2.7 dist-packages from the import path and add the local
# Anaconda site-packages when it exists.
ros_packages = "/opt/ros/kinetic/lib/python2.7/dist-packages"
# Bug fix: guard on membership in sys.path, not existence on disk --
# sys.path.remove() raises ValueError when the entry is absent.
if ros_packages in sys.path:
    sys.path.remove(ros_packages)
# Bug fix: os.path.exists() does not expand '~', so the original check could
# never succeed; expand the home directory explicitly.
conda_packages = os.path.expanduser("~/anaconda3/lib/python3.7/site-packages")
if os.path.exists(conda_packages):
    sys.path.append(conda_packages)
import cv2
######################################################
## These parameters are reconfigurable ##
######################################################
STREAM_TYPE = [rs.stream.depth, rs.stream.color]  # rs2_stream is a types of data provided by RealSense device
FORMAT = [rs.format.z16, rs.format.bgr8]          # rs2_format is identifies how binary data is encoded within a frame
WIDTH = 848             # Defines the number of columns for each frame or zero for auto resolve
HEIGHT = 480            # Defines the number of lines for each frame or zero for auto resolve
FPS = 30                # Defines the rate of frames per second
DISPLAY_WINDOW_NAME = 'Input/output depth'
OPTION_WINDOW_NAME = 'Filter options'
USE_PRESET_FILE = True                      # load the JSON preset below at startup
PRESET_FILE = "../cfg/d4xx-default.json"

# List of filters to be applied, in this order.
#  Depth Frame (input)
#  >> Decimation Filter (reduces depth frame density)
#  >> Threshold Filter (removes values outside recommended range)
#  >> Depth2Disparity Transform** (transform the scene into disparity domain)
#  >> Spatial Filter (edge-preserving spatial smoothing)
#  >> Temporal Filter (reduces temporal noise)
#  >> Hole Filling Filter (rectify missing data in the resulting image)
#  >> Disparity2Depth Transform** (revert the results back to depth)
#  >> Filtered Depth (output)
# Each entry is [enabled?, display name, filter instance]; the trackbar
# callbacks below index into this list by position.
filters = [
    [True, "Decimation Filter", rs.decimation_filter()],
    [True, "Threshold Filter", rs.threshold_filter()],
    [True, "Depth to Disparity", rs.disparity_transform(True)],
    [True, "Spatial Filter", rs.spatial_filter()],
    [True, "Temporal Filter", rs.temporal_filter()],
    [True, "Hole Filling Filter", rs.hole_filling_filter()],
    [True, "Disparity to Depth", rs.disparity_transform(False)]
]
######################################################
## Functions to change filtering options online ##
## Description for each option can be found at: ##
## https://github.com/IntelRealSense/librealsense/blob/master/doc/post-processing-filters.md
######################################################
decimation_magnitude_min = 2
decimation_magnitude_max = 8

def on_trackbar_decimation(val):
    """cv2 trackbar callback: apply the Decimation Filter magnitude, clamped below."""
    if val >= decimation_magnitude_min:
        filters[0][2].set_option(rs.option.filter_magnitude, val)
        return
    print("\nFilter magnitude for Decimation Filter cannot be smaller than ", decimation_magnitude_min)
    filters[0][2].set_option(rs.option.filter_magnitude, decimation_magnitude_min)
threshold_min_m = 0.15
threshold_max_m = 10.0

def on_trackbar_max_threshold(val_m):
    """cv2 trackbar callback: apply the Threshold Filter's max distance (meters), clamped to [threshold_min_m, threshold_max_m]."""
    if threshold_min_m <= val_m <= threshold_max_m:
        # filters[1][2].set_option(rs.option.min_distance, val_m)
        filters[1][2].set_option(rs.option.max_distance, val_m)
        return
    if val_m < threshold_min_m:
        print("\nMaximum threshold cannot be smaller than ", threshold_min_m)
        filters[1][2].set_option(rs.option.max_distance, threshold_min_m)
    else:
        print("\nMaximum threshold cannot be larger than ", threshold_max_m)
        filters[1][2].set_option(rs.option.max_distance, threshold_max_m)
spatial_magnitude_min = 1
spatial_magnitude_max = 5

def on_trackbar_spatial_magnitude(val):
    """cv2 trackbar callback: apply the Spatial Filter magnitude, clamped below."""
    if val >= spatial_magnitude_min:
        filters[3][2].set_option(rs.option.filter_magnitude, val)
        return
    print("\nFilter magnitude for Spatial Filter cannot be smaller than ", spatial_magnitude_min)
    filters[3][2].set_option(rs.option.filter_magnitude, spatial_magnitude_min)
spatial_smooth_alpha_min = 0.25
spatial_smooth_alpha_max = 1
spatial_smooth_alpha_scaled_max = 10

def on_trackbar_spatial_smooth_alpha(val):
    """cv2 trackbar callback: remap the integer slider to [0, 1] and apply it as the Spatial Filter smooth-alpha, clamped below."""
    # cv2 trackbars only step through discrete integers, so the slider runs over
    # [0, spatial_smooth_alpha_scaled_max] and is rescaled to [0, spatial_smooth_alpha_max].
    alpha = val / spatial_smooth_alpha_scaled_max * spatial_smooth_alpha_max
    if alpha >= spatial_smooth_alpha_min:
        filters[3][2].set_option(rs.option.filter_smooth_alpha, alpha)
        return
    print("\nFilter magnitude for Spatial Filter cannot be smaller than ", spatial_smooth_alpha_min)
    filters[3][2].set_option(rs.option.filter_smooth_alpha, spatial_smooth_alpha_min)
spatial_smooth_delta_min = 1
spatial_smooth_delta_max = 50

def on_trackbar_spatial_smooth_delta(val):
    """cv2 trackbar callback: apply the Spatial Filter smooth-delta, clamped below.

    Bug fix: the warning previously said "Smooth alpha" although this callback
    controls the smooth-delta option.
    """
    if val < spatial_smooth_delta_min:
        print("\nSmooth delta for Spatial Filter cannot be smaller than ", spatial_smooth_delta_min)
        val = spatial_smooth_delta_min
    filters[3][2].set_option(rs.option.filter_smooth_delta, val)
spatial_hole_filling_min = 0
spatial_hole_filling_max = 5

def on_trackbar_spatial_hole_filling(val):
    """cv2 trackbar callback: apply the Spatial Filter hole-filling mode, clamped below.

    Bug fix: the warning previously said "Smooth alpha" although this callback
    controls the Spatial Filter's holes_fill option.
    """
    if val < spatial_hole_filling_min:
        print("\nHole filling for Spatial Filter cannot be smaller than ", spatial_hole_filling_min)
        val = spatial_hole_filling_min
    filters[3][2].set_option(rs.option.holes_fill, val)
hole_filling_filter_min = 0
hole_filling_filter_max = 2

def on_trackbar_hole_filling(val):
    """cv2 trackbar callback: select the Hole Filling Filter's fill direction."""
    # direction: 0-from left, 1-farest from around, 2-nearest from around
    filters[5][2].set_option(rs.option.holes_fill, val)
######################################################
## Functions to interface with D4xx cameras ##
######################################################
# USB product ids of the D4xx family devices that support advanced mode.
DS5_product_ids = ["0AD1", "0AD2", "0AD3", "0AD4", "0AD5", "0AF6", "0AFE", "0AFF", "0B00", "0B01", "0B03", "0B07","0B3A"]

def find_device_that_supports_advanced_mode():
    """Return the first connected RealSense device whose product id is in DS5_product_ids.

    Raises:
        Exception: when no advanced-mode-capable device is connected.
    """
    # Cleanup: removed the unused 'ds5_dev = rs.device()' placeholder and a
    # stray trailing semicolon from the original.
    ctx = rs.context()
    for dev in ctx.query_devices():
        if dev.supports(rs.camera_info.product_id) and str(dev.get_info(rs.camera_info.product_id)) in DS5_product_ids:
            if dev.supports(rs.camera_info.name):
                print("Found device that supports advanced mode:", dev.get_info(rs.camera_info.name))
            return dev
    raise Exception("No device that supports advanced mode was found")
# Loop until we successfully enable advanced mode
def d4xx_enable_advanced_mode(advnc_mode):
    """Repeatedly toggle advanced mode until the device reports it enabled.

    Toggling makes the camera disconnect and re-enumerate, so after each
    attempt we sleep and re-acquire a fresh device handle before re-checking.
    """
    while not advnc_mode.is_enabled():
        print("Trying to enable advanced mode...")
        advnc_mode.toggle_advanced_mode(True)
        # At this point the device will disconnect and re-connect.
        print("Sleeping for 5 seconds...")
        time.sleep(5)
        # The 'dev' object will become invalid and we need to initialize it again
        dev = find_device_that_supports_advanced_mode()
        advnc_mode = rs.rs400_advanced_mode(dev)
print("Advanced mode is", "enabled" if advnc_mode.is_enabled() else "disabled")
# Load the settings stored in the JSON file
def d4xx_load_settings_file(advnc_mode, setting_file):
    """Load a JSON preset file into the device via advanced mode.

    Exits the process when the file is missing or advanced mode is disabled.
    """
    # Sanity checks
    if os.path.isfile(setting_file):
        print("Setting file found", setting_file)
    else:
        print("Cannot find setting file ", setting_file)
        exit()
    if advnc_mode.is_enabled():
        print("Advanced mode is enabled")
    else:
        print("Device does not support advanced mode")
        exit()
    # Input for load_json() is the content of the json file, not the file path
    with open(setting_file, 'r') as file:
        json_text = file.read().strip()
        advnc_mode.load_json(json_text)
######################################################
## Main program starts here ##
######################################################
try:
    # Optionally push the JSON preset onto the camera before streaming.
    if USE_PRESET_FILE:
        device = find_device_that_supports_advanced_mode()
        advnc_mode = rs.rs400_advanced_mode(device)
        d4xx_enable_advanced_mode(advnc_mode)
        d4xx_load_settings_file(advnc_mode, PRESET_FILE)

    # Create a context object. This object owns the handles to all connected realsense devices
    pipeline = rs.pipeline()

    # Configure depth and color streams
    config = rs.config()
    config.enable_stream(STREAM_TYPE[0], WIDTH, HEIGHT, FORMAT[0], FPS)
    config.enable_stream(STREAM_TYPE[1], WIDTH, HEIGHT, FORMAT[1], FPS)
    colorizer = rs.colorizer()

    # Report which post-processing filters are active for this run.
    for i in range(len(filters)):
        if filters[i][0] is True:
            print("Applying: ", filters[i][1])
        else:
            print("NOT applying: ", filters[i][1])

    # Start streaming
    profile = pipeline.start(config)

    # Create the image windows to be used
    cv2.namedWindow(OPTION_WINDOW_NAME, cv2.WINDOW_NORMAL)
    cv2.namedWindow(DISPLAY_WINDOW_NAME, cv2.WINDOW_AUTOSIZE)

    # Create trackbars for options modifiers
    # NOTE: - The trackbar's minimum is always zero and cannot be changed
    #       - The trackbar's steps are discrete (so 0-1-2 etc.)
    cv2.createTrackbar('Decimation magnitude [2-8]', OPTION_WINDOW_NAME, 0, decimation_magnitude_max, on_trackbar_decimation)
    cv2.createTrackbar('Threshold [0-any] (cm)', OPTION_WINDOW_NAME, int(threshold_max_m), int(threshold_max_m), on_trackbar_max_threshold)
    cv2.createTrackbar('Spatial magnitude [1-5]', OPTION_WINDOW_NAME, 0, spatial_magnitude_max, on_trackbar_spatial_magnitude)
    cv2.createTrackbar('Spatial smooth alpha [0.25-1]', OPTION_WINDOW_NAME, 0, spatial_smooth_alpha_scaled_max, on_trackbar_spatial_smooth_alpha)
    cv2.createTrackbar('Spatial smooth delta [1-50]', OPTION_WINDOW_NAME, 0, spatial_smooth_delta_max, on_trackbar_spatial_smooth_delta)
    cv2.createTrackbar('Spatial hole filling [0-5]', OPTION_WINDOW_NAME, 0, spatial_hole_filling_max, on_trackbar_spatial_hole_filling)
    cv2.createTrackbar('Hole filling direction [0-2]', OPTION_WINDOW_NAME, 0, hole_filling_filter_max, on_trackbar_hole_filling)

    # Avoid unnecessary blank space in the displayed window
    cv2.resizeWindow(OPTION_WINDOW_NAME, 600, 100)

    last_time = time.time()
    while True:
        # This call waits until a new coherent set of frames is available on a device
        # Calls to get_frame_data(...) and get_frame_timestamp(...) on a device will return stable values until wait_for_frames(...) is called
        frames = pipeline.wait_for_frames()
        depth_frame = frames.get_depth_frame()
        if not depth_frame:
            continue

        # Apply the filters (enabled entries only, in list order)
        filtered_frame = depth_frame
        for i in range(len(filters)):
            if filters[i][0] is True:
                filtered_frame = filters[i][2].process(filtered_frame)

        # Show the processing speed
        processing_speed = 1/(time.time() - last_time)
        print("\r>> Processing speed %.2f fps" %(processing_speed), end='')
        last_time = time.time()

        # Prepare the images: colorize raw and filtered depth; the filtered
        # frame may be decimated, so resize it back to WIDTH x HEIGHT before
        # stacking the two images side by side.
        input_image = np.asanyarray(colorizer.colorize(depth_frame).get_data())
        output_image = np.asanyarray(colorizer.colorize(filtered_frame).get_data())
        display_image = np.hstack((input_image, cv2.resize(output_image, (WIDTH, HEIGHT))))

        # Put the fps in the corner of the image
        text = ("%0.2f" % (processing_speed,)) + ' fps'
        textsize = cv2.getTextSize(text, cv2.FONT_HERSHEY_SIMPLEX, 1, 2)[0]
        cv2.putText(display_image,
                    text,
                    org = (int((display_image.shape[1] - textsize[0]/2)), int((textsize[1])/2)),
                    fontFace = cv2.FONT_HERSHEY_SIMPLEX,
                    fontScale = 0.5,
                    thickness = 1,
                    color = (255, 255, 255))

        # Show the images with the fps
        cv2.imshow(DISPLAY_WINDOW_NAME, display_image)
        cv2.waitKey(1)

except KeyboardInterrupt:
    print('Keyboard interrupt. Closing the script...')

# NOTE(review): this top-level handler swallows every error after printing it,
# so hardware/stream failures exit silently with status 0 -- consider re-raising.
except Exception as e:
    print(e)
    pass
""" Small hand-written recursive descent parser for SVG <path> data.
This software is OSI Certified Open Source Software.
OSI Certified is a certification mark of the Open Source Initiative.
Copyright (c) 2006, Enthought, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of Enthought, Inc. nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import sys, re
def print_error():
    """Print the active exception's type, source file, line number and message.

    Ported from Python 2: the original used a print *statement*, which is a
    SyntaxError under Python 3.
    """
    exc, err, tb = sys.exc_info()
    print(exc, tb.tb_frame.f_code.co_filename, 'ERROR ON LINE', tb.tb_lineno, '\n', err)
    # Break the reference cycle exc_info creates with the current frame.
    del exc, err, tb
class _EOF(object):
    """Sentinel type whose single instance marks the end of the token stream."""
    def __repr__(self):
        return 'EOF'

# Module-level singleton yielded (as the token type) by Lexer.lex at end of input.
EOF = _EOF()
# (token name, regex) pairs for SVG path data, tried in order; 'float' must
# precede 'int' so that e.g. "1.5" is not split into int tokens.
lexicon = [
    ('float', r'[-\+]?(?:(?:[0-9]*\.[0-9]+)|(?:[0-9]+\.))(?:[Ee][-\+]?[0-9]+)?'),
    ('int', r'[-\+]?[0-9]+'),
    ('command', r'[AaCcHhLlMmQqSsTtVvZz]'),
]
class Lexer(object):
    """ Break SVG path data into tokens.

    The SVG spec requires that tokens are greedy. This lexer relies on Python's
    regexes defaulting to greediness.

    This style of implementation was inspired by this article:
        http://www.gooli.org/blog/a-simple-lexer-in-python/
    """
    def __init__(self, lexicon):
        """lexicon: sequence of (token_name, regex) pairs, tried in order."""
        self.lexicon = lexicon
        parts = ['(?P<%s>%s)' % (name, regex) for name, regex in lexicon]
        self.regex_string = '|'.join(parts)
        self.regex = re.compile(self.regex_string)

    def lex(self, text):
        """ Yield (token_type, str_data) tokens.

        The last token will be (EOF, None) where EOF is the singleton object
        defined in this module.
        """
        # Cleanup: removed the commented-out `self.text = text` dead code so
        # the docstring is the method's first statement.
        for match in self.regex.finditer(text):
            # Exactly one named alternative matched; report its name and text.
            for name, _ in self.lexicon:
                data = match.group(name)
                if data is not None:
                    yield (name, data)
                    break
        yield (EOF, None)
svg_path_lexer = Lexer(lexicon)
class SVGPathParser(object):
    """ Parse SVG <path> data into a list of commands.

    Each distinct command will take the form of a tuple (command, data). The
    `command` is just the character string that starts the command group in the
    <path> data, so 'M' for absolute moveto, 'm' for relative moveto, 'Z' for
    closepath, etc. The kind of data it carries with it depends on the command.
    For 'Z' (closepath), it's just None. The others are lists of individual
    argument groups. Multiple elements in these lists usually mean to repeat the
    command. The notable exception is 'M' (moveto) where only the first element
    is truly a moveto. The remainder are implicit linetos.

    See the SVG documentation for the interpretation of the individual elements
    for each command.

    The main method is `parse(text)`. It can only consume actual strings, not
    filelike objects or iterators.
    """

    def __init__(self, lexer=None):
        # Late-bind the default lexer (originally `lexer=svg_path_lexer`, which
        # was evaluated at def time); callers see identical behavior.
        self.lexer = lexer if lexer is not None else svg_path_lexer
        # Dispatch table: command character -> grammar rule method.
        self.command_dispatch = {
            'Z': self.rule_closepath,
            'z': self.rule_closepath,
            'M': self.rule_moveto_or_lineto,
            'm': self.rule_moveto_or_lineto,
            'L': self.rule_moveto_or_lineto,
            'l': self.rule_moveto_or_lineto,
            'H': self.rule_orthogonal_lineto,
            'h': self.rule_orthogonal_lineto,
            'V': self.rule_orthogonal_lineto,
            'v': self.rule_orthogonal_lineto,
            'C': self.rule_curveto3,
            'c': self.rule_curveto3,
            'S': self.rule_curveto2,
            's': self.rule_curveto2,
            'Q': self.rule_curveto2,
            'q': self.rule_curveto2,
            'T': self.rule_curveto1,
            't': self.rule_curveto1,
            'A': self.rule_elliptical_arc,
            'a': self.rule_elliptical_arc,
        }
        self.number_tokens = set(['int', 'float'])

    def parse(self, text):
        """ Parse a string of SVG <path> data.
        """
        # Python 3 fix: generators expose __next__, not the Python 2 `.next`.
        next = self.lexer.lex(text).__next__
        token = next()
        return self.rule_svg_path(next, token)

    def rule_svg_path(self, next, token):
        # path: command-group* EOF
        commands = []
        while token[0] is not EOF:
            if token[0] != 'command':
                raise SyntaxError("expecting a command; got %r" % (token,))
            rule = self.command_dispatch[token[1]]
            command_group, token = rule(next, token)
            commands.append(command_group)
        return commands

    def rule_closepath(self, next, token):
        # 'Z'/'z' takes no arguments.
        command = token[1]
        token = next()
        return (command, None), token

    def rule_moveto_or_lineto(self, next, token):
        # 'M'/'m'/'L'/'l': one or more coordinate pairs.
        command = token[1]
        token = next()
        coordinates = []
        while token[0] in self.number_tokens:
            # Narrowed from a bare `except:`; a malformed pair is replaced by
            # the origin and the offending token is skipped, as before.
            try:
                pair, token = self.rule_coordinate_pair(next, token)
            except SyntaxError:
                pair, token = (0.0, 0.0), next()
            coordinates.append(pair)
        return (command, coordinates), token

    def rule_orthogonal_lineto(self, next, token):
        # 'H'/'h'/'V'/'v': one or more single coordinates.
        command = token[1]
        token = next()
        coordinates = []
        while token[0] in self.number_tokens:
            try:
                coord, token = self.rule_coordinate(next, token)
            except SyntaxError:
                coord, token = 0.0, next()
            coordinates.append(coord)
        return (command, coordinates), token

    def rule_curveto3(self, next, token):
        # 'C'/'c': groups of three coordinate pairs.
        command = token[1]
        token = next()
        coordinates = []
        while token[0] in self.number_tokens:
            try:
                pair1, token = self.rule_coordinate_pair(next, token)
            except SyntaxError:
                try:
                    pair1, token = (0.0, 0.0), next()
                except StopIteration:
                    break
            try:
                pair2, token = self.rule_coordinate_pair(next, token)
            except SyntaxError:
                try:
                    pair2, token = (0.0, 0.0), next()
                except StopIteration:
                    break
            try:
                pair3, token = self.rule_coordinate_pair(next, token)
            except SyntaxError:
                try:
                    pair3, token = (0.0, 0.0), next()
                except StopIteration:
                    break
            coordinates.append((pair1, pair2, pair3))
        return (command, coordinates), token

    def rule_curveto2(self, next, token):
        # 'S'/'s'/'Q'/'q': groups of two coordinate pairs.
        command = token[1]
        token = next()
        coordinates = []
        while token[0] in self.number_tokens:
            try:
                pair1, token = self.rule_coordinate_pair(next, token)
            except SyntaxError:
                pair1, token = (0.0, 0.0), next()
            try:
                pair2, token = self.rule_coordinate_pair(next, token)
            except SyntaxError:
                pair2, token = (0.0, 0.0), next()
            coordinates.append((pair1, pair2))
        return (command, coordinates), token

    def rule_curveto1(self, next, token):
        # 'T'/'t': single coordinate pairs.
        command = token[1]
        token = next()
        coordinates = []
        while token[0] in self.number_tokens:
            try:
                pair1, token = self.rule_coordinate_pair(next, token)
            except SyntaxError:
                pair1, token = (0.0, 0.0), next()
            coordinates.append(pair1)
        return (command, coordinates), token

    def rule_elliptical_arc(self, next, token):
        # 'A'/'a': (rx ry) x-axis-rotation large-arc-flag sweep-flag (x y)
        command = token[1]
        token = next()
        arguments = []
        while token[0] in self.number_tokens:
            rx = float(token[1])
            if rx < 0.0:
                raise SyntaxError("expecting a nonnegative number; got %r" % (token,))
            token = next()
            if token[0] not in self.number_tokens:
                raise SyntaxError("expecting a number; got %r" % (token,))
            ry = float(token[1])
            if ry < 0.0:
                raise SyntaxError("expecting a nonnegative number; got %r" % (token,))
            token = next()
            if token[0] not in self.number_tokens:
                raise SyntaxError("expecting a number; got %r" % (token,))
            axis_rotation = float(token[1])
            token = next()
            if token[1] not in ('0', '1'):
                raise SyntaxError("expecting a boolean flag; got %r" % (token,))
            large_arc_flag = bool(int(token[1]))
            token = next()
            if token[1] not in ('0', '1'):
                raise SyntaxError("expecting a boolean flag; got %r" % (token,))
            sweep_flag = bool(int(token[1]))
            token = next()
            if token[0] not in self.number_tokens:
                raise SyntaxError("expecting a number; got %r" % (token,))
            x = float(token[1])
            token = next()
            if token[0] not in self.number_tokens:
                raise SyntaxError("expecting a number; got %r" % (token,))
            y = float(token[1])
            token = next()
            arguments.append(((rx,ry), axis_rotation, large_arc_flag, sweep_flag, (x,y)))
        return (command, arguments), token

    def rule_coordinate(self, next, token):
        # A single number; returns (value, next_token).
        if token[0] not in self.number_tokens:
            raise SyntaxError("expecting a number; got %r" % (token,))
        x = float(token[1])
        token = next()
        return x, token

    def rule_coordinate_pair(self, next, token):
        # Inline these since this rule is so common.
        if token[0] not in self.number_tokens:
            raise SyntaxError("expecting a number; got %r" % (token,))
        x = float(token[1])
        token = next()
        if token[0] not in self.number_tokens:
            raise SyntaxError("expecting a number; got %r" % (token,))
        y = float(token[1])
        token = next()
        return (x,y), token
|
# Import the needed referances
import pandas as pd
import numpy as np
import csv as csv
from sklearn.model_selection import cross_val_score
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC, LinearSVC
from sklearn.ensemble import RandomForestClassifier
from itertools import combinations
#Shuffle the datasets
from sklearn.utils import shuffle
from numpy import array,array_equal
#Learning curve
import matplotlib.pyplot as plt
from sklearn.model_selection import learning_curve
from sklearn.model_selection import ShuffleSplit
# Load the forest cover-type train/test splits from disk.
train_dataset = pd.read_csv('/home/gaurav/forest-type/train.csv')
test_dataset = pd.read_csv('/home/gaurav/forest-type/test.csv')
# NOTE(review): the result of value_counts() is discarded -- presumably left
# over from interactive exploration.
train_dataset.dtypes.value_counts()
# Side-by-side missing-value counts for both datasets.
nas = pd.concat([train_dataset.isnull().sum(), test_dataset.isnull().sum()], axis=1, keys=['Train Dataset', 'Test Dataset'])
full_dataset = [train_dataset, test_dataset]
# Remove constant features
def identify_constant_features(dataframe):
    """Return the column labels of *dataframe* that contain a single unique value.

    Uses the vectorized DataFrame.nunique instead of apply+lambda;
    dropna=False keeps NaN counted as a distinct value, matching the
    original len(unique()) semantics.
    """
    counts = dataframe.nunique(dropna=False)
    return counts[counts == 1].index.tolist()
# Drop columns that are constant in the training set from both splits.
constant_features_train = set(identify_constant_features(train_dataset))
train_dataset.drop(constant_features_train, inplace=True, axis=1)
test_dataset.drop(constant_features_train, inplace=True, axis=1)

from sklearn import preprocessing

# Continuous columns that would need scaling (scaling itself is currently
# disabled; list kept for reference).
colToScale = ['Elevation', 'Aspect', 'Slope', 'Horizontal_Distance_To_Hydrology',
              'Vertical_Distance_To_Hydrology', 'Horizontal_Distance_To_Roadways',
              'Hillshade_9am', 'Hillshade_Noon', 'Hillshade_3pm',
              'Horizontal_Distance_To_Fire_Points']

X_train = train_dataset.drop(["Cover_Type", "Id"], axis=1)
Y_train = train_dataset["Cover_Type"]
X_test = test_dataset.drop("Id", axis=1).copy()

# Fix: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20.
# StratifiedKFold now lives in sklearn.model_selection and takes n_splits;
# the labels are supplied automatically by GridSearchCV at fit time.
from sklearn.model_selection import StratifiedKFold, GridSearchCV

forest = RandomForestClassifier()
parameter_grid = {'max_depth': [1, 2, 3, 4, 5], 'n_estimators': [50, 100, 150, 200, 250], 'criterion': ['gini', 'entropy']}
cross_validation = StratifiedKFold(n_splits=5)
grid_search = GridSearchCV(forest,
                           param_grid=parameter_grid,
                           cv=cross_validation)
grid_search.fit(X_train, Y_train)
print('Best score: {}'.format(grid_search.best_score_))
print('Best parameters: {}'.format(grid_search.best_params_))

# Predict with the refit best estimator and write the submission file.
clf_rf = grid_search
ypred_rf = clf_rf.predict(X_test)
ids = test_dataset['Id']
pd.DataFrame({'Id': ids, 'Cover_Type': ypred_rf},
             columns=['Id', 'Cover_Type']).to_csv('/home/gaurav/forest-type/o2.csv', index=False)
print(pd.Series(ypred_rf).value_counts(sort=False))
|
#!/usr/bin/env python
import unittest
from dominion import Card, Game, Piles
###############################################################################
class Card_Destrier(Card.Card):
    """Destrier (Menagerie): +2 Cards, +1 Action; costs 1 less per card gained this turn."""
    def __init__(self):
        Card.Card.__init__(self)
        self.cardtype = Card.CardType.ACTION
        self.base = Card.CardExpansion.MENAGERIE
        self.desc = "+2 Cards; +1 Action; During your turns, this costs 1 less per card you've gained this turn."
        self.name = "Destrier"
        self.cards = 2    # cards drawn on play
        self.actions = 1  # actions granted on play
        self.cost = 6     # base cost before the gain discount
    def hook_this_card_cost(self, game, player):
        """Cost-modifier hook: reduce the cost by 1 per card gained this turn."""
        num_gained = len(player.stats["gained"])
        return -num_gained
###############################################################################
class Test_Destrier(unittest.TestCase):
    """Unit tests for the Destrier card."""
    def setUp(self):
        # Single-player test game with Destrier as the only extra kingdom card.
        self.g = Game.TestGame(numplayers=1, initcards=["Destrier"])
        self.g.start_game()
        self.plr = self.g.player_list(0)
        self.card = self.g["Destrier"].remove()
    def test_play(self):
        """Playing Destrier from an otherwise empty hand draws 2 cards and leaves 1 action."""
        self.plr.piles[Piles.HAND].set()
        self.plr.add_card(self.card, Piles.HAND)
        self.plr.play_card(self.card)
        self.assertEqual(self.plr.actions.get(), 1)
        self.assertEqual(self.plr.piles[Piles.HAND].size(), 2)
###############################################################################
if __name__ == "__main__": # pragma: no cover
unittest.main()
# EOF
|
# Codeforces Beta Round #65
# Problem A -- Way Too Long Words
def abbreviate(word):
    """Return *word* unchanged if it has at most 10 letters, else its
    i18n-style abbreviation: first letter, count of middle letters, last letter.
    """
    if len(word) > 10:
        return word[0] + str(len(word) - 2) + word[-1]
    return word


if __name__ == "__main__":
    # Python 3 port: xrange/raw_input/print-statement are Python 2 only.
    # First input line is the number of words; each following line is a word.
    for _ in range(int(input())):
        print(abbreviate(input()))
|
import datetime
import gardenFunction
'''
Header of the program
'''
def printBanner():
    """Print the restaurant banner, a welcome line, and today's date."""
    border = "*" * 40
    blank_row = "|" + " " * 38 + "|"
    print(border)
    print(blank_row)
    print("|" + " " * 10 + "Garden Restaurant" + " " * 10 + " |")
    print(blank_row)
    print(border)
    print()
    print('Welcome to Garden Restaurant Booking Management System.')
    # Today's date, formatted as YYYY-MM-DD.
    todayDate = datetime.datetime.now().strftime("%Y-%m-%d")
    print('Today\'s Date: %s' % todayDate)
'''
List of venue
'''
def printVenue():
    """Print the numbered venue list loaded from Garden/venue.txt.

    Each venue record is expected to carry 'name' and 'max' keys (as produced
    by gardenFunction.readVenueList).
    """
    print("-" * 40)
    print('Venue')
    print("-" * 40)
    print()
    filename = 'Garden/venue.txt'
    # Fix: renamed the local from 'list', which shadowed the builtin.
    venues = gardenFunction.readVenueList(filename)
    for count, venue in enumerate(venues, start=1):
        name = venue['name']
        people = venue['max']
        print('[' + str(count) + ']' + ' ' + name + ' (' + people + ' persons)')
'''
List of menu package
'''
def printMenuPackage():
    """Show the four fixed-price menu package options."""
    rule = '-' * 40
    print(rule)
    print('Menu Option')
    print(rule)
    print()
    print('[1] RM768.88 Package \t[3] RM1118.88 Package')
    print('[2] RM898.88 Package \t[4] RM1488.88 Package')
'''
Show menu list based on menu chosen
'''
def printPackage(menuList):
    """Print the dishes of menu package *menuList* and ask the user to confirm.

    Returns:
        True when the user keeps the selected menu, False to reselect.
    """
    print('-' * 40)
    print('Menu List')
    print('-' * 40)
    print()
    filename = "Garden/menu/Menu" + str(menuList) + ".txt"
    # read file; renamed the local from 'list', which shadowed the builtin
    items = gardenFunction.readItemList(filename)
    for count, item in enumerate(items, start=1):
        print(str(count) + ' . ' + item)
    print('\nIs the selected menu okay? \n[1] Yes\n[2] No, I want to reselect my menu\n')
    # Loop until a valid choice. The original 'flag' bookkeeping was dead code:
    # both branches returned before the flag was ever re-read.
    while True:
        result = input('Please select a choice:')
        if result == '1':
            return True
        if result == '2':
            return False
        print('Invalid choice.')
'''
Print entertainment list based on venue choices
'''
def printEntertainment(venueChoice, entertainmentList, venueList):
    """List entertainments available for the chosen venue and read a selection.

    Args:
        venueChoice: 1-based venue index, as a string.
        entertainmentList: dicts with 'Availability' and 'entertainments' keys.
        venueList: dicts with at least a 'name' key.

    Returns:
        The chosen option number as a string, or None when the venue has no
        entertainment available.
    """
    venue_name = venueList[int(venueChoice) - 1]['name']
    # Availability is matched on the venue's first word only.
    first_venue_name = venue_name.split(' ')[0]
    entertain_list = [item['entertainments']
                      for item in entertainmentList
                      if first_venue_name in item['Availability']]
    if len(entertain_list) == 0:
        print('\nNo entertainment available for this venue.')
        return None
    print('\nEntertainment for %s' % venue_name)
    select_list = []
    for count, name in enumerate(entertain_list, start=1):
        print(' %s %s' % (count, name))
        select_list.append(str(count))
    # Loop until a valid choice. The original set flag=0 *after* the return,
    # which was unreachable dead code.
    while True:
        choice = input('Please select choice:')
        if choice in select_list:
            return choice
        print('Invalid choice.')
'''
Print Total Summary
'''
def printSummaryTotal(custName, contNum, numPeople, totalTable,
                      totalPrice, strVenue, strMenu, strEntertainment):
    """Print the confirmed booking summary for the customer."""
    divider = "-" * 30
    print('Your reservation has been confirmed.')
    print(divider)
    print('Booking Summary')
    print(divider)
    # Field labels keep the original (ragged) spacing exactly.
    for template, value in (
        ('Customer Name : %s', custName),
        ('Contact Number: %s', contNum),
        ('No of People : %s', numPeople),
        ('Tables : %s', totalTable),
        ('Venue : %s', strVenue),
        ('Package : %s', strMenu),
        ('Add on : %s', strEntertainment),
    ):
        print(template % value)
    print()
    print('Total Price : %s' % totalPrice)
|
import numpy as np
from collections import Counter
import itertools
import matplotlib.pyplot as plt
import random
import copy
from sklearn import metrics
def reduce_dataset(data, number):
    '''Shrink a labeled dataset to number/10 random rows per label.

    data: 2-D array whose first column holds the labels (MNIST digits).
    Returns the sampled rows in shuffled order.
    '''
    labels = data[:, 0]
    label_values = list(set(labels))
    # Row indices grouped by label value.
    rows_by_label = {value: [i for i, v in enumerate(labels) if v == value]
                     for value in label_values}
    # Draw number/10 rows uniformly (without replacement) for each label.
    per_label = int(number / 10)
    sampled = {}
    for label in label_values:
        sampled[label] = np.random.choice(rows_by_label[label], size=per_label, replace=False, p=None)
    chosen_rows = np.array(list(sampled.values())).ravel()
    np.random.shuffle(chosen_rows)
    return data[chosen_rows, :]
def randomly_split_data(data, train_fra):
    '''Randomly split row indices of *data* into train/test index arrays.

    train_fra: fraction of rows assigned to the training split.
    Returns (train_index, test_index); test_index is sorted (setdiff1d).
    '''
    n_train = int(len(data) * train_fra)
    shuffled = np.arange(0, len(data), 1)
    np.random.shuffle(shuffled)
    train_index = shuffled[:n_train]
    test_index = np.setdiff1d(shuffled, train_index)
    return train_index, test_index
def Poly_Kernals(X, T, d):
    '''Produce the Gram Matrix K of the Polynomial Kernel k(x, t) = (x . t)^d.

    X: shape N1xK
    T: shape N2xK
    d: degree of the polynomial
    Returns K: shape N1xN2
    '''
    # The original materialized two N1 x K x N2 copies and an einsum; the
    # pairwise inner products are exactly a matrix product, which avoids the
    # large temporaries and is far faster.
    return (X @ T.T) ** d
def Gaussian_Kernals(X, T, c):
    '''Produce the Gram Matrix K of the Gaussian Kernel
    k(x, t) = exp(-c * ||x - t||^2 / 2).

    X: shape N1xK
    T: shape N2xK
    c: width of the gaussian kernel
    Returns K: shape N1xN2
    '''
    # Broadcasting replaces the original's two explicit N1 x K x N2 repeats
    # and einsum transposes: diff has shape (N1, N2, K).
    diff = X[:, None, :] - T[None, :, :]
    sq_dist = np.sum(diff * diff, axis=2)
    return np.exp(-c * sq_dist / 2.0)
def kernalise_data(train_data, val_data, test_data, n_class, n_epoch, param, kernel='poly'):
    '''
    A function to produce data of the correct form ready to use in kernel perceptron by
    kernelising X and one-hot encoding Y, followed by extending the data according to the number
    of trainning epoch. If Kernel=None, then kernel is not applied.
    train_data: No_train x K
    test_data: No_test x K
    K_long: (No_train*n_epoch) x (No_train*n_epoch)
    Y_one_hot_long: (No_train*n_epoch) x n_class
    K_long_test: (No_train*n_epoch) x No_test
    Y_one_hot_test: No_test x n_class
    '''
    # NOTE(review): the val/test branches below reuse X (the training inputs
    # assigned in the first branch), so when a kernel is requested this function
    # assumes train_data is not None whenever val_data or test_data is given --
    # confirm with callers.
    K_long=None
    Y_one_hot_long=None
    K_long_val=None
    Y_one_hot_val=None
    K_long_test=None
    Y_one_hot_test=None
    if train_data is not None:
        # Column 0 is the label; the remaining columns are the features.
        X=train_data[:,1:]
        Y=train_data[:,0]
        #One hot encoding Y
        Y_one_hot = np.zeros((len(Y), n_class))
        Y_one_hot[np.arange(len(Y)), list(map(int,Y))] = 1
        #Make duplicate for training with multiple epochs
        Y_one_hot_long=np.tile(Y_one_hot.T,n_epoch).T
        if kernel=='poly':
            K=Poly_Kernals(X,X,param)
            A=np.tile(K.T,n_epoch).T
            K_long=np.tile(A,n_epoch)
        elif kernel=='gauss':
            K=Gaussian_Kernals(X,X,param)
            A=np.tile(K.T,n_epoch).T
            K_long=np.tile(A,n_epoch)
        else:
            # No kernel: just replicate the raw features for each epoch.
            K_long=np.tile(X.T,n_epoch).T
    if val_data is not None:
        val_X=val_data[:,1:]
        val_Y=val_data[:,0]
        Y_one_hot_val = np.zeros((len(val_Y), n_class))
        Y_one_hot_val[np.arange(len(val_Y)), list(map(int,val_Y))] = 1
        if kernel=='poly':
            # Kernel between the training inputs and the validation inputs.
            val_K=Poly_Kernals(X,val_X,param)
            K_long_val=np.tile(val_K.T,n_epoch).T
        elif kernel=='gauss':
            val_K=Gaussian_Kernals(X,val_X,param)
            K_long_val=np.tile(val_K.T,n_epoch).T
        else:
            K_long_val=val_X
    if test_data is not None:
        test_X=test_data[:,1:]
        test_Y=test_data[:,0]
        Y_one_hot_test = np.zeros((len(test_Y), n_class))
        Y_one_hot_test[np.arange(len(test_Y)), list(map(int,test_Y))] = 1
        if kernel=='poly':
            # Kernel between the training inputs and the test inputs.
            test_K=Poly_Kernals(X,test_X,param)
            K_long_test=np.tile(test_K.T,n_epoch).T
        elif kernel=='gauss':
            test_K=Gaussian_Kernals(X,test_X,param)
            K_long_test=np.tile(test_K.T,n_epoch).T
        else:
            K_long_test=test_X
    return K_long, Y_one_hot_long, K_long_val, Y_one_hot_val, K_long_test, Y_one_hot_test
def perceptron_fulldata(X_tr, Y_tr, X_val, Y_val, X_test, Y_test, Param):
    '''Online multi-class kernel perceptron over the full dataset, evaluating
    the polynomial kernel on the fly instead of precomputing the Gram matrix.

    X_tr : (No_train, n_features) raw training features
    Y_tr : (No_train, n_class) one-hot training labels
    X_val, Y_val / X_test, Y_test : optional raw validation / test splits
    Param : polynomial kernel degree, forwarded to Poly_Kernals

    Returns (train error history, val error history, mean test error,
    per-sample test predictions).
    '''
    k_class = 10
    # Generalized: the original hard-coded the digit feature dimension (256).
    n_features = X_tr.shape[1]
    # W holds the dual (alpha) coefficients: one row per class, one column
    # per training example seen so far.
    W = np.zeros((k_class, len(X_tr)))
    Train_error = []
    Val_error = []
    Y_pred_test_all = []
    train_error = 0
    val_error = 0
    test_error = 0
    for t in np.arange(1, len(X_tr), 1):
        # Kernel of the earlier examples against the current one.
        # NOTE(review): the original sliced [:t-1]; kept as-is so the last
        # seen example's kernel entry stays zero -- confirm intent.
        K = np.zeros((len(X_tr), 1))
        K[:t-1] = Poly_Kernals(X_tr[:t-1], X_tr[t].reshape(1, n_features), d=Param)
        WX = np.einsum('ij,jk->ik', W, K)
        Y_hat = np.zeros_like(WX)
        Y_hat[WX.argmax()] = 1  # choose the class with the highest score
        # Perceptron update: +1 on true class, -1 on predicted (all zero
        # when the prediction was correct).
        diff = Y_tr[t] - Y_hat.T[0]
        train_error += abs(diff).sum()
        W[:, t] = diff
        Train_error.append(train_error / (t * k_class))
        if X_val is not None:
            for j in range(len(X_val)):
                # BUGFIX: the original indexed X_val[t] (the training-loop
                # counter) and compared B against the whole Y_val matrix;
                # use the per-sample index j and label Y_val[j].
                Y_pred_val = np.einsum(
                    'ij,jk->ik', W[:, :t],
                    Poly_Kernals(X_tr[:t], X_val[j].reshape(1, n_features), d=Param)).T
                B = np.zeros_like(Y_pred_val)
                B[range(len(Y_pred_val)), Y_pred_val.argmax(1)] = 1
                val_error += abs(B - Y_val[j]).sum()
            Val_error.append(val_error / (len(Y_val) * k_class))
            val_error = 0
    if X_test is not None:
        for j in range(len(X_test)):
            WX_test = np.einsum(
                'ij,jk->ik', W[:, :t],
                Poly_Kernals(X_tr[:t], X_test[j].reshape(1, n_features), d=Param)).T
            Y_pred_test = np.zeros_like(WX_test)
            Y_pred_test[:, WX_test.argmax(1)] = 1
            test_error += abs(Y_pred_test - Y_test[j]).sum()
            Y_pred_test_all.append(Y_pred_test)
    return Train_error, Val_error, test_error / (len(X_test) * k_class), Y_pred_test_all
def perceptron_quick(X_tr, Y_tr, X_val, Y_val, X_test, Y_test):
    '''Online multi-class kernel perceptron on pre-kernelised data.

    X_tr is the (tiled) training Gram matrix and Y_tr the matching one-hot
    labels; X_val/X_test are Gram matrices of training points (rows) against
    validation/test points (columns), Y_val/Y_test their one-hot labels.
    Returns (train error history, val error history, mean test error,
    test predictions).
    '''
    k_class=10
    #Matrix of alpha which each row represent each class, change column value after each iteration
    W=np.zeros((k_class,len(X_tr)))
    Train_error=[]
    Val_error=[]
    Y_pred_test_all=[]
    train_error=0
    val_error=0
    test_error=0
    for t in np.arange(1,len(X_tr),1):
        # Score every class for sample t using the first t dual weights.
        WX=np.einsum('ij,jk->ik',W[:,:t],X_tr[:t,t].reshape(t,1))
        Y_hat = np.zeros_like(WX)
        Y_hat[WX.argmax()] = 1 #choosing the class with the highest value
        # Perceptron update: +1 on the true class, -1 on the predicted one
        # (all zero when the prediction was correct).
        diff=Y_tr[t]-Y_hat.T[0]
        train_error+=abs(diff).sum()
        W[:,t]=diff
        Train_error.append(train_error/(t*10))
        if X_val is not None:
            # Validation scores for all validation points at once.
            Y_pred_val=np.einsum('ij,jk->ik',W[:,:t], X_val[:t]).T
            # Y_pred_val=np.einsum('ij,jk->ik',W[:,:t], Poly_Kernals(X_tr[:t],X_val[t].reshape(1,256),d=Param)).T
            B=np.zeros_like(Y_pred_val)
            B[range(len(Y_pred_val)),Y_pred_val.argmax(1)]=1
            val_error+=abs(B-Y_val).sum()
            Val_error.append(val_error/(len(Y_val)*10))
            val_error=0
            # Early stopping after a 100-step burn-in.
            # NOTE(review): compares the latest train error against the mean
            # val error from step 20 onward, not the latest value -- confirm.
            if t>100:
                if Train_error[-1] < np.mean(Val_error[20:]): #stop training if val error > train error
                    break
    if X_test is not None:
        # Final test evaluation with the weights learned so far (t+1 columns).
        WX_test=np.einsum('ij,jk->ik',W[:,:t+1], X_test[:t+1,:]).T
        Y_pred_test=np.zeros_like(WX_test)
        Y_pred_test[range(len(Y_test)),WX_test.argmax(1)]=1
        test_error=abs(Y_pred_test-Y_test).sum()
        Y_pred_test_all.append(Y_pred_test)
    return Train_error, Val_error, test_error/(len(X_test)*10), Y_pred_test_all
def fast_training(data, param_set, epoch, run_no, kernel):
    '''Repeatedly split `data` 80/20 (train/test), train the quick kernel
    perceptron once per parameter in `param_set`, and collect the per-run
    train and test errors as arrays.'''
    train_errors = []
    test_errors = []
    for run in range(run_no):
        print(run)
        tr_idx, te_idx = randomly_split_data(data, 0.8)
        split_train = data[tr_idx]
        split_test = data[te_idx]
        for d in param_set:
            X_train, Y_train, _, _, X_test, Y_test = kernalise_data(
                split_train, None, split_test, n_class=10,
                n_epoch=epoch, param=d, kernel=kernel)
            tr_err, _, te_err, _ = perceptron_quick(
                X_train, Y_train, None, None, X_test, Y_test)
            train_errors.append(tr_err)
            test_errors.append(te_err)
    return np.array(train_errors), np.array(test_errors)
def five_fold_cross_val(data, n_epoch, run_no, param_set, kernel):
    '''5-fold cross-validation of the quick kernel perceptron.

    Holds out 20% of `data` as a fixed test split, then for every run and
    every kernel parameter trains on 4 folds / validates on 1, recording
    the test error averaged over the 5 folds.

    Returns a flat list with one mean test error per (run, parameter) pair.
    '''
    train_val_index, test_index = randomly_split_data(data, 0.8)
    test_data = data[test_index, :]
    test_e = []
    for n in range(run_no):
        print(n)
        np.random.shuffle(train_val_index)
        five_fold_index = np.array_split(train_val_index, 5)
        for d in param_set:
            # BUGFIX: the original reset the accumulator inside the fold
            # loop and appended (last fold's error)/5 once per fold;
            # accumulate across folds and append the mean once per parameter.
            error = 0
            for fold in five_fold_index:
                val_index = fold
                train_index = np.setdiff1d(train_val_index, fold)
                val_data = data[val_index, :]
                train_data = data[train_index, :]
                X_train, Y_train, X_val, Y_val, X_test, Y_test = kernalise_data(
                    train_data, val_data, test_data, n_class=10,
                    n_epoch=n_epoch, param=d, kernel=kernel)
                _, _, test_error, _ = perceptron_quick(
                    X_train, Y_train, X_val, Y_val, X_test, Y_test)
                error += test_error
            test_e.append(error / 5)
    return test_e
def main():
    """Run the polynomial- and Gaussian-kernel perceptron experiments on a
    1000-sample subset of the zipcombo digits data and save the errors."""
    full_data = np.loadtxt('zipcombo.dat.txt')
    reduced_data = reduce_dataset(full_data, 1000)
    # Polynomial kernel: degrees 1..7.
    D = np.arange(1, 8, 1)
    Train_E, Test_E = fast_training(reduced_data, D, 5, 2, 'poly')
    np.save('Training_error_20', Train_E)
    np.save('Testing_error_20', Test_E)
    Test_e_5fold = five_fold_cross_val(reduced_data, 5, 2, D, 'poly')
    np.save('Test_error_20_5fold', Test_e_5fold)
    # Gaussian kernel: candidate widths.
    C = [0.01, 0.1, 1, 3, 5, 7, 10]
    Train_E_g, Test_E_g = fast_training(reduced_data, C, 5, 20, 'gauss')
    np.save('Training_error_20_gaussian', Train_E_g)
    np.save('Testing_error_20_gaussian', Test_E_g)
    # BUGFIX: the original passed the polynomial degrees D here instead of
    # the Gaussian widths C.
    Test_e_5fold_g = five_fold_cross_val(reduced_data, 5, 20, C, 'gauss')
    np.save('Test_error_20_5fold_gaussian', Test_e_5fold_g)
# Script entry point.
if __name__ == '__main__':
    main()
|
from gurobipy import *
import numpy as np
import math
import readIn
def solve(full_path_instance):
    """Build and optimise the bakery lot-sizing model for one instance file.

    readIn.readFile supplies: product count, time horizon, initial stock l,
    holding costs h, oven capacity per period K, production times a,
    demands d, changeover costs s and changeover times st.  The model
    minimises holding plus changeover costs; the optimised Gurobi model is
    returned.
    """
    nBakeryProducts, Timehorizon, l, h, K, a, d, s, st = readIn.readFile(full_path_instance)
    # Shift the horizon so index 0 holds the initial state and the real
    # periods are 1..Timehorizon-1.
    Timehorizon = Timehorizon+1
    #-Define model variables----------------------------------------------
    model = Model("lotSizing")
    # x[i,t]: number of pieces of product i produced at time t
    x = {}
    for i in range(1,nBakeryProducts+1):
        for t in range(1,Timehorizon):
            x[i,t] = model.addVar(lb=0,vtype=GRB.INTEGER, obj=0, name="x_"+str(i)+"_"+str(t))
    # y[i,t]: warehousing costs for each product and timestep
    y = {}
    for i in range(1,nBakeryProducts+1):
        for t in range(Timehorizon):
            y[i,t] = model.addVar(lb=0,vtype=GRB.INTEGER, obj=0, name="y_"+str(i)+"_"+str(t))
    # z[i,t]: number of pieces of product i in the warehouse at time t
    z = {}
    for i in range(1,nBakeryProducts+1):
        for t in range(Timehorizon):
            z[i,t] = model.addVar(lb=0,vtype=GRB.INTEGER, obj=0, name="z_"+str(i)+"_"+str(t))
    # u[t]: cost for changing the currently produced product in the oven
    u = {}
    for t in range(1,Timehorizon):
        # BUGFIX: the original named these "u_<stale i>_<t>"; the variable
        # is indexed by t only.
        u[t] = model.addVar(lb=0,vtype=GRB.INTEGER, obj=0, name="u_"+str(t))
    # v[i,t]: will product i be produced at time t? (binary via bounds)
    v = {}
    for i in range(1,nBakeryProducts+1):
        for t in range(1,Timehorizon):
            v[i,t] = model.addVar(lb=0, ub = 1,vtype=GRB.INTEGER, obj=0, name="v_"+str(i)+"_"+str(t))
    # w[i,t]: which product is the oven prepared for at the end of period t?
    w = {}
    for i in range(1,nBakeryProducts+1):
        for t in range(Timehorizon):
            w[i,t] = model.addVar(lb=0, ub = 1,vtype=GRB.INTEGER, obj=0, name="w_"+str(i)+"_"+str(t))
    model.update()
    #-Add constraints-----------------------------------------------
    # c0: initial stock in the warehouse
    for i in range(1,nBakeryProducts+1):
        model.addConstr(z[i,0] == l[i-1], name = 'c0')
    # c1: link warehousing costs y to the stock level
    for t in range(Timehorizon):
        for i in range(1,nBakeryProducts+1):
            model.addConstr(y[i,t] == z[i,t] * h[i-1], name = 'c1')
    # c2: stock balance -- previous stock plus production minus demand
    for t in range(1,Timehorizon):
        for i in range(1,nBakeryProducts+1):
            model.addConstr(z[i,t] == z[i,t-1] + x[i,t] - d[i-1][t-1], name = 'c2')
    # c3: oven capacity -- production time plus changeover time within K[t-1]
    for t in range(1,Timehorizon):
        model.addConstr(quicksum(x[i,t]*a[i-1] for i in range(1,nBakeryProducts+1)) + quicksum(st[i-1][j-1]*v[j,t]*w[i,t-1] for i in range(1,nBakeryProducts+1) for j in range(1,nBakeryProducts+1)) <= (K[t-1]), name = 'c3')
    # c4: at most 2 different products can be produced per timestep
    for t in range(1,Timehorizon):
        model.addConstr(quicksum(v[i,t] for i in range(1,nBakeryProducts+1)) <= 2, name = 'c4')
    # c5: x can only be nonzero when production is allowed (x == v*x)
    for t in range(1,Timehorizon):
        for i in range(1,nBakeryProducts+1):
            model.addConstr(x[i,t] == v[i,t]*x[i,t], name = 'c5')
    # c6: production requires oven preparation in this or the previous period.
    # NOTE(review): the equality forces v[i,t] = w[i,t-1]+w[i,t]; an
    # inequality (<=) would merely permit production -- confirm intent.
    for t in range(1,Timehorizon):
        for i in range(1,nBakeryProducts+1):
            model.addConstr(v[i,t] == w[i,t-1]+w[i,t], name = 'c6')
    # c7: the oven is prepared for exactly one product per period
    for t in range(Timehorizon):
        model.addConstr(quicksum(w[i,t] for i in range(1,nBakeryProducts+1)) == 1, name = 'c7')
    # c8: changeover cost between consecutively prepared products
    for t in range(1,Timehorizon):
        model.addConstr(u[t] == quicksum(w[i,t-1]*s[i-1][j-1]*w[j,t] for i in range(1,nBakeryProducts+1) for j in range(1,nBakeryProducts+1)), name = 'c8')
    # objective: warehousing costs + changeover costs + initial warehousing
    model.setObjective(quicksum(y[i,t] for i in range(1,nBakeryProducts+1) for t in range(1, Timehorizon)) + quicksum(u[t] for t in range(1, Timehorizon)) + quicksum(l[i-1]*h[i-1] for i in range(1,nBakeryProducts+1)))
    model.optimize()
    #-Print result----------------------------------------------
    if model.status == GRB.status.OPTIMAL:
        for i in range(1,nBakeryProducts+1):
            #print('Von Ware %s sind zum Zeitpunkt %s genau %s Teile auf Lager .' % (i, t, z[i,t].x))
            break
    '''
    for t in range(1,Timehorizon):
        for i in range(1,nBakeryProducts+1):
            print('x_%s_%s: %s' % (i,t, x[i,t].x))
    '''
    return model
# Run the solver on the bundled example instance when the module is executed.
solve('lotData1.txt')
|
# Count how often each non-space character occurs in the input line and
# print the counts in first-appearance order.
text = input()
counts = {}
for ch in text:
    if ch == " ":
        continue
    counts[ch] = counts.get(ch, 0) + 1
for ch, occurrences in counts.items():
    print(f"{ch} -> {occurrences}")
# Open (and truncate) the file for writing.
miArchivo = open("miArchivo.txt","w")
# Show some metadata about the open file handle.
print("Name:", miArchivo.name)
print("esta cerrado:", miArchivo.closed)
print("modo abierto:", miArchivo.mode)
# Write two strings to the file (no newlines are added).
miArchivo.write ("Me encanta Python")
miArchivo.write("Me encanta Disney")
miArchivo.close()
# Reopen in append mode and add more text.
miArchivo = open("miArchivo.txt","a")
miArchivo.write("y tambien c++")
# BUGFIX: the original leaked this handle; close it so the append is flushed.
miArchivo.close()
|
from Extras.extras import *
# Integer codes for the piece types.  A board square stores the code of the
# piece standing on it; Piece.getId negates the code for the human player's
# pieces, so the sign encodes ownership.
PawnValue = 1
KnightValue = 2
BishopValue = 3
RookValue = 4
QueenValue = 5
KingValue = 6
class Piece:
    """Base class for all chess pieces.

    Tracks the board position, ownership (CPU or player) and whether the
    piece has moved yet (needed for pawn double-steps and castling).
    """

    def __init__(self, CPU, posX, posY):
        # posX is the row, posY the column.
        self.posX = posX
        self.posY = posY
        self.value = 0          # overridden by each concrete piece class
        self.CPU = CPU
        self.isMoved = False

    def getId(self):
        """Signed piece id: positive for CPU pieces, negative for the player."""
        if self.CPU == True:
            return self.value
        return self.value * -1

    def setPosition(self, posX, posY):
        """Place the piece on a new square and remember that it has moved."""
        self.posX = posX
        self.posY = posY
        self.isMoved = True

    def getPosition(self):
        """Return the current (row, column) tuple."""
        return (self.posX, self.posY)

    def getMoves(self, board, gmCallA=False):
        """Overridden by subclasses; the base piece has no moves."""
        pass
class Pawn(Piece):  # value = 1
    """Pawn: single/double forward steps plus diagonal captures."""

    def __init__(self, CPU, posX, posY):
        # BUGFIX: super(self.__class__, ...) recurses infinitely if this
        # class is ever subclassed; use the zero-argument form.
        super().__init__(CPU, posX, posY)
        self.value = PawnValue

    def getMoves(self, gameBoard, gmCallA=False):
        """Return (fromX, fromY, toX, toY) tuples for this pawn's moves."""
        board = gameBoard.getIntBoard()
        X = self.posX
        Y = self.posY
        movesList = []
        if self.CPU == True:
            s = 1      # CPU pawns advance down the board
            fRow = 1   # starting row, from which the double step is allowed
        else:
            s = -1
            fRow = 6
        # Single step forward onto an empty square.
        if inRange(X + s * 1, Y) and board[X + s * 1, Y] == 0:
            movesList.append((X, Y, X + s * 1, Y))
        # Double step from the starting row.  BUGFIX: the original wrote
        # '==0and' (a syntax error on modern Python) and did not require the
        # intermediate square to be empty, letting the pawn jump over pieces.
        if (inRange(X + s * 2, Y) and board[X + s * 1, Y] == 0
                and board[X + s * 2, Y] == 0 and X == fRow):
            movesList.append((X, Y, X + s * 2, Y))
        # Diagonal captures.
        for t in [(X + s * 1, Y + 1), (X + s * 1, Y - 1)]:
            if inRange(t[0], t[1]) and isEnemy(board[t[0], t[1]], self.CPU):
                movesList.append((X, Y, t[0], t[1]))
        return movesList
class Knight(Piece):
    """Knight: the eight L-shaped jumps."""

    def __init__(self, CPU, posX, posY, reCallable=False):
        # reCallable is unused but kept for interface compatibility.
        # BUGFIX: super(self.__class__, ...) recurses infinitely if this
        # class is ever subclassed; use the zero-argument form.
        super().__init__(CPU, posX, posY)
        self.value = KnightValue

    def getMoves(self, gameBoard, gmCallA=False):
        """Return (fromX, fromY, toX, toY) tuples for every on-board jump
        landing on an empty or enemy-occupied square."""
        board = gameBoard.getIntBoard()
        X = self.posX; Y = self.posY; movesList = []
        # All eight candidate target squares.
        targets = [(X + 2, Y - 1), (X + 2, Y + 1), (X - 2, Y - 1), (X - 2, Y + 1),
                   (X + 1, Y - 2), (X + 1, Y + 2), (X - 1, Y - 2), (X - 1, Y + 2)]
        for t in targets:
            if inRange(t[0], t[1]) and (board[t[0], t[1]] == 0 or isEnemy(board[t[0], t[1]], self.CPU)):
                movesList.append((X, Y, t[0], t[1]))
        return movesList
class Bishop(Piece):
    """Bishop: slides any number of squares along the four diagonals."""

    def __init__(self, CPU, posX, posY):
        # BUGFIX: super(self.__class__, ...) recurses infinitely if this
        # class is ever subclassed; use the zero-argument form.
        super().__init__(CPU, posX, posY)
        self.value = BishopValue

    def getMoves(self, gameBoard, gmCallA=False):
        """Return (fromX, fromY, toX, toY) tuples for every reachable square."""
        board = gameBoard.getIntBoard()
        X = self.posX; Y = self.posY; movesList = []
        for x in [(-1, -1), (-1, 1), (1, -1), (1, 1)]:
            for i in range(1, 8):
                X2 = X + (i * x[0]); Y2 = Y + (i * x[1])
                # Stop at the board edge or at one of our own pieces.
                if inRange(X2, Y2) == False or isFriend(board[X2, Y2], self.CPU) == True:
                    break
                movesList.append((X, Y, X2, Y2))
                # An enemy piece can be captured but blocks further sliding.
                if isEnemy(board[X2, Y2], self.CPU) == True:
                    break
        return movesList
class Rook(Piece):
    """Rook: slides any number of squares along ranks and files."""

    def __init__(self, CPU, posX, posY):
        # BUGFIX: super(self.__class__, ...) recurses infinitely if this
        # class is ever subclassed; use the zero-argument form.
        super().__init__(CPU, posX, posY)
        self.value = RookValue

    def getMoves(self, gameBoard, gmCallA=False):
        """Return (fromX, fromY, toX, toY) tuples for every reachable square."""
        board = gameBoard.getIntBoard()
        X = self.posX; Y = self.posY; movesList = []
        for x in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
            for i in range(1, 8):
                X2 = X + (i * x[0]); Y2 = Y + (i * x[1])
                # Stop at the board edge or at one of our own pieces.
                if inRange(X2, Y2) == False or isFriend(board[X2, Y2], self.CPU) == True:
                    break
                movesList.append((X, Y, X2, Y2))
                # An enemy piece can be captured but blocks further sliding.
                if isEnemy(board[X2, Y2], self.CPU) == True:
                    break
        return movesList
class Queen(Piece):
    """Queen: slides any number of squares along ranks, files and diagonals."""

    def __init__(self, CPU, posX, posY, danerousCord=None):
        # danerousCord (sic) is unused but kept for interface compatibility.
        # BUGFIX: super(self.__class__, ...) recurses infinitely if this
        # class is ever subclassed; use the zero-argument form.
        super().__init__(CPU, posX, posY)
        self.value = QueenValue

    def getMoves(self, gameBoard, gmCallA=False):
        """Return (fromX, fromY, toX, toY) tuples for every reachable square."""
        board = gameBoard.getIntBoard()
        X = self.posX; Y = self.posY; movesList = []
        for x in [(-1, 0), (1, 0), (0, -1), (0, 1), (-1, -1), (-1, 1), (1, -1), (1, 1)]:
            for i in range(1, 8):
                X2 = X + (i * x[0]); Y2 = Y + (i * x[1])
                # Stop at the board edge or at one of our own pieces.
                if inRange(X2, Y2) == False or isFriend(board[X2, Y2], self.CPU) == True:
                    break
                movesList.append((X, Y, X2, Y2))
                # An enemy piece can be captured but blocks further sliding.
                if isEnemy(board[X2, Y2], self.CPU) == True:
                    break
        return movesList
class King(Piece):
    """King: one step in any direction, plus castling when neither the king
    nor the relevant rook has moved and the path is empty and safe."""

    def __init__(self, CPU, posX, posY):
        # BUGFIX: super(self.__class__, ...) recurses infinitely if this
        # class is ever subclassed; use the zero-argument form.
        super().__init__(CPU, posX, posY)
        self.value = KingValue

    def getMoves(self, gameBoard, gmCallA=False):
        """Return (fromX, fromY, toX, toY) moves.  When gmCallA is True the
        opponent's reachable squares are computed first so the king never
        steps onto an attacked square."""
        board = gameBoard.getIntBoard()
        dangerousCords = None
        if gmCallA == True:
            # Squares the enemy could move to on their next turn.
            dangerousCords = [(x[2], x[3]) for x in gameBoard.getAllMoves(not self.CPU)]
            if len(dangerousCords) == 0:
                dangerousCords = None
        X = self.posX; Y = self.posY; movesList = []
        for x in [(-1, 0), (1, 0), (0, -1), (0, 1), (-1, -1), (-1, 1), (1, -1), (1, 1)]:
            X2 = X + (1 * x[0]); Y2 = Y + (1 * x[1])
            if inRange(X2, Y2) == True and isFriend(board[X2, Y2], self.CPU) == False:
                # Skip squares the enemy attacks.
                if dangerousCords is None or ((X2, Y2) not in dangerousCords):
                    # The two kings may never stand on adjacent squares.
                    if borders(board, X2, Y2, self.getId() * -1) == False:
                        movesList.append((X, Y, X2, Y2))
        castling = self.checkCastlingPosiblity(gameBoard, dangerousCords)
        if castling is not None:
            movesList = movesList + castling
        return movesList

    def checkCastlingPosiblity(self, gameBoard, dangerousCords):
        """Return the list of legal castling moves (possibly empty), or None
        when the king has already moved or is off its home square."""
        if self.isMoved == True:
            return None
        board = gameBoard.getIntBoard()
        moves = []
        # The home row depends on which side this king belongs to.
        if self.getId() > 0:
            startX = 0
        else:
            startX = 7
        if self.posX != startX or self.posY != 4 or isIn((startX, 6), dangerousCords):
            return None
        # King-side castling: unmoved rook on column 7, columns 5-6 empty/safe.
        if abs(board[startX, 7]) == RookValue and gameBoard.board[startX, 7].isMoved == False:
            ispos = True
            for i in range(5, 7):
                if board[startX, i] != 0 or isIn((startX, i), dangerousCords):
                    ispos = False
                    break
            if ispos == True and isIn((startX, 6), dangerousCords) == False:
                moves.append((startX, self.posY, startX, 6))
        # Queen-side castling: unmoved rook on column 0, columns 2-3 empty/safe.
        if abs(board[startX, 0]) == RookValue and gameBoard.board[startX, 0].isMoved == False:
            ispos = True
            for i in range(2, 4):
                if board[startX, i] != 0 or isIn((startX, i), dangerousCords):
                    ispos = False
                    break
            if ispos == True and isIn((startX, 2), dangerousCords) == False:
                moves.append((startX, self.posY, startX, 2))
        return moves
flag = ""
key = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f']
for x in range(46):
_file = "file%s.txt" % x
enc = open(_file, "r").read().split()
missing = enc.index("*")
flag += key[missing]
print flag.decode("hex")
# Each of the text files has a character replaced with a star. If we take all the characters that are
# "MIA", we can piece together the hex encoded version of the flag. Decode the hex to get the actual flag:
# flag{m1Ss1ng_1N_4cT10n}
|
####################################################################################
## Runner script | Handles arguments, creates contexts and runs the ETL process. ##
####################################################################################
import sys
from aws2hive import etl
from pyspark import SparkContext, SparkConf
from pyspark.sql import HiveContext
if __name__ == '__main__':
    # Handle arguments: S3 bucket, key prefix, dataset directory and the
    # destination Hive table name, in that positional order.
    bucket_name = sys.argv[1]
    path_prefix = sys.argv[2]
    dataset_dir = sys.argv[3]
    hive_table_name = sys.argv[4]
    # Create Spark and Hive contexts (local mode, all available cores)
    conf = SparkConf().setAppName('AWS2Hive ETL').setMaster('local[*]')
    sc = SparkContext(conf=conf)
    hiveContext = HiveContext(sc)
    # Run the ETL process
    print('Starting the ETL process..\n')
    etl.run_etl(hiveContext, bucket_name, path_prefix, dataset_dir, hive_table_name)
    print('The ETL process has been completed.\n')
def dfs(i):
    """Recursively add every node reachable from i to the global `count`
    (with multiplicity, since there is no visited set).

    NOTE(review): without a visited set this recurses forever on cyclic
    graphs and double-counts nodes reachable along several paths; the
    script below only calls bfs, so this appears unused -- verify before
    relying on it.
    """
    global count
    for j in array[i]:
        count+=1
        dfs(j)
from collections import deque
def bfs(v):
    """Count the nodes reachable from v (v itself included) with a
    breadth-first search over the global adjacency list `array`, which has
    n+1 slots (the globals n and array are set by the script below)."""
    visited = [False] * (n + 1)
    visited[v] = True
    queue = deque([v])
    reachable = 1
    while queue:
        node = queue.popleft()
        for nxt in array[node]:
            if visited[nxt]:
                continue
            visited[nxt] = True
            reachable += 1
            queue.append(nxt)
    return reachable
# Read n nodes / m edges; each input pair "b a" becomes a reversed edge
# a -> b, then print every vertex with the maximal reachable-set size.
n, m = map(int, input().split())
array = [[] for _ in range(n + 1)]
countlist = [[] for _ in range(n + 1)]
for _ in range(m):
    b, a = map(int, input().split())
    array[a].append(b)
for i in range(n + 1):
    countlist[i] = bfs(i)
# BUGFIX (performance): the original recomputed max(countlist) on every loop
# iteration (O(n^2)); compute it once.
best = countlist and max(countlist)
index = -1
for c in countlist:
    index += 1
    if c == best:
        print(index, end=" ")
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import argparse
import sys
def atualizaLog(log, imageLog):
    """Merge AEEJ values from ``imageLog`` into ``log``.

    Each image-log line is ``fileName;valorAEEJ``.  For the first log line
    with 15 ';'-separated fields whose field 14 equals fileName, field 9 is
    replaced by valorAEEJ.  Returns the updated log as a single string with
    a trailing newline.

    (Cleanup: the original reused the parameter name ``log`` as two
    different loop variables, shadowing the input file object.)
    """
    logLines = [line.strip().split(';') for line in log.readlines()]
    for imageLine in imageLog.readlines():
        fields = imageLine.strip().split(';')
        if len(fields) != 2:
            continue
        fileName, valorAEEJ = fields
        for logLine in logLines:
            # Only full 15-field records carry a file name at index 14.
            if len(logLine) == 15 and logLine[14] == fileName:
                logLine[9] = valorAEEJ
                break
    return '\n'.join(';'.join(logLine) for logLine in logLines) + '\n'
if __name__ == '__main__':
    # CLI: merge the neural-net image log (--imageLog) into the avalgame
    # log (--log) and write the merged result to --output (stdout default).
    parser = argparse.ArgumentParser()
    parser.add_argument('--log','-l', type=argparse.FileType('r'), help='Arquivo de log gerado pela biblioteca avalgame', required=True)
    parser.add_argument('--imageLog','-i', type=argparse.FileType('r'), help='Arquivo de log gerado pela pela rede neural', required=True)
    parser.add_argument('--output', '-o', type=argparse.FileType('w'), default=sys.stdout, help='Arquivo de log atualizado')
    args = parser.parse_args()
    newLog = atualizaLog(args.log, args.imageLog)
    args.output.write(newLog)
|
from django.db import models
from .Usuario import Usuario
class Aluno(Usuario):
    """Student model extending Usuario.

    NOTE(review): the ForeignKey fields below pass no on_delete argument,
    which Django requires from 2.0 on -- confirm the targeted Django
    version or add on_delete explicitly.
    """
    # Course the student belongs to (one course, many students).
    curso = models.ForeignKey(to='Curso', related_name="alunos", null=False, blank=False) #onetomany
    #nome = models.CharField(max_length=120,null=False)
    #email = models.CharField(max_length=80)
    # Optional mobile phone number (up to 11 characters).
    celular = models.CharField(max_length=11, null=True, blank=True)
    #ra = models.IntegerField(unique=True,null=False)
    # Enrolment: many-to-many with Turma through the 'Matricula' join table.
    turmas = models.ManyToManyField('Turma', db_table='Matricula', related_name='alunos', blank=True)
    # Optional profile photo record.
    foto = models.ForeignKey(to='ArquivosFoto', related_name="alunos", null=True, blank=True) #onetomany
    def __str__(self):
        # self.ra / self.nome are presumably inherited from Usuario -- verify.
        return "{} - {}".format(self.ra,self.nome)
    class Meta:
        db_table = 'Aluno'
from .Curso import Curso
from .Turma import Turma
from .ArquivosFoto import ArquivosFoto
|
print("Please enter a positive integer")
required_count = int(input())
count = 1
# required_count = 3
# count = 4
# Display:
# Count: 1
# Count: 2
# Count: 3
print("Start counting")
while count <= required_count: # The following runs as long as this condition is true
print("Count: {0}".format(count))
count += 1 # count = count + 1
|
from django.shortcuts import render, HttpResponse
def Classifieds(request):
    """Render the classifieds page template with an empty context."""
    #return HttpResponse("Welcome")
    return render(request,'classifieds.html', {})
from rest_framework_nested import routers
class SimpleRouter(routers.SimpleRouter):
    """A little secret sauce that adds the router attribute onto the viewset if possible
    """
    def __init__(self, *args, name='root', **kwargs):
        # Human-readable router name; 'root' marks the top-level router.
        self.name = name
        super().__init__(*args, **kwargs)
    def register(self, prefix, viewset, *args, **kwargs):
        # Attach this router to the viewset class before registering so the
        # viewset can reach back to its router later.
        viewset.router = self
        return super().register(prefix, viewset, *args, **kwargs)
    def get_default_basename(self, viewset):
        # Delegate basename selection to the viewset itself.
        return viewset.get_base_name()
class NestedRouter(routers.NestedSimpleRouter):
    """A little secret sauce that adds the router attribute onto the viewset if possible
    """
    def __init__(self, *args, name='nested', root_viewset_cls=None, **kwargs):
        self.name = name
        self.root_viewset_cls = root_viewset_cls
        try:
            super().__init__(*args, **kwargs)
        except RuntimeError:
            # NestedSimpleRouter raises RuntimeError when the parent router
            # registration cannot be found; re-raised as ValueError carrying
            # the root viewset class for easier debugging.
            raise ValueError(self.root_viewset_cls)
    def register(self, prefix, viewset, *args, **kwargs):
        # Pass-through override.  NOTE(review): unlike SimpleRouter.register
        # this does not set ``viewset.router`` -- confirm that is intended.
        return super().register(prefix, viewset, *args, **kwargs)
    def get_default_basename(self, viewset):
        # Delegate basename selection to the viewset itself.
        return viewset.get_base_name()
|
from __future__ import print_function
import pickle
import os.path
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
# If modifying these scopes, delete the file token.pickle.
SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']
# The ID and range of a sample spreadsheet. Please replace this text with the ID of your Google Sheet.
SAMPLE_SPREADSHEET_ID = '<PLEASE PUT IN THE ID OF YOUR SHEET>'
# Correlates to the all columns in the first 103 rows of the sheet. This might need to be changed
SAMPLE_RANGE_NAME = '!1:103'
def setup():
    """Authorise against the Google Sheets API (OAuth flow cached in
    token.pickle) and return the spreadsheets() resource object."""
    # Initializes setup with Spreadsheet.
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            # Interactive browser-based OAuth using credentials.json.
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server(port=0)
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    service = build('sheets', 'v4', credentials=creds)
    # Call the Sheets API and setups the sheet object
    sheet = service.spreadsheets()
    return sheet
def getStudentResponses(sheet):
    """Fetch the response sheet and map each student email to the list of
    their answers to the cheat-detection questions.

    Cheat-detection questions are the header-row cells beginning with
    '***'; their column indexes are collected from the first row and then
    used to pick out each student's answers.  Column 1 is assumed to hold
    the student's email address -- verify against the form layout.
    """
    # Gets student responses and loads them into a list
    result = sheet.values().get(spreadsheetId=SAMPLE_SPREADSHEET_ID,
                                range=SAMPLE_RANGE_NAME).execute()
    values = result.get('values', [])
    if not values:
        print('No data found.')
    # Filter values to grab just the cheat detection questions (marked with '***')
    question_indexes = []
    cell_count = 0
    for row in values:
        for cell in row:
            if (cell[:3] == '***'):
                question_indexes.append(cell_count)
            cell_count += 1
        # We just want to look at the first row on this iteration
        break
    # Populate a dictionary with the keys being student emails and responses being values
    student_responses = {}
    is_first = True
    for row in values:
        # Skip the first row because this doesn't include student data
        if (is_first == True):
            is_first = False
            continue
        cell_count = 0
        for cell in row:
            # Add Email as key
            if cell_count == 1:
                student_email = cell
                # Initialize value as empty list
                student_responses[cell] = []
            if cell_count > 1:
                for question_index in question_indexes:
                    if cell_count == question_index:
                        # Append the student response to the list of their responses
                        student_responses[student_email].append(cell)
            cell_count += 1
    return student_responses
# This code is part of OpenFE and is licensed under the MIT license.
# For details, see https://github.com/OpenFreeEnergy/openfe
import click
import glob
import itertools
import pathlib
from plugcli.params import MultiStrategyGetter, Option, NOT_PARSED
# MOVE TO GUFE ####################################################
def _smcs_from_sdf(sdf):
    """Load every molecule in an SDF file as a SmallMoleculeComponent,
    keeping explicit hydrogens."""
    from openfe import SmallMoleculeComponent
    from rdkit import Chem
    supp = Chem.SDMolSupplier(str(sdf), removeHs=False)
    mols = [SmallMoleculeComponent(mol) for mol in supp]
    return mols
def _smcs_from_mol2(mol2):
    """Load a single MOL2 file as a one-element SmallMoleculeComponent list,
    keeping explicit hydrogens."""
    from openfe import SmallMoleculeComponent
    from rdkit import Chem
    rdmol = Chem.MolFromMol2File(str(mol2), removeHs=False)
    return [SmallMoleculeComponent.from_rdkit(rdmol)]
def load_molecules(file_or_directory):
    """
    Load SmallMoleculeComponents from the given file or directory.

    Always returns a list.  A directory input loads every ``.sdf`` file as
    SDF and every ``.mol2`` file as MOL2 (extensions matched
    case-insensitively); a single file is loaded according to its suffix.

    Parameters
    ----------
    file_or_directory : pathlib.Path

    Returns
    -------
    list[SmallMoleculeComponent]
    """
    path = pathlib.Path(file_or_directory)
    candidates = list(path.iterdir()) if path.is_dir() else [path]
    sdf_files = [p for p in candidates if p.suffix.lower() == ".sdf"]
    mol2_files = [p for p in candidates if p.suffix.lower() == ".mol2"]
    if not (sdf_files or mol2_files):
        raise ValueError(f"Unable to find molecules in {file_or_directory}")
    molecules = []
    for sdf in sdf_files:
        molecules.extend(_smcs_from_sdf(sdf))
    for mol2 in mol2_files:
        molecules.extend(_smcs_from_mol2(mol2))
    return molecules
# END MOVE TO GUFE ################################################
def molecule_getter(user_input, context):
    """plugcli getter: resolve the CLI path argument into a list of
    SmallMoleculeComponents (context is unused)."""
    return load_molecules(user_input)
# -M/--molecules: file or directory of SDF/MOL2 inputs, resolved to
# SmallMoleculeComponents via molecule_getter.
MOL_DIR = Option(
    "-M",
    "--molecules",
    type=click.Path(exists=True),
    help=(
        "A directory or file containing all molecules to be loaded, either"
        " as a single SDF or multiple MOL2/SDFs."
    ),
    getter=molecule_getter,
)
# -C/--cofactors: optional SDF of cofactor molecules, loaded the same way.
COFACTORS = Option(
    "-C", "--cofactors",
    type=click.Path(exists=True),
    help="Path to cofactors sdf file. This may contain multiple molecules",
    getter=molecule_getter,
)
from nltk import word_tokenize, pos_tag
from nltk.corpus import wordnet as wn
def penn_to_wn(tag):
    """ Convert between a Penn Treebank tag to a simplified Wordnet tag """
    # Only the first letter of the Penn tag matters: noun, verb,
    # adjective ('J') or adverb ('R'); anything else is unmapped.
    prefix_map = {'N': 'n', 'V': 'v', 'J': 'a', 'R': 'r'}
    for prefix, wn_pos in prefix_map.items():
        if tag.startswith(prefix):
            return wn_pos
    return None
def tagged_to_synset(word, tag):
    """Return the first WordNet synset for (word, Penn tag), or None when
    the tag is unmapped or the word has no synsets for that POS."""
    wn_tag = penn_to_wn(tag)
    if wn_tag is None:
        return None
    try:
        return wn.synsets(word, wn_tag)[0]
    except IndexError:
        # BUGFIX: narrowed from a bare except, which also swallowed
        # KeyboardInterrupt and real errors; only "no synset" is expected.
        return None
def sentence_similarity(sentence1, sentence2):
    """ compute the sentence similarity using Wordnet """
    # Tokenize and POS-tag both sentences.
    sentence1 = pos_tag(word_tokenize(sentence1))
    sentence2 = pos_tag(word_tokenize(sentence2))
    # Get the synsets for the tagged words.
    synsets1 = [tagged_to_synset(*tagged_word) for tagged_word in sentence1]
    synsets2 = [tagged_to_synset(*tagged_word) for tagged_word in sentence2]
    # Filter out the Nones.
    synsets1 = [ss for ss in synsets1 if ss]
    synsets2 = [ss for ss in synsets2 if ss]
    score, count = 0.0, 0
    for synset in synsets1:
        # Similarity against the most similar word of the other sentence.
        # BUGFIX: path_similarity can return None and synsets2 can be empty;
        # the original max() then crashed (TypeError on Python 3 /
        # ValueError on an empty list).  Keep only computable similarities.
        sims = [s for s in (synset.path_similarity(ss) for ss in synsets2)
                if s is not None]
        if not sims:
            continue
        score += max(sims)
        count += 1
    # BUGFIX: avoid ZeroDivisionError when nothing could be compared.
    if count == 0:
        return 0.0
    return score / count
def symmetric_sentence_similarity(sentence1, sentence2):
    """ compute the symmetric sentence similarity using Wordnet """
    # sentence_similarity is asymmetric (it averages over the first
    # sentence's synsets only), so average both directions.
    return (sentence_similarity(sentence1, sentence2) + sentence_similarity(sentence2, sentence1)) / 2
def find_item(scentence):
    """Return the first known item (in list order) mentioned as a substring
    of the sentence, or None when no item occurs."""
    items = ["box", "sphere", "pen", "ship", "glass", "table", "coffee", "bottle"]
    return next((candidate for candidate in items if candidate in scentence), None)
def find_name(scentence):
    """Return the first known name (in list order) mentioned as a substring
    of the sentence, or None when no name occurs."""
    names = ["Fussel", "Phine", "Peter", "Justin"]
    return next((candidate for candidate in names if candidate in scentence), None)
def adjust_scentences(sentences, data):
    """Replace placeholder tokens (e.g. 'PERSON', 'ITEM') in the template
    sentences with the concrete values from ``data``.

    NOTE(review): Python 2 only (``iteritems`` and print statements).
    Each pass keeps only the sentences containing the current placeholder,
    so templates missing any placeholder are dropped -- confirm that this
    filtering is intended.
    """
    for key, value in data.iteritems():
        print key, value
        adjusted = []
        for sentence in sentences:
            print "Before: " + sentence
            if key in sentence:
                adjusted_scentence = sentence.replace(key, value)
                adjusted.append(adjusted_scentence)
                print "After: " + adjusted_scentence
        sentences = adjusted
    return sentences
if __name__ == "__main__":
import prepare
print "Start"
prepare.check_nltk_data_packages()
sentences = [
"PERSON be MOOD.",
"PERSON follow the ITEM.",
"Can you smile PERSON?",
"PERSON please walk to the LOCATION.",
"PERSON look at me.",
"PERSON can you bring me the ITEM?",
"PERSON bring me the ITEM."
]
focus_sentence = "Take the pen and bring it to me immediately, Fussel."
print "Orig Scentence: {}".format(focus_sentence)
item = find_item(focus_sentence)
name = find_name(focus_sentence)
data = {"PERSON": name, "ITEM": item}
sentences = adjust_scentences(sentences, data)
print "Adjust Scentence: {}".format(focus_sentence)
print ""
for sentence in sentences:
score = symmetric_sentence_similarity(focus_sentence, sentence)
print "Score: '{}' : {}".format(sentence, score)
print ""
print "End" |
#!/usr/bin/env python
# -*- coding:utf8 -*-
from deepdive import *
import re
import handle_string
import divlaw
def lenIterator(list):
    """Count the elements of any iterable (consumes generators/iterators).

    The parameter keeps its original name for backward compatibility even
    though it shadows the builtin ``list``.
    """
    # Idiomatic replacement for the original manual counting loop.
    return sum(1 for _ in list)
def getTitle(string):
    """Return the text preceding the first quotation mark (curly or
    straight) that is followed by at least two more characters; the whole
    string when there is no such quote.

    Improvement: a single re.search replaces the original
    finditer + lenIterator scan, which consumed one iterator just to test
    for emptiness and then re-scanned -- same result, fewer passes.
    """
    probe = re.search(r"(\s|\n|\*|\_|\#){0,10}(\“|\")+.{2}", string, re.DOTALL)
    if probe is None:
        return string
    # The shorter pattern necessarily matches when the probe did; its first
    # (leftmost) match marks where the title ends.
    first_quote = re.search(r"(\s|\n|\*|\_|\#){0,10}(\“|\")", string, re.DOTALL)
    return string[:first_quote.start()]
def get_numerical_symbol(title):
    """Extract [reference number, release date] from a law title.

    Looks for a document number like '123/2006/ND-CP' followed by a
    terminator character, then scans the rest of the title for a date (see
    findDate).  Returns None when no number is found.
    """
    result = []
    #get_title1 = re.search(r'(của\s.*)\s(đã được|được)',title)
    get_title = re.search(r'[0-9]+(/[0-9]+)*((/|-)[A-ZĐƯ]+[0-9]*)+(\s|\_|\#|\*|\.)',title,re.M|re.I)
    # get_id = re.search(r'[0-9]+(/[0-9]+)*((/|-)[A-ZĐ]+[0-9]*)+',get_content.group())
    # get_title1 = re.search(r'([0-9]+(/[0-9]+)*((/|-)[A-ZĐ]+[0-9]*)\s(đã được))|([0-9]+(/[0-9]+)*((/|-)[A-ZĐ]+[0-9]*)\s(được))',title)
    #if(get_title1 is not None):
    #	number = re.search(r'[0-9]+(/[0-9]+)*((/|-)[A-ZĐƯ]+[0-9]*)+(\s|\_|\#|\*|\.)',get_title1.group())
    #	if(number is not None):
    #		return (re.search(r'[0-9]+(/[0-9]+)*((/|-)[A-ZĐƯ]+[0-9]*)+',number.group(),re.I)).group()
    if (get_title is not None):
        # Re-match without the terminator to keep only the reference number.
        result.append((re.search(r'[0-9]+(/[0-9]+)*((/|-)[A-ZĐƯ]+[0-9]*)+',get_title.group(),re.I)).group())
        # Search for the release date starting just before the number match.
        result.append(findDate(title[get_title.start() - 1:len(title)]))
        return result
    return None
def findDate(string):
    """Find the first release date near the start of ``string`` and return
    it formatted by extractDate ('year-month-day'), or None.

    Three layouts are tried in order: 'ngày D tháng M năm Y', 'D/M/Y' and
    'D-M-Y'.  A date only counts when it starts within the first ~30
    characters of the matched span (same cutoff as the original).

    Improvement: the three copy-pasted search blocks are folded into one
    loop over the patterns; the per-pattern behaviour is unchanged.
    """
    date_patterns = [
        r'ngày \d{1,2} tháng \d{1,2} năm \d{2,4}',
        r'\d{1,2}\/\d{1,2}\/\d{2,4}',
        r'\d{1,2}-\d{1,2}-\d{2,4}',
    ]
    for pattern in date_patterns:
        date_checker = re.search(r'(.(?!\\n))+' + pattern, string, re.U | re.I | re.DOTALL)
        if date_checker is None:
            continue
        date_match = re.search(pattern, date_checker.group(0), re.U | re.I)
        if date_match.start() <= 30:
            return extractDate(date_match.group(0))
    return None
def extractDate(string):
    """Pull "<day> ... <month> ... <year>" digit groups out of *string* and
    return them as "YYYY-M-D".

    Two-digit years are prefixed with "19"; a three-digit year is treated as
    invalid. Returns None when any of the three groups is missing.
    """
    pos = 0
    day = re.search(r'\d{1,2}', string, re.U | re.M)
    if day is None:
        return None
    pos += day.end(0)
    month = re.search(r'\d{1,2}', string[pos:], re.U | re.M)
    if month is None:
        return None
    pos += month.end(0)
    year = re.search(r'\d{2,4}', string[pos:], re.U | re.M)
    if year is None:
        return None
    y = year.group(0)
    if len(y) == 2:
        return '19' + y + '-' + month.group(0) + '-' + day.group(0)
    if len(y) == 3:
        # Three-digit years cannot be disambiguated; reject.
        return None
    return y + '-' + month.group(0) + '-' + day.group(0)
@tsv_extractor
@returns(lambda
        law_id ="text",
        type = "int",
        doc_content_update = "text",
        symbol = "text",
        position = "text",
        modified_law_date_release = "text"
    :[])
def extract(
        law_id = "text",
        totalLaw = "int",
        law_content = "text",
        law_len = "int",
        totalItem = "int",
        item_content = "text",
        item_len = "int",
        totalpoint = "int",
        point_content = "text",
        part_index ="int",
        chap_index ="int",
        sec_index ="int",
        law_index ="int",
        item_index ="int",
        point_index ="int",
        numerical_symbol = "text",
        date_released ="text"
    ):
    """DeepDive TSV extractor: classify a legal amendment unit and yield one
    output row per classifiable unit.

    Granularity cascades point -> item -> law: whichever of totalpoint /
    totalItem / totalLaw is positive decides which content field is
    classified (a later positive count overrides earlier results).

    Amendment `type` codes set below:
      1 = sửa đổi/bổ sung (amend/supplement)   2 = bãi bỏ (repeal)
      3 = bổ sung cụm từ (insert phrase)       4 = thay ... bằng (replace phrase)
      5 = đổi tên thành (rename to)            6 = sửa đổi tên (amend name)
      7 = bỏ cụm từ (delete phrase)
    `point` acts as a "row found" flag; a row is yielded only when it is 1.
    Relies on sibling helpers getTitle/get_numerical_symbol/findDate/
    lenIterator and divlaw.itemInQuote (quote detection for repeals).
    """
    doc_content_update = None
    # Contents arrive padded; trim each to its recorded length.
    if law_content is not None:
        # law_content = handle_string.to_unicode(law_content)
        law_content = law_content[:law_len]
        # pass
        # law_content = law_content.encode('utf-8')
    if (item_content is not None) :
        # # item_content = handle_string.to_unicode(item_content)
        # # if item_len != len(item_content):
        item_content = item_content[:item_len]
        # pass
        # item_content = item_content.encode('utf-8')
    number = None
    type = 0
    point = 0
    # p matches any "sửa đổi/bổ sung" phrase; p1 matches only past-tense
    # "đã (được) sửa đổi/bổ sung". A unit is an amendment (type 1) when p
    # matches more often than p1, i.e. some occurrence is not past tense.
    p = re.compile(r'((((S|s)ửa đổi)(\s|\,)*((b|B)ổ sung)*)|((b|B)ổ sung))')
    p1= re.compile(r'(đã\s|đã được\s)((((S|s)ửa đổi)(\s|\,)*((b|B)ổ sung)*)|((b|B)ổ sung))')
    position = "0_0_0_0_0_0"
    # ---- point-level classification -------------------------------------
    if(totalpoint > 0):
        # Refresh symbol/date from the first title that carries a new symbol.
        if(numerical_symbol not in getTitle(point_content)):
            number = get_numerical_symbol(getTitle(point_content))
            if(number is not None):
                numerical_symbol = number[0]
                date_released = number[1]
        elif (numerical_symbol not in getTitle(item_content)):
            number = get_numerical_symbol(getTitle(item_content))
            if(number is not None):
                numerical_symbol = number[0]
                date_released = number[1]
        elif (numerical_symbol not in getTitle(law_content)):
            number = get_numerical_symbol(getTitle(law_content))
            if(number is not None):
                numerical_symbol = number[0]
                date_released = number[1]
        position = "{}_{}_{}_{}_{}_{}".format(part_index+1,chap_index+1,sec_index+1,law_index+1,item_index+1,point_index+1)
        # NOTE: only the point branch also accepts plain "bổ sung từ" here.
        type_modify = re.search(r'(((b|B)ổ sung cụm từ)|((b|B)ổ sung từ))',point_content)
        if(type_modify is not None):
            type = 3
            doc_content_update = point_content
            point = 1
        else :
            type_change_name = re.search(r'(S|s)ửa đổi tên',point_content)
            if(type_change_name is not None):
                type = 6
                doc_content_update = point_content
                point = 1
            else:
                # "bãi bỏ" only counts when it is outside a quoted passage.
                type_delete = re.search(r'(b|B)ãi bỏ',point_content)
                inQuote = False
                if type_delete is not None :
                    inQuote = divlaw.itemInQuote(point_content,type_delete.start())
                if(type_delete is not None) and not inQuote:
                    type = 2
                    doc_content_update = point_content
                    point = 1
                else:
                    type_delete_text = re.search(r'(((b|B)ỏ cụm từ)|((b|B)ỏ từ))',point_content)
                    if(type_delete_text is not None):
                        type = 7
                        doc_content_update = point_content
                        point =1
                    else:
                        type_add_text = p.finditer(point_content)
                        type_add_text1 = p1.finditer(point_content)
                        len1 = lenIterator(type_add_text)
                        len2 = lenIterator(type_add_text1)
                        if( (len1 != len2) and (len1 > 0)):
                            type = 1
                            doc_content_update = point_content
                            point = 1
                        else :
                            # type_change_text = re.search(r'(t|T)hay\s.*cụm từ',point_content)
                            type_change_text = re.search(r'((t|T)hay\s)*(cụm\s)*từ\s.*(được\s)*(thay\s)*bằng\s(cụm\s)*từ',point_content)
                            if(type_change_text is not None):
                                type = 4
                                doc_content_update = point_content
                                point = 1
                            else :
                                type_name_to_name = re.search(r'((t|T)ên của\s).+(((S|s)ửa đổi\s)(\,\s)*((b|B)ổ sung\s)*)(thành)',point_content)
                                if(type_name_to_name is not None):
                                    type = 5
                                    doc_content_update =point_content
                                    point = 1
                                else :
                                    point = 0
    # ---- item-level classification (same cascade, item_content) ---------
    if(totalItem > 0):
        if(numerical_symbol not in getTitle(item_content)):
            number = get_numerical_symbol(getTitle(item_content))
            if(number is not None):
                numerical_symbol = number[0]
                date_released = number[1]
        elif (numerical_symbol not in getTitle(law_content)):
            number = get_numerical_symbol(getTitle(law_content))
            if(number is not None):
                numerical_symbol = number[0]
                date_released = number[1]
        position = "{}_{}_{}_{}_{}_{}".format(part_index+1,chap_index+1,sec_index+1,law_index+1,item_index+1,0)
        type_modify = re.search(r'(b|B)ổ sung cụm từ',item_content)
        if(type_modify is not None):
            type = 3
            doc_content_update = item_content
            point = 1
        else:
            type_change_name = re.search(r'(S|s)ửa đổi tên',item_content)
            if(type_change_name is not None):
                type = 6
                doc_content_update = item_content
                point = 1
            else:
                type_delete = re.search(r'(b|B)ãi bỏ',item_content)
                inQuote = False
                if type_delete is not None :
                    inQuote = divlaw.itemInQuote(item_content,type_delete.start())
                if(type_delete is not None) and not inQuote:
                    type = 2
                    doc_content_update = item_content
                    point = 1
                else:
                    type_delete_text = re.search(r'(((b|B)ỏ cụm từ)|((b|B)ỏ từ))',item_content)
                    if(type_delete_text is not None):
                        type = 7
                        doc_content_update = item_content
                        point = 1
                    else:
                        # type_add_text = re.search(r'((((S|s)ửa đổi)(\s|\,)*((b|B)ổ sung)*)|((b|B)ổ sung))',item_content)
                        # if(type_add_text is not None):
                        type_add_text = p.finditer(item_content)
                        type_add_text1 = p1.finditer(item_content)
                        len1 = lenIterator(type_add_text)
                        len2 = lenIterator(type_add_text1)
                        if( (len1 != len2) and (len1 > 0)):
                            type = 1
                            doc_content_update = item_content
                            point=1
                        else:
                            # type_change_text = re.search(r'(t|T)hay\s.*cụm từ',item_content)
                            type_change_text = re.search(r'((t|T)hay\s)*(cụm\s)*từ\s.*(được\s)*(thay\s)*bằng\s(cụm\s)*từ',item_content)
                            if(type_change_text is not None):
                                type = 4
                                doc_content_update = item_content
                                point = 1
                            else :
                                type_name_to_name = re.search(r'((t|T)ên của\s).+(((S|s)ửa đổi\s)(\,\s)*((b|B)ổ sung\s)*)(thành)',item_content)
                                if(type_name_to_name is not None):
                                    type = 5
                                    doc_content_update = item_content
                                    point = 1
                                else :
                                    point = 0
    # if(totalpoint > 0 and point == 1 ):
    #     doc_content_update = point_content
    # ---- law-level classification (same cascade, law_content) -----------
    if(totalLaw >0):
        if (numerical_symbol not in getTitle(law_content)):
            number = get_numerical_symbol(getTitle(law_content))
            if(number is not None):
                numerical_symbol = number[0]
                date_released = number[1]
        position = "{}_{}_{}_{}_{}_{}".format(part_index+1,chap_index+1,sec_index+1,law_index+1,0,0)
        type_modify = re.search(r'(b|B)ổ sung cụm từ',law_content)
        if(type_modify is not None):
            type = 3
            doc_content_update = law_content
            point = 1
        else:
            type_change_name = re.search(r'(S|s)ửa đổi tên',law_content)
            if(type_change_name is not None):
                type = 6
                doc_content_update = law_content
                point = 1
            else:
                type_delete = re.search(r'(b|B)ãi bỏ',law_content)
                inQuote = False
                if type_delete is not None :
                    inQuote = divlaw.itemInQuote(law_content,type_delete.start())
                if(type_delete is not None) and not inQuote:
                    type = 2
                    doc_content_update = law_content
                    point = 1
                else:
                    type_delete_text = re.search(r'(((b|B)ỏ cụm từ)|((b|B)ỏ từ))',law_content)
                    if(type_delete_text is not None):
                        type = 7
                        doc_content_update = law_content
                        point = 1
                    else:
                        type_add_text = p.finditer(law_content)
                        type_add_text1 = p1.finditer(law_content)
                        len1 = lenIterator(type_add_text)
                        len2 = lenIterator(type_add_text1)
                        if( (len1 != len2) and (len1 > 0)):
                            type = 1
                            doc_content_update = law_content
                            point = 1
                        else:
                            type_change_text = re.search(r'((t|T)hay\s)*(cụm\s)*từ\s.*(được\s)*(thay\s)*bằng\s(cụm\s)*từ',law_content)
                            if(type_change_text is not None):
                                type = 4
                                doc_content_update = law_content
                                point = 1
                            else :
                                type_name_to_name = re.search(r'((t|T)ên của\s).+(((S|s)ửa đổi\s)(\,\s)*((b|B)ổ sung\s)*)(thành)',law_content)
                                if(type_name_to_name is not None):
                                    type = 5
                                    doc_content_update = law_content
                                    point = 1
                                else :
                                    point = 0
    # if(totalItem > 0):
    #     doc_content_update = item_content
    # Emit a row only when some level classified the unit.
    if(point == 1):
        yield[
            law_id,
            type,
            doc_content_update,
            numerical_symbol,
            position,
            date_released
        ]
|
from .app import app
# BUG FIX: the flask.ext.* namespace was removed in Flask 1.0; extensions
# are imported from their own package now.
from flask_sqlalchemy import SQLAlchemy

# Single shared SQLAlchemy handle bound to the application.
db = SQLAlchemy(app)
### Add models here
|
# Сортировка вставками
# сложность: O(n**2)/лучшее время O(n)
# Устойчивсть: Устойчивая. Если есть 2 одинаковх элемента, то сохраняется их порядок.
# Тип(категория): Вставками
# Потребление памяти: Не требует доп. памяти
# Из массива последовательно берется каждый элемент, кроме первого(index == 0)
# И вставляется в отсортированную часть массива.
import random
# Test fixture: a shuffled permutation of 0..size-1.
size = 10
array = list(range(size))
random.shuffle(array)
print(array)
def insertion_sort(array):
    """Sort *array* in place using insertion sort.

    Stable, O(n^2) worst case, O(n) on already-sorted input; uses no extra
    memory. Each element is pulled out and inserted into the sorted prefix.
    """
    for i in range(1, len(array)):
        key = array[i]  # the "book in hand" being placed into the sorted part
        j = i
        # BUG FIX: test j > 0 *first*. The original order evaluated
        # array[j-1] when j == 0, silently reading array[-1] (the last
        # element) before the guard short-circuited.
        while j > 0 and array[j - 1] > key:
            array[j] = array[j - 1]  # shift larger elements one slot right
            j -= 1
        array[j] = key
print(array)  # before: shuffled input
insertion_sort(array)
print(array)  # after: sorted ascending
|
from networkx import strongly_connected_components
from LSD.auxiliary_graph import AuxiliaryGraph
def get_strongly_connected_component(g):
    """Partition g's strongly connected components.

    Singleton components without a self-loop are collected into one `dag`
    AuxiliaryGraph (id 0); every other component gets its own AuxiliaryGraph
    numbered from 1. Returns (dag, list_of_nontrivial_components).
    """
    dag = AuxiliaryGraph(0)
    non_trivial = []
    next_id = 1
    for component in strongly_connected_components(g):
        member = next(iter(component))
        # A component is non-trivial if it has >1 vertex or a self-loop.
        if len(component) > 1 or member in set(g.successors(member)):
            wrapped = AuxiliaryGraph(next_id)
            next_id += 1
            for vertex in component:
                wrapped.add(vertex)
            non_trivial.append(wrapped)
        else:
            dag.add(member)
    return dag, non_trivial
|
import logging
import torch
from torch import nn
from nlplay.data.cache import WordVectorsManager, DSManager, DS, WV
from nlplay.features.text_cleaner import *
from nlplay.models.pytorch.classifiers.att_conv_net import AttentiveConvNet
from nlplay.models.pytorch.dataset import DSGenerator
from nlplay.models.pytorch.pretrained import get_pretrained_vecs
from nlplay.models.pytorch.trainer import PytorchModelTrainer
# Train/evaluate an AttentiveConvNet sentiment classifier on IMDB using
# GloVe 100d vectors, via the nlplay project wrappers.
logging.basicConfig(
    format="%(asctime)s %(message)s", level=logging.DEBUG, datefmt="%Y-%m-%d %H:%M:%S"
)
# Input data files
ds = DSManager(DS.IMDB.value)
train_csv, test_csv, val_csv = ds.get_partition_paths()
lm = WordVectorsManager(WV.GLOVE_EN6B_100.value)
pretrained_vec = lm.get_wv_path()
# Model Parameters
num_epochs = 40
batch_size = 64
ngram_range = (1, 1)
max_features = 20000
max_seq = 200
embedding_size = 100
hidden_size = 64
margin_size = 3
attention_type = "bilinear"
attentive_conv_net_type = "advanced"
dropout = 0.5
lr = 0.0015
num_workers = 1
# Data preparation
ds = DSGenerator()
# NOTE(review): the *test* split is passed as val_file and val_csv is unused,
# so the reported "accuracy" below is measured on the test set — confirm
# this is intentional.
train_ds, val_ds = ds.from_csv(
    train_file=train_csv,
    val_file=test_csv,
    ngram_range=ngram_range,
    max_features=max_features,
    preprocess_func=base_cleaner,
    preprocess_ncore=3,
    ds_max_seq=max_seq,
)
# Restrict the pretrained embedding matrix to the corpus vocabulary.
vecs = get_pretrained_vecs(
    input_vec_file=pretrained_vec,
    target_vocab=ds.vocab,
    dim=embedding_size,
    output_file=None,
)
model = AttentiveConvNet(
    num_classes=ds.num_classes,
    vocabulary_size=ds.vocab_size,
    embedding_dim=embedding_size,
    hidden_size=hidden_size,
    margin_size=margin_size,
    attentive_conv_net_type=attentive_conv_net_type,
    attention_type=attention_type,
    dropout=dropout,
    pretrained_vec=vecs,
    update_embedding=True,
    apply_sm=False,
)
# apply_sm=False above: the model outputs raw logits, as CrossEntropyLoss
# applies log-softmax internally.
criterion = nn.CrossEntropyLoss()
optimizer = torch.optim.Adam(model.parameters(), lr=lr)
trainer = PytorchModelTrainer(
    model,
    criterion,
    optimizer,
    train_ds=train_ds,
    val_ds=val_ds,
    batch_size=batch_size,
    n_workers=num_workers,
    epochs=num_epochs,
)
trainer.train_evaluate()
# 2021-01-02 11:43:31 ------------------------------------------
# 2021-01-02 11:43:31 --- SUMMARY ---
# 2021-01-02 11:43:31 ------------------------------------------
# 2021-01-02 11:43:31 Number of model parameters : 2211958
# 2021-01-02 11:43:31 Total Training Time: 1m 6s
# 2021-01-02 11:43:31 Total Time: 1m 6s
# 2021-01-02 11:43:31 Best Epoch: 2 - Accuracy Score: 0.870760
# 2021-01-02 11:43:31 ------------------------------------------
|
import os
import torch
import argparse
import logging
from dataset import Dataset
from utils import compute_F1, compute_exact_match
from torch.utils.data import DataLoader
from transformers import AdamW
from tqdm import tqdm
from trainer import train, valid
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, AdamW
from knockknock import email_sender
import datetime
# Root logger at INFO; a file handler is attached below once args are parsed
# (the log path depends on --log_file).
logger = logging.getLogger()
logger.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
now_time = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
parser = argparse.ArgumentParser()
parser.add_argument('--patience' , type = int, default=3)
parser.add_argument('--batch_size' , type = int, default=4)
parser.add_argument('--max_epoch' , type = int, default=2)
parser.add_argument('--base_trained_model', type = str, default = 'bert-base-uncased', help =" pretrainned model from 🤗")
parser.add_argument('--pretrained_model' , type = str, help = 'pretrainned model')
parser.add_argument('--gpu_number' , type = int, default = 0, help = 'which GPU will you use?')
# NOTE(review): type=bool is an argparse pitfall — bool("False") is True, so
# *any* value passed on the command line enables debugging. action='store_true'
# would be correct but changes the CLI, so it is only flagged here.
parser.add_argument('--debugging' , type = bool, default = False, help = "Don't save file")
# NOTE(review): the default log filename embeds spaces and colons from
# now_time, which is not a valid filename on Windows.
parser.add_argument('--log_file' , type = str, default = f'logs/log_{now_time}.txt', help = 'Is this debuggin mode?')
parser.add_argument('--dataset_name' , required= True, type = str, help = 'mrqa|squad|coqa')
# parser.add_argument('--max_length' , type = int, default = 512, help = 'max length')
parser.add_argument('--do_train' , default = True, help = 'do train or not', action=argparse.BooleanOptionalAction)
args = parser.parse_args()
# NOTE(review): this recreates the identical formatter already built above.
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
file_handler = logging.FileHandler(args.log_file)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
def makedirs(path):
    """Create *path* (including parents) if it does not already exist.

    Equivalent to the original try/except-OSError dance: an existing
    directory is fine, while a conflicting non-directory entry still raises
    (FileExistsError, an OSError subclass).
    """
    os.makedirs(path, exist_ok=True)
# NOTE(review): redundant — the arguments were already parsed right after
# their definitions above; this second parse yields an identical namespace.
args = parser.parse_args()
# @email_sender(recipient_emails=["jihyunlee@postech.ac.kr"], sender_email="knowing.deep.clean.water@gmail.com")
def main():
    """Fine-tune and evaluate an extractive-QA model with early stopping on
    the dev loss.

    Returns {'EM': ..., 'F1': ...} — dev-set averages from the last epoch.
    """
    logger.info("start")
    logger.info(args)
    makedirs("./data"); makedirs("./logs"); makedirs("./model");
    tokenizer = AutoTokenizer.from_pretrained(args.base_trained_model, use_fast=True)
    train_dataset = Dataset(args.dataset_name, tokenizer, "train", logger)
    val_dataset = Dataset(args.dataset_name, tokenizer, "validation", logger)
    model = AutoModelForQuestionAnswering.from_pretrained(args.base_trained_model)
    train_loader = DataLoader(train_dataset, args.batch_size, shuffle=True)
    dev_loader = DataLoader(val_dataset, args.batch_size, shuffle=True)
    optimizer = AdamW(model.parameters(), lr=5e-5, weight_decay=0.01)
    device = torch.device(f'cuda:{args.gpu_number}' if torch.cuda.is_available() else 'cpu')
    if torch.cuda.is_available():
        # Guard: set_device/empty_cache raise on CPU-only machines.
        torch.cuda.set_device(device)  # change allocation of current GPU
        torch.cuda.empty_cache()
    if args.pretrained_model:
        logger.info("use trained model")
        model.load_state_dict(torch.load(args.pretrained_model))
    model.to(device)
    penalty = 0
    min_loss = float('inf')
    EM_avg, F1_avg = 0.0, 0.0
    for epoch in range(args.max_epoch):
        logger.info(f"Epoch : {epoch}")
        if args.do_train:
            train(model, train_loader, optimizer, device, logger)
        pred_texts, ans_texts, loss = valid(model, dev_loader, device, tokenizer, logger)
        # BUG FIX: the original divided the EM/F1 sums by the loop variable
        # `iter` (the *last index*, i.e. count-1, which also shadowed the
        # builtin) — skewing both metrics and dividing by zero on a
        # single-example dev set. Average over the example count instead.
        n_examples = max(len(pred_texts), 1)
        EM = sum(compute_exact_match(p, a) for p, a in zip(pred_texts, ans_texts))
        F1 = sum(compute_F1(p, a) for p, a in zip(pred_texts, ans_texts))
        EM_avg, F1_avg = EM / n_examples, F1 / n_examples
        logger.info("Epoch : %d, EM : %.04f, F1 : %.04f, Loss : %.04f" % (epoch, EM_avg, F1_avg, loss))
        if loss < min_loss:
            logger.info("New best")
            min_loss = loss
            penalty = 0
            if not args.debugging:
                torch.save(model.state_dict(), f"model/{args.dataset_name}.pt")
        else:
            penalty += 1
            if penalty > args.patience:
                logger.info(f"early stopping at epoch {epoch}")
                break
    return {'EM': EM_avg, 'F1': F1_avg}
if __name__ == "__main__":
    main()
|
# python3
def count_inversions(array):
    """Count inversions (pairs i < j with array[i] > array[j]) in O(n log n)
    via merge sort.

    BUG FIX: the original recursion only stopped at length 1, so an empty
    input recursed forever (recur([]) -> recur([])). The base case now
    covers lengths 0 and 1.
    """
    def _merge_count(left, right):
        # Merge two sorted lists, counting cross-pair inversions.
        merged = []
        l = r = inversions = 0
        while l < len(left) and r < len(right):
            if left[l] > right[r]:
                merged.append(right[r])
                r += 1
                # Every element remaining in `left` inverts with right[r].
                inversions += len(left) - l
            else:
                merged.append(left[l])
                l += 1
        merged.extend(left[l:])
        merged.extend(right[r:])
        return inversions, merged

    def _recur(arr):
        if len(arr) <= 1:  # base case: empty or single-element slice
            return 0, arr
        mid = len(arr) // 2
        left_inv, left_sorted = _recur(arr[:mid])
        right_inv, right_sorted = _recur(arr[mid:])
        cross_inv, merged = _merge_count(left_sorted, right_sorted)
        return left_inv + right_inv + cross_inv, merged

    inversions, _ = _recur(array)
    return inversions
if __name__ == '__main__':
    # First stdin line (the array length) is read but ignored; the values on
    # the second line determine everything.
    _ = input()
    array = list(map(int, input().split()))
    print(count_inversions(array))
|
#!/usr/bin/python
Usage = """
Convert the FASTA sequences to static FASTQ format
Quality values are encoded as H for 39
needs input file and ouputfile names
Usage: -version 1.0 (Python3)
fasta2fastq.py inputfile.fasta output.fastq
Kan Liu
liukan.big@gmail.com
11/04/2018
"""
import sys
from Bio.SeqIO.FastaIO import SimpleFastaParser
# Require both the input FASTA path and the output FASTQ path.
if len(sys.argv) < 3:
    print(Usage)
else:
    cmdargs = str(sys.argv)
    # Single combined context manager instead of two nested "with" blocks.
    with open(str(sys.argv[1]), 'r') as in_handle, open(str(sys.argv[2]), "w") as out_handle:
        for title, seq in SimpleFastaParser(in_handle):
            # FASTQ record: @header, sequence, '+', constant quality "H" (Q39).
            out_handle.write("@%s\n%s\n+\n%s\n" % (title, seq, "H" * len(seq)))
import os
import subprocess
import sys
def untar(file, output_dir):
    """Extract tar archive *file* into directory *output_dir*.

    BUG FIX: the original formatted one command string and str.split() it,
    which mangles any path containing whitespace. Passing an argv list (no
    shell involved) handles arbitrary paths safely.
    """
    subprocess.check_call(["tar", "-xf", file, "-C", output_dir])
# Unpack the two ImageNet archives (expected in the current directory) into
# per-dataset folders; the train archive holds one nested tar per class.
for dataset in ["ILSVRC2012_img_train.tar", "ILSVRC2012_img_val.tar"]:
    dataset_name, _ = dataset.split(".")
    os.mkdir("{dset}".format(dset=dataset_name))
    print("Unpacking {tar}".format(tar=dataset))
    untar(dataset, dataset_name)
    classes = os.listdir("./{dir}".format(dir=dataset_name))
    if "train" in dataset:
        # Recursively unpack tars
        for cls in classes:
            print("Unpacking file {name}".format(name=cls))
            # Nested entries are named like "n01440764.tar" — the stem is the
            # WordNet class label used as the output folder name.
            label, _ = cls.split(".")
            output_dir = "./{dir}/{label}".format(dir=dataset_name, label=label)
            os.mkdir(output_dir)
            tar_file = "./{dir}/{file}".format(dir=dataset_name, file=cls)
            untar(tar_file, output_dir)
            os.remove(tar_file)  # free disk space as we go
|
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 26 16:24:36 2017
@author: ankit
"""
import sys
#f = open('housepriceinput.txt','r')
# First stdin line: number of features F and number of training rows N.
fn = input().split()
f = int(fn[0])
n = int(fn[1])
# Each training row: F feature values followed by the target price.
# BUG FIX: the original appended every parsed value into ONE shared `nrow`
# list and then appended that same list object each iteration, so xtrain
# ended up holding N aliases of a single ever-growing row.
xtrain = []
for _ in range(0, n):
    xtrain.append([float(tok) for tok in input().split()])
print(xtrain)
# Split off the target column (last value of each row).
ytrain = [row.pop() for row in xtrain]
print('YYYYYYYYYYY', ytrain)
# Test block: the next line starts with the test-row count.
# BUG FIX: the original iterated range(0, fn) where fn was the *list* from
# split() (TypeError), and reused the same nrow aliasing bug.
t = int(input().strip().split()[0])
xtest = []
for _ in range(0, t):
    xtest.append([float(tok) for tok in input().split()])
print('xtest', xtest)
#!/usr/bin/python3
"""This module holds a class BaseModel
that is the main class in the project
"""
import uuid
import datetime
from models import storage
class BaseModel:
    """Main base class of the project's models.

    Owns a UUID `id` plus `created_at`/`updated_at` timestamps, and can be
    rebuilt from the dictionary produced by to_dict().
    """

    def __init__(self, *args, **kwargs):
        """Create a new instance, or restore one from a to_dict()-style
        mapping passed as keyword arguments.

        BUG FIX: the original tested the misspelled key "craeted_at", so a
        correctly spelled created_at kwarg was silently ignored and restored
        objects received a fresh creation timestamp.
        """
        if kwargs:
            if "id" in kwargs:
                self.id = kwargs["id"]
            else:
                self.id = str(uuid.uuid4())
            if "created_at" in kwargs:
                self.created_at = datetime.datetime.strptime(
                    kwargs["created_at"], "%Y-%m-%dT%H:%M:%S.%f"
                )
            else:
                self.created_at = datetime.datetime.now()
            if "updated_at" in kwargs:
                self.updated_at = datetime.datetime.strptime(
                    kwargs["updated_at"], "%Y-%m-%dT%H:%M:%S.%f"
                )
            else:
                self.updated_at = datetime.datetime.now()
            for key, value in kwargs.items():
                # id/timestamps were handled above; __class__ is metadata only.
                if key in ["id", "created_at", "updated_at", "__class__"]:
                    pass
                else:
                    setattr(self, key, value)
        else:
            self.id = str(uuid.uuid4())
            self.created_at = datetime.datetime.now()
            self.updated_at = datetime.datetime.now()
            storage.new(self)  # register brand-new objects with the store

    def __str__(self):
        """Return "[ClassName] (id) {attributes}" for display/debugging."""
        printable_str = "[{}] ({}) {}".format(
            type(self).__name__, self.id, str(self.__dict__)
        )
        return printable_str

    def save(self):
        """Refresh updated_at and persist every object via the storage engine
        (serialized to a JSON file by `storage`)."""
        self.updated_at = datetime.datetime.now()
        storage.save()

    def to_dict(self):
        """Return a JSON-serializable dict of the instance: all attributes,
        ISO-format timestamps, and the class name under "__class__"."""
        new_dict = self.__dict__.copy()
        new_dict["__class__"] = type(self).__name__
        new_dict["updated_at"] = new_dict["updated_at"].isoformat()
        new_dict["created_at"] = new_dict["created_at"].isoformat()
        return new_dict
|
import os
import time
import datetime
import glob
import MySQLdb
from time import strftime
os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')

# Path of the 1-wire sensor output file.
# NOTE(review): left as ' ' in the original, which then read the undefined
# name `rain_database` — set this to the real w1_slave path before use.
rain_sensor = ' '

# Variables for MySQL
db = MySQLdb.connect(host="localhost", user="root", passwd="123", db="rain_database")
cur = db.cursor()

def rainRead():
    """Read the sensor file; return the value after 't=' on line 2, scaled
    by 1/1000 and rounded to one decimal, or None when no 't=' marker.

    BUG FIX: the original defined moistRead() while the loop called
    rainRead(), and mixed undefined rain_database/temp_output/temp_string/
    temp_c names — every call raised NameError.
    """
    with open(rain_sensor, 'r') as sensor_file:
        lines = sensor_file.readlines()
    marker = lines[1].find('t=')
    if marker != -1:
        rain_string = lines[1].strip()[marker + 2:]
        rain_c = float(rain_string) / 1000.0
        return round(rain_c, 1)

while True:
    rain = rainRead()
    print(rain)
    datetimeWrite = (time.strftime("%Y-%m-%d ") + time.strftime("%H:%M:%S"))
    print(datetimeWrite)
    # Parameterized query — the driver escapes the values.
    sql = ("""INSERT INTO moist_Log (datetime,rain) VALUES (%s,%s)""", (datetimeWrite, rain))
    try:
        print("Writing to database...")
        # Execute the SQL command
        cur.execute(*sql)
        # Commit your changes in the database
        db.commit()
        print("Write Complete")
    except Exception:
        # BUG FIX: the bare `except:` also swallowed KeyboardInterrupt/
        # SystemExit; roll back only on real errors.
        db.rollback()
        print("Failed writing to database")
    cur.close()
    db.close()
    break
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pandas as pd
from sklearn.base import TransformerMixin,BaseEstimator
from TextNormalizer import TextNormalizer
from sklearn.pipeline import Pipeline
from nltk.tokenize import TreebankWordTokenizer
import numpy as np
from sklearn.feature_extraction.text import CountVectorizer
import pickle
from keras.models import Sequential
from keras.layers.convolutional import Conv1D,MaxPooling1D,AveragePooling1D
from keras.layers.recurrent import LSTM, GRU
from sklearn.model_selection import train_test_split
from keras.utils import to_categorical
from sklearn.utils.class_weight import compute_class_weight
from keras.layers.core import Dense, Dropout, Activation
from keras.layers import Flatten
from keras.callbacks import ModelCheckpoint
class UnknownModelOption(Exception):
    """Raised when a classifier is constructed with an unsupported model type."""
    pass
class Vectorizer(BaseEstimator, TransformerMixin):
    """sklearn-style transformer turning cleaned text into word2vec features.

    mtype 'GB'  -> averaged skip-gram + CBOW vectors (gradient-boosting input)
    mtype 'NET' -> padded per-token skip-gram sequences (conv-net input)
    """

    def __init__(self, mtype):
        def load_gb_vectors():
            self.__w2v_sg = pickle.load(open('w2v_sg.pkl', 'rb'))
            self.__w2v_cbw = pickle.load(open('w2v_cbw.pkl', 'rb'))

        def load_net_vectors():
            self.__w2v_sg = pickle.load(open('w2v_sg.pkl', 'rb'))

        # BUG FIX: the original dict stored the *results* of calling both
        # loaders, so every option eagerly opened every pickle file. Store
        # the callables and invoke only the selected one.
        load_options = {
            'GB': load_gb_vectors,
            'NET': load_net_vectors,
        }
        load_options[mtype]()
        self.__mtype = mtype

    def __transform_NET(self, X):
        def make_padded_sequenses(docs, max_length, w2v):
            # Embed each token (zeros for OOV) and pad/trim to max_length rows.
            tokens = [doc.split(' ') for doc in docs[0]]
            vecs = [[w2v[t] if t in w2v else np.zeros(50) for t in ts] for ts in tokens]
            seqs = np.array([np.pad(np.vstack(v), mode='constant', pad_width=((0, max_length - len(v)), (0, 0))) if len(v) < max_length else np.vstack(v)[:max_length, :] for v in vecs])
            return seqs
        vector_sg = make_padded_sequenses([X], 15, w2v=self.__w2v_sg)
        return vector_sg

    def __transform_GB(self, X):
        def make_avg_sequenses(docs, w2v):
            # Mean-pool the token embeddings (zeros for OOV) per document.
            tokens = [doc.split(' ') for doc in docs]
            vecs = [[w2v[t] if t in w2v else np.zeros(50) for t in ts] for ts in tokens]
            seqs = np.array([np.apply_along_axis(np.mean, 0, np.vstack(v)) for v in vecs])
            return seqs
        vector_sg = make_avg_sequenses(X, w2v=self.__w2v_sg)
        vector_cbw = make_avg_sequenses(X, w2v=self.__w2v_cbw)
        vector = np.hstack([vector_sg, vector_cbw])
        return vector.reshape(1, -1)

    def fit(self, X=None, y=None, **fit_params):
        """No fitting required; the vectors are preloaded in __init__."""
        return self

    def transform(self, X, y=None, **fit_params):
        # BUG FIX: dispatch lazily — the original evaluated BOTH transform
        # methods and indexed the results, doubling the work and running the
        # wrong model's code path as a side effect.
        transform_options = {
            'GB': self.__transform_GB,
            'NET': self.__transform_NET,
        }
        return transform_options[self.__mtype](X)

    def fit_transform(self, X, y=None, **fit_params):
        self.fit(X, y, **fit_params)
        return self.transform(X)
class QuestionClassifier():
    """Classify a user question into one of five categories using either a
    gradient-boosting model ('GB') or a convolutional network ('NET')."""

    def __init__(self, mtype):
        self.__tn = TextNormalizer().fit()
        self.__vectorizer = Vectorizer(mtype).fit()
        self.__load_model(mtype)
        self.__mtype = mtype

    def __prepare_network(self):
        """Rebuild the conv-net architecture and load the trained weights."""
        model = Sequential()
        model.add(Conv1D(filters=42, kernel_size=2, input_shape=(15, 50), activation='relu'))
        model.add(Conv1D(filters=20, kernel_size=2, activation='relu'))
        model.add(Conv1D(filters=15, kernel_size=2, activation='relu'))
        model.add(Conv1D(filters=10, kernel_size=2, activation='relu'))
        model.add(MaxPooling1D(4))
        model.add(Flatten())
        model.add(Dropout(0.5))
        model.add(Dense(5, activation='softmax'))
        model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
        model.load_weights('weights.hdf5')
        return model

    def __load_model(self, mtype):
        """Load the predictor for *mtype*; raise UnknownModelOption otherwise.

        BUG FIX: the original dict held the *results* of both loaders, so it
        eagerly unpickled GBoost.pkl AND built the keras network regardless
        of which mtype was requested. (Also fixes the "supprted" typo in the
        error message.)
        """
        load_options = {
            'GB': lambda: pickle.load(open('GBoost.pkl', 'rb')),
            'NET': self.__prepare_network,
        }
        if mtype in load_options:
            self.__model = load_options[mtype]()
        else:
            raise UnknownModelOption('Specified model option is not supported')

    def predict(self, text):
        """Return the human-readable class name predicted for *text*."""
        class_names = ['другие',
                       'вопросы о сервисе',
                       'вопросы о товаре',
                       'общеразговорные',
                       'более 1 категории']
        cleaned_text = self.__tn.transform([text.lower()])
        feats = self.__vectorizer.transform(cleaned_text)
        prediction = self.__model.predict(feats)
        # GB predicts a label directly; NET emits class probabilities.
        model_out = {
            'GB': prediction[0],
            'NET': np.argmax(prediction),
        }
        class_num = model_out[self.__mtype]
        return class_names[class_num]
if __name__ == '__main__':
    # Interactive demo: classify stdin lines with the conv-net model.
    classifier = QuestionClassifier(mtype = 'NET')
    while True:
        review = input()
        print('\n')
        print(classifier.predict(review))
import numpy as np
import sys
#dataname - string - name of dataset
#ns - integer - number of sets to generate
#otype - string - type of output sets (binary or text files)
#dataname - string - name of dataset
#ns - integer - number of sets to generate
#otype - string - type of output sets (binary or text files)
def generating_datasets(dataname, ns=20, otype='binary'):
    """Generate *ns* shuffled 75/25 train/test splits of a named dataset.

    dataname: 'cereales' or 'credit'; anything else raises ValueError.
    ns:       number of train/test pairs to create.
    otype:    'binary' -> .npy files via np.save; 'text' -> np.savetxt files.
    Returns 1 on success.
    """
    if dataname == 'cereales':
        orig_name = 'cereales.data'
    elif dataname == 'credit':
        orig_name = 'credit.data'
    else:
        # BUG FIX: the original called sys.error(), which does not exist —
        # an unknown dataset crashed with AttributeError instead of a clear
        # error.
        raise ValueError('Error: Requested data is not in this database')
    #load original dataset as numpy array
    A = np.loadtxt(orig_name, dtype=float, delimiter=' ')
    r, c = A.shape
    # BUG FIX: np.round returns a float, which is not a valid slice index on
    # modern NumPy; compute the split point once as an int.
    split = int(round(0.75 * r))
    for i in range(ns):
        #shuffle rows of a copy of the data matrix
        B = A.copy()
        np.random.shuffle(B)
        #names for training and testing files
        name_tr = dataname + '-tr-' + str(i)
        name_ts = dataname + '-ts-' + str(i)
        if otype == 'binary':
            #save sets as numpy binary files
            np.save(name_tr, B[0:split])
            np.save(name_ts, B[split:r])
        elif otype == 'text':
            #save sets as text files
            np.savetxt(name_tr, B[0:split], delimiter=' ')
            np.savetxt(name_ts, B[split:r], delimiter=' ')
    return 1
|
import torch
import torch.nn as nn
from . constants import *
from transformers import AutoModel
class Abbreviator(nn.Module):
    """Feed-forward encoder with one linear head per output slot, each
    predicting a distribution over SYMBOLS for that position."""

    def __init__(self):
        super().__init__()
        # BUG FIX: the heads lived in a plain Python list, so their
        # parameters were invisible to .parameters(), .to(device),
        # state_dict() and the optimizer; nn.ModuleList registers them.
        self.prediction_layers = nn.ModuleList(
            nn.Linear(1024, len(SYMBOLS)) for _ in range(MAX_OUT_LEN)
        )
        self.abbrnet = nn.Sequential(
            nn.Linear(FIRST_LAYER_SIZE, 2048),
            nn.Sigmoid(),
            nn.Linear(2048, 1024),
            nn.Sigmoid(),
            nn.Linear(1024, 1024),
            nn.ReLU(),
            nn.Linear(1024, 1024),
        )

    def forward(self, input_seq):
        """Return a list of per-slot logits, one tensor per output position."""
        shared = self.abbrnet(input_seq)
        return [head(shared) for head in self.prediction_layers]
|
from flask import Flask
from flask import request
from flask import render_template
from datetime import date
import sqlite3
from sqlite3 import Error
blog = Flask(__name__)

@blog.route('/novotopico', methods=['GET', 'POST'])
def cadastro():
    """Topic creation endpoint.

    BUG FIX: the original returned None on GET requests, which makes Flask
    raise "view function did not return a response" (HTTP 500).
    """
    if request.method == 'POST':
        topico = request.form['topico']
        return 'teste'
    return 'teste'

@blog.route('/teste', methods=['GET', 'POST'])
def ok():
    return render_template('teste.html')

@blog.route('/success')
def success():
    return render_template('success.html')

@blog.errorhandler(404)
def pagina_nao_encontrada(e):
    return render_template('404.html'), 404

# Run the development server only when executed directly, not on import.
if __name__ == '__main__':
    blog.run()
# -*- coding: utf-8 -*-
# Copyright (c) 2020. Distributed under the terms of the MIT License.
from math import log10
from typing import List
from matplotlib import pyplot as plt
from plotly import graph_objects as go
from vise.analyzer.dielectric_function import DieleFuncData
from vise.analyzer.dielectric_function_data.exp_dielectric_func import \
ExpDieleFunc
from vise.util.matplotlib import float_to_int_formatter
class AbsorptionCoeffPlotter:
    """Shared data/axis configuration for the plotly and matplotlib
    absorption-coefficient plotters below."""
    def __init__(self,
                 diele_func_data: DieleFuncData,
                 energy_range: List[float] = None,
                 coeff_power_range: List[float] = None,
                 materials: List[str] = None,
                 yranges: List[float] = None):
        # Photon energies (x axis) and the direction-averaged coefficient.
        self.energies = diele_func_data.energies
        self.absorption_coeff = diele_func_data.ave_absorption_coeff
        self.band_gap = diele_func_data.band_gap
        self.energy_range = energy_range or [0, 10]
        self.coeff_power_range = coeff_power_range
        # Optional reference materials whose experimental data is overlaid.
        self.materials = materials
        self._xaxis_title = "Energy (eV)"
        # y limits in cm^-1; default spans 1e3..1e7 on a log axis.
        if yranges:
            self.ymin, self.ymax = yranges
        else:
            self.ymin, self.ymax = 10**3, 10**7
        self.plt = plt
class AbsorptionCoeffPlotlyPlotter(AbsorptionCoeffPlotter):
    """Render the absorption coefficient as an interactive plotly figure."""
    _yaxis_title = "Absorption coefficient. (cm <sup>-1</sup>)"
    def create_figure(self):
        """Build and return the plotly Figure (log-scale y axis), with the
        calculated curve, a dashed band-gap marker, and optional
        experimental reference curves."""
        fig = go.Figure()
        fig.update_layout(
            xaxis_title=self._xaxis_title,
            yaxis_title=self._yaxis_title,
            font_size=25,
            width=800, height=700)
        fig.add_trace(go.Scatter(x=self.energies,
                                 y=self.absorption_coeff,
                                 line=dict(width=2.5),
                                 name="Average"))
        # Vertical dashed line marking the calculated band gap.
        fig.add_trace(go.Scatter(x=[self.band_gap, self.band_gap],
                                 y=[self.ymin, self.ymax],
                                 line=dict(width=2, dash="dash"),
                                 line_color="black",
                                 name="Band gap"))
        if self.materials:
            for material in self.materials:
                exp = ExpDieleFunc(material)
                energies = exp.energies
                # Experimental band gap (thin dashed) + measured curve.
                fig.add_trace(go.Scatter(x=[exp.band_gap, exp.band_gap],
                                         y=[self.ymin, self.ymax],
                                         line=dict(width=1, dash="dash"),
                                         showlegend=False,
                                         line_color="black",
                                         name=f"{material} band gap"))
                fig.add_trace(go.Scatter(x=energies, y=exp.absorption_coeff,
                                         line=dict(width=1, dash="dashdot"),
                                         name=material))
        fig.update_xaxes(range=self.energy_range, tickfont_size=20)
        fig.update_yaxes(type="log",
                         range=[log10(self.ymin), log10(self.ymax)],
                         tickfont_size=20,
                         showexponent="all", exponentformat='power'
                         )
        return fig
class AbsorptionCoeffMplPlotter(AbsorptionCoeffPlotter):
    """Render the absorption coefficient with matplotlib (semilog-y)."""
    _yaxis_title = "Absorption coefficient. (cm-1)"
    def construct_plot(self):
        """Assemble the full plot on self.plt; call before saving/showing."""
        self._add_coeffs()
        self._add_band_gap()
        if self.materials:
            self._add_materials()
        # self._set_figure_legend()
        self._set_x_range()
        self._set_y_range()
        self._set_labels()
        self._set_formatter()
        self.plt.tight_layout()
    def _add_coeffs(self):
        # Calculated curve on a log y axis.
        self.plt.semilogy(self.energies, self.absorption_coeff)
    def _add_band_gap(self):
        # Vertical dashed marker at the calculated band gap.
        self.plt.axvline(x=self.band_gap, linestyle="dashed", color="black",
                         linewidth=1)
    def _add_materials(self):
        # Experimental band gaps and measured curves for reference materials.
        for material in self.materials:
            exp = ExpDieleFunc(material)
            energies = exp.energies
            self.plt.axvline(x=exp.band_gap,
                             linestyle="dashed",
                             color="black",
                             linewidth=1)
            self.plt.semilogy(energies,
                              list(exp.absorption_coeff),
                              label=material,
                              linestyle="dashdot",
                              )
    def _set_figure_legend(self):
        self.plt.legend(loc="lower right")
    def _set_x_range(self):
        # NOTE(review): hardcodes 0..10 instead of using self.energy_range,
        # unlike the plotly plotter — confirm whether that is intentional.
        self.plt.xlim(0, 10)
    def _set_y_range(self):
        self.plt.gca().set_ylim(ymin=self.ymin, ymax=self.ymax)
    def _set_labels(self):
        self.plt.xlabel(self._xaxis_title)
        self.plt.ylabel(self._yaxis_title)
    def _set_formatter(self):
        axis = self.plt.gca()
        axis.xaxis.set_major_formatter(float_to_int_formatter)
import math, random, string
import numpy as np
target = "kuldeeplovesgeneticalgorithm"
def diff(s1, s2):
    """Euclidean distance between two strings in code-point space.

    Indexes s2 by s1's positions, so s2 must be at least as long as s1.
    """
    total = sum((ord(s1[i]) - ord(s2[i])) ** 2 for i in range(len(s1)))
    return math.sqrt(total)
def getRandomString():
    """Return a random lowercase string shaped like `target` (one random
    letter per non-space position of the target).

    BUG FIX: np.random.randint's upper bound is exclusive, so the original
    randint(ord('a'), ord('z')) could never generate the letter 'z'.
    """
    chars = []
    for ch in target:
        if ch != ' ':
            chars.append(chr(np.random.randint(ord('a'), ord('z') + 1)))
        # else :
        #     chars.append(' ')
    return ''.join(chars)
# selection
def selection(offsprings):
    """Return the two candidates closest to ``target`` as (best, second).

    Fix for the classic two-minimum bug: when a new best is found, the
    previous best is demoted to second place instead of being discarded.
    Falls back to (best, best) when fewer than two candidates improve.
    """
    best = None
    best2 = None
    min_val = 1e8
    min_val2 = 1e8
    for offspring in offsprings:
        val = diff(offspring, target)
        if val < min_val:
            # demote the current best to second place before replacing it
            min_val2, best2 = min_val, best
            min_val, best = val, offspring
        elif val < min_val2:
            min_val2, best2 = val, offspring
    if best2 is None:
        best2 = best
    return best, best2
# single point crossover
def crossover(s1, s2):
    """Splice the two parents at independent random cut points and return
    the resulting pair of children."""
    cut_a = np.random.randint(len(s1))
    cut_b = np.random.randint(len(s1))
    child_a = s1[:cut_a] + s2[cut_a:]
    child_b = s2[:cut_b] + s1[cut_b:]
    return child_a, child_b
# mutation
def mutation(s):
    """Return ``s`` with one randomly chosen position replaced by a random
    lowercase letter.

    Fixes: the unused second random index (and the dead commented-out swap)
    is removed, and the upper bound is ``ord('z') + 1`` because
    ``np.random.randint`` excludes ``high`` — the original could never
    mutate a character into 'z'.
    """
    chars = list(s)
    idx = np.random.randint(len(s))
    chars[idx] = chr(np.random.randint(ord('a'), ord('z') + 1))
    return ''.join(chars)
def generatePopulation(n, k):
    """Return a list of ``n`` random candidate strings.

    Fixes two bugs in the original: it appended to an undefined name
    ``population`` (NameError) and called ``getRandomString(k)`` although
    that function takes no arguments. ``k`` is kept in the signature for
    backward compatibility but is unused — string length always equals
    ``len(target)``.
    """
    populations = []
    for _ in range(n):
        populations.append(getRandomString())
    return populations
def v(s1, s2):
    """One generation step: optionally crossover the parents, spawn
    candidates (each a coin-flip between a mutated copy and the parent
    itself), then keep the two fittest via selection()."""
    if np.random.rand() >= 0.5:
        s1, s2 = crossover(s1, s2)
    candidates = []
    for _ in range(len(s1)):
        for parent in (s1, s2):
            # coin flip: mutated copy or the parent unchanged
            if np.random.rand() >= 0.5:
                candidates.append(mutation(parent))
            else:
                candidates.append(parent)
    return selection(candidates)
# Driver: evolve two parents until one matches the target (max 1000 steps).
parent1 = getRandomString()
parent2 = getRandomString()
print("parent1 = ", parent1)
print("parent2 = ", parent2)
# Renamed from 'iter' to avoid shadowing the built-in iter().
generation = 0
while generation < 1000:
    if (parent1 == target) or (parent2 == target):
        print("Final parent1, Final parent2 = ", parent1, parent2)
        print("Reached in iter", generation)
        break
    print("parent1, parent2 = ", parent1, parent2)
    parent1, parent2 = v(parent1, parent2)
    generation += 1
print("end")
import time
import pickle
import random
from http.client import HTTPConnection
import hashlib
import urllib
import random
import json
import re
import os
# Translates information from a foreign-language knowledge graph into Chinese
# using the Baidu NMT model's API.
# Of the translated results, this experiment only uses the name part to learn
# semantic representations; the other parts are unused for now.
#return result_list,flag.
def baiduapi(text,from_lang,to_lang):
    """Translate ``text`` through the Baidu NMT HTTP API.

    Multiple inputs are passed as newline-separated lines in ``text``; the
    API returns one translation per line.

    Returns:
        (res, flag): list of translated lines, and False when the request
        or response parsing failed (res is then empty).
    """
    appid = 'xxxxxxx' # removed for confidentiality
    secretKey = 'xxxxxxxx'
    httpClient = None
    myurl = '/api/trans/vip/translate'
    q = text
    fromLang = from_lang
    toLang = to_lang
    # Random salt + MD5(appid + query + salt + key): the signature scheme
    # required by the Baidu translate API.
    salt = random.randint(32768, 65536)
    sign = appid + q + str(salt) + secretKey
    # m1 = md5.new()
    m1 = hashlib.md5()
    m1.update(sign.encode())
    sign = m1.hexdigest()
    myurl = myurl + '?appid=' + appid + '&q=' + urllib.parse.quote(q) + '&from=' + fromLang + '&to=' + toLang + '&salt=' + str(salt) + '&sign=' + sign
    try:
        httpClient = HTTPConnection('api.fanyi.baidu.com')
        httpClient.request('GET', myurl)
        # response is an HTTPResponse object
        response = httpClient.getresponse()
        get_vaule = response.read().decode('utf-8')
        get_vaule = json.loads(get_vaule)
        result = get_vaule['trans_result']
        res = []
        for one in result:
            res.append(one['dst'])
        flag = True
    except Exception as e:
        # best-effort: report the failure to the caller via flag
        print(e)
        flag = False
        res = []
    finally:
        if httpClient:
            httpClient.close()
    return res,flag
def lower_rel_name(name):
    """Lower-case only the final segment of a relation/attribute URI.

    DBpedia ``/property/`` URIs are split on that marker; any other URI is
    split at its last '/'. The prefix keeps its original case.
    """
    if r"/property/" in name:
        head, tail = name.split(r"/property/")
        return head + r"/property/" + tail.lower()
    head, tail = name.rsplit(r'/', 1)
    return head + r"/" + tail.lower()
def read_rel_part(path):
    """Load the relation side of an alignment dataset rooted at ``path``.

    Expects tab-separated files ent_ids_{1,2}, rel_ids_{1,2}, triples_{1,2}
    and ent_ILLs. Relation triples are resolved from integer ids to names.

    Returns:
        (triples_1, triples_2, ents_1, ents_2, rels_1, rels_2, ent_ills)
    """
    #rel part is with code part.
    ents_1 = []
    ents_2 = []
    ent2name_1 = dict()
    ent2name_2 = dict()
    rels_1 = []
    rels_2 = []
    rel2name_1 = dict()
    rel2name_2 = dict()
    triples_1 = []
    triples_2 = []
    ent_ills = []
    # entity id -> URI maps for both KGs
    with open(path + "ent_ids_1","r",encoding="utf-8") as f:
        for line in f:
            id, name = line.rstrip("\n").split('\t')
            id = int(id)
            ent2name_1[id] = name
            ents_1.append(name)
    with open(path + "ent_ids_2","r",encoding="utf-8") as f:
        for line in f:
            id, name = line.rstrip("\n").split('\t')
            id = int(id)
            ent2name_2[id] = name
            ents_2.append(name)
    # relation id -> URI maps; local names are lower-cased via lower_rel_name
    with open(path + "rel_ids_1","r",encoding="utf-8") as f:
        for line in f:
            id , name = line.rstrip("\n").split('\t')
            name = lower_rel_name(name)
            id = int(id)
            rel2name_1[id] = name
            rels_1.append(name)
    with open(path + "rel_ids_2","r",encoding="utf-8") as f:
        for line in f:
            id , name = line.rstrip("\n").split('\t')
            name = lower_rel_name(name)
            id = int(id)
            # NOTE(review): the extra .lower() lower-cases the WHOLE URI here,
            # while rel2name_1 above (and rels_2 right below) keep the prefix
            # case — confirm this asymmetry is intentional.
            rel2name_2[id] = name.lower()
            rels_2.append(name)
    # id triples -> name triples
    with open(path + "triples_1","r",encoding="utf-8") as f:
        for line in f:
            h,r,t = line.rstrip("\n").split('\t')
            h = ent2name_1[int(h)]
            r = rel2name_1[int(r)]
            t = ent2name_1[int(t)]
            triples_1.append((h,r,t))
    with open(path + "triples_2","r",encoding="utf-8") as f:
        for line in f:
            h,r,t = line.rstrip("\n").split('\t')
            h = ent2name_2[int(h)]
            r = rel2name_2[int(r)]
            t = ent2name_2[int(t)]
            triples_2.append((h,r,t))
    # gold entity alignment pairs
    with open(path + "ent_ILLs","r",encoding="utf-8") as f:
        for line in f:
            ent1,ent2 = line.rstrip("\n").split('\t')
            ent_ills.append((ent1,ent2))
    return triples_1,triples_2,ents_1,ents_2,rels_1,rels_2,ent_ills
def reform_literal(liter_string):
    """Normalise an N-Triples literal: drop language tags and datatype
    suffixes, surrounding angle brackets and quotes, and flatten embedded
    newlines to spaces."""
    text = liter_string.rstrip('@zhenfrdeja .\n')
    text = text.strip("<>")
    text = text.split(r"^^<")[0]
    text = text.strip("\"")
    return text.replace('\n', ' ')
def reform_att(att_string):
    """Return the lower-cased local name of an attribute URI."""
    marker = r"/property/" if r"/property/" in att_string else r"/"
    return att_string.split(marker)[-1].lower()
def refrom_name(string):
    """Lower-cased local name of a DBpedia resource URI (whole string when
    the resource marker is absent)."""
    local = string.split(r"dbpedia.org/resource/")[-1]
    return local.lower()
# def sip_del_char2(string):
# return string.replace('.', '').replace('(', '').replace(')', '').replace(',', '').replace('_', '').replace('-', '').replace(' ', '')
def dict_add_r_t(dic, h, r, t):
    """Insert ``t`` into the nested mapping ``dic[h][r]`` (a set), creating
    the intermediate levels on demand."""
    dic.setdefault(h, dict()).setdefault(r, set()).add(t)
def read_att_part(path,lan1,lan2,new_path,top_num=50):
    """Read attribute triples of both KGs and keep only the ``top_num`` most
    frequent attributes (frequency = number of distinct entities using them).

    The filtered triples are also written to new_att_triples_{1,2} under
    ``new_path``.

    Returns:
        (atts_1, atts_2, re_att_triples_1, re_att_triples_2) where atts_*
        contain one attribute entry per surviving triple (with duplicates).
    """
    print("TOP ATT NUM IS :",top_num)
    triples_1 = []
    triples_2 = []
    re_att_triples_1 = []
    re_att_triples_2 = []
    atts_1 = []
    atts_2 = []
    h_a_v_1 =dict()
    h_a_v_2 =dict()
    # parse raw N-Triples-like lines of the form "<h> <a> <v> ."
    with open(path + lan1 + "_att_triples","r",encoding="utf-8") as f:
        for line in f:
            string = line.rstrip("\n .")
            h,a,v = string.split(' ',2)
            h = h.strip("<>")
            a = lower_rel_name(a.strip("<>"))
            v = reform_literal(v.strip("<>"))
            triples_1.append((h,a,v))
            dict_add_r_t(h_a_v_1, h, a, v)
    with open(path + lan2 + "_att_triples","r",encoding="utf-8") as f:
        for line in f:
            string = line.rstrip("\n .")
            h,a,v = string.split(' ',2)
            h = h.strip("<>")
            a = lower_rel_name(a.strip("<>"))
            v = reform_literal(v.strip("<>"))
            triples_2.append((h,a,v))
            dict_add_r_t(h_a_v_2, h, a, v)
    #fre cumu
    # frequency accumulation: per attribute, count distinct heads using it
    fre_att_dict1 = dict()
    fre_att_dict2 = dict()
    for h in h_a_v_1.keys():
        for a in h_a_v_1[h].keys():
            if a not in fre_att_dict1:
                fre_att_dict1[a] = 0
            fre_att_dict1[a] += 1
    for h in h_a_v_2.keys():
        for a in h_a_v_2[h].keys():
            if a not in fre_att_dict2:
                fre_att_dict2[a] = 0
            fre_att_dict2[a] += 1
    #get top num st att
    att1_list = [(att,fre) for att,fre in fre_att_dict1.items()]
    att2_list = [(att,fre) for att,fre in fre_att_dict2.items()]
    att1_list.sort(key=lambda x:x[1],reverse=True)
    att2_list.sort(key=lambda x:x[1],reverse=True)
    re_att_1 = [att for att,fre in att1_list][:top_num]
    re_att_2 = [att for att,fre in att2_list][:top_num]
    re_att_1 = set(re_att_1)
    re_att_2 = set(re_att_2)
    print("print(len(re_att_1)) print(len(re_att_2))")
    print(len(re_att_1))
    print(len(re_att_2))
    #filter triples
    # keep triples whose attribute survived and whose value is not blank
    for h,a,v in triples_1:
        if a in re_att_1 and v.replace(' ','')!="":
            re_att_triples_1.append((h,a,v))
            atts_1.append(a)
    for h,a,v in triples_2:
        if a in re_att_2 and v.replace(' ','')!="":
            re_att_triples_2.append((h,a,v))
            atts_2.append(a)
    #write down new atttriples:
    with open(new_path + "new_att_triples_1","w",encoding="utf-8") as f:
        for h,a,v in re_att_triples_1:
            string = h + '\t' + a + '\t' + v + '\n'
            f.write(string)
    with open(new_path + "new_att_triples_2","w",encoding="utf-8") as f:
        for h,a,v in re_att_triples_2:
            string = h + '\t' + a + '\t' + v + '\n'
            f.write(string)
    return atts_1,atts_2,re_att_triples_1,re_att_triples_2
#get it from multiKE code.
def sip_del_char(string):
    """Simplify a local name (taken from the MultiKE code): delete '.', '(',
    ')', ',' and turn '_' into spaces."""
    table = str.maketrans('_', ' ', '.(),')
    return string.translate(table)
def get_to_translate_vaule_list(ents_1,ents_2,rels_1,rels_2,atts_1,atts_2,att_triples_1,att_triples_2):
    """Collect every string that may need translation to English.

    Returns:
        vaule_list: KG2 entity/relation names plus all attribute values
        eng_mapping_ent: entity URI -> simplified local name
        eng_mapping_rel: relation/attribute URI -> simplified local name
        entity_name_list: KG1 entity local names (mapped to themselves later)
        rel_name_list: KG1 relation/attribute names
    """
    eng_mapping_ent = dict()
    eng_mapping_rel = dict()
    vaule_list = []
    rel_name_list = []
    entity_name_list = []
    # KG1 entities: local name only. NOTE(review): unlike KG2 below these do
    # NOT go through sip_del_char and are not queued in vaule_list — confirm
    # this asymmetry is intentional.
    for ent_list in [ents_1]:
        for ent in ent_list:
            name = refrom_name(ent)
            eng_mapping_ent[ent] = name
            entity_name_list.append(name)#
    # KG2 entities: simplified local names, queued for translation
    for ent_list in [ents_2]:
        for ent in ent_list:
            name = sip_del_char(refrom_name(ent))
            eng_mapping_ent[ent] = name
            vaule_list.append(name)
    # KG1 relations + attributes: recorded but kept in a separate list
    for rel_list in [rels_1,atts_1]:
        for rel in rel_list:
            name = sip_del_char(reform_att(rel))
            eng_mapping_rel[rel] = name
            rel_name_list.append(name)
    # KG2 relations + attributes: queued for translation
    for rel_list in [rels_2,atts_2]:
        for rel in rel_list:
            name = sip_del_char(reform_att(rel))
            eng_mapping_rel[rel] = name
            vaule_list.append(name)
    # all attribute values from both KGs are translation candidates
    for att_tri_list in [att_triples_1,att_triples_2]:
        for h,a,v in att_tri_list:
            vaule_list.append(v)
    return vaule_list,eng_mapping_ent,eng_mapping_rel,entity_name_list,rel_name_list
def isen_string(string):
    """Heuristic "is English/ASCII" test: True when the string contains only
    ASCII word characters once punctuation and whitespace are removed
    (such strings need no translation)."""
    stripped = string.strip()
    for ch in '.(),_- "/\\#:?!':
        stripped = stripped.replace(ch, '')
    return bool(re.match(pattern='[0-9a-zA-Z_.-]+$', string=stripped))
def write_rel_triples(path, rel_triple_1, rel_triples_2):
    """Dump both KGs' relation triples as TSV files under ``path``."""
    outputs = (("rel_triples_1", rel_triple_1), ("rel_triples_2", rel_triples_2))
    for filename, triples in outputs:
        with open(path + filename, "w", encoding="utf-8") as f:
            for h, r, t in triples:
                f.write(h + "\t" + r + "\t" + t + '\n')
def write_train_val_test_ill_pairs(path,valid_path,ills):
    """Write all alignment pairs plus a random train/valid/test split.

    NOTE(review): split sizes are hard-coded (4500 train / 1000 valid /
    remainder test) and the sampling is unseeded, so the split differs on
    every run; raises ValueError when fewer than 5500 pairs are given.
    """
    all_data = ills
    # full pair list, one "e1<TAB>e2" per line
    with open(path+"ent_links","w",encoding="utf-8") as f:
        for e1,e2 in all_data:
            string = e1 + '\t' + e2 +'\n'
            f.write(string)
    train_data = random.sample(all_data,4500)
    all_data = list(set(all_data)-set(train_data))
    valid_data = random.sample(all_data,1000)
    test_data = list(set(all_data)-set(valid_data))
    print("train/vaild/test length:",len(train_data),len(valid_data),len(test_data))
    # sanity check: the three splits must be pairwise disjoint (all zeros)
    print("overloap:",len(set(train_data)&set(test_data)),len(set(train_data)&set(valid_data)),
          len(set(test_data)&set(valid_data)))
    temp_list = []
    temp_list.append(("train_links",train_data))
    temp_list.append(("valid_links",valid_data))
    temp_list.append(("test_links",test_data))
    for filename,data in temp_list:
        with open(valid_path+filename,"w",encoding="utf-8") as f:
            for e1,e2 in data:
                string = e1 + '\t' + e2 +'\n'
                f.write(string)
def write_predicate_local_name(path,rel2name,name2en,o_rels_1, o_rels_2,o_atts_1, o_atts_2):
    """Write predicate_local_name_{1,2}: one "<uri>\\t<lower-cased english
    name>" line per distinct relation/attribute of each KG.

    The original duplicated the whole write loop for both files; it is
    folded into a single loop here (output content is unchanged).
    """
    #predicate_local_name_1 predicate_local_name_1
    all_rel_1 = list(set(o_rels_1) | set(o_atts_1))
    all_rel_2 = list(set(o_rels_2) | set(o_atts_2))
    for filename, rels in (("predicate_local_name_1", all_rel_1),
                           ("predicate_local_name_2", all_rel_2)):
        with open(path + filename, "w", encoding="utf-8") as f:
            for rel in rels:
                name = rel2name[rel]
                en = name2en[name].lower()
                f.write(rel + '\t' + en + '\n')
def write_entity_name(path, ents_1, ents_2, ent2name, name2en):
    """Write entity_local_name_{1,2}: "<uri>\\t<english name>" per entity.

    Entities whose name has no English mapping fall back to their raw local
    name (refrom_name) and are counted/reported.
    """
    missing = 0
    jobs = (("entity_local_name_1", ents_1), ("entity_local_name_2", ents_2))
    for filename, ents in jobs:
        with open(path + filename, "w", encoding="utf-8") as f:
            for e in ents:
                name = ent2name[e]
                if name in name2en:
                    en = name2en[name].lower()
                else:
                    # no translation recorded: fall back to the URI local name
                    en = refrom_name(e)
                    print("here name error in ", e, "name:", name, "fin name:", en)
                    missing += 1
                f.write(e + '\t' + en + '\n')
    print("error write entity name num:", missing)
def write_att_triples_data(path, att_triples_1, att_triples_2, vaule2en):
    """Write attr_triples_{1,2} as TSV, with each attribute value replaced
    by its lower-cased English translation from ``vaule2en``."""
    for filename, att_tri in (("attr_triples_1", att_triples_1),
                              ("attr_triples_2", att_triples_2)):
        with open(path + filename, "w", encoding="utf-8") as f:
            for h, a, v in att_tri:
                f.write(h + '\t' + a + '\t' + vaule2en[v].lower() + '\n')
if __name__ == '__main__':
    # --- paths and language configuration ---
    ori_data_path = r"./dataset/ja_en/"
    new_data_path = r"./dataset/ja_en/"
    langague_align_dict_path = ori_data_path + "vaule_mapping.pickle" #
    valid_data_path = new_data_path
    lang = "ja"
    trans_lang = "ja"
    print(ori_data_path)
    print(new_data_path)
    if not os.path.exists(new_data_path):
        os.makedirs(new_data_path)
    if not os.path.exists(valid_data_path):
        os.makedirs(valid_data_path)
    # resume a previously saved value -> English mapping when available
    vaule2en = dict()
    if os.path.exists(langague_align_dict_path):
        print("There is the pretraind language map.")
        vaule2en = pickle.load(open(langague_align_dict_path, "rb"))
        print("load the pretrained language map")
        print("map length = ", len(vaule2en.keys()))
    #0 read data
    o_rel_triples_1, o_rel_triples_2, o_ents_1, o_ents_2, o_rels_1, o_rels_2, ent_ills = read_rel_part(ori_data_path)
    o_atts_1, o_atts_2, o_att_triples_1, o_att_triples_2 = read_att_part(ori_data_path,lang,"en",new_data_path,top_num=40)
    o_atts_1 = list(set(o_atts_1))
    o_atts_2 = list(set(o_atts_2))
    o_rels_1 = list(set(o_rels_1))
    o_rels_2 = list(set(o_rels_2))
    print(len(o_atts_1))
    print(len(o_atts_2))
    # report the fraction of aligned pairs whose local names already match
    index_num = 0
    for a,b in ent_ills:
        if refrom_name(a) == refrom_name(b):
            index_num+=1
    print(index_num/len(ent_ills))
    vaule_list, entity2name, rel2name,temp_entity_name_list,rel_name_list = \
        get_to_translate_vaule_list(o_ents_1, o_ents_2, o_rels_1, o_rels_2, o_atts_1, o_atts_2, o_att_triples_1, o_att_triples_2)
    print("len vaule list",len(vaule_list))
    vaule_list = list(set(vaule_list))
    print("len set vaule list", len(vaule_list))
    ####dele no need translate data:
    # pure-ASCII strings map to themselves; everything else is queued
    need_translate_list = []
    for vaule in vaule_list:
        if isen_string(vaule):
            if vaule not in vaule2en:
                vaule2en[vaule] = vaule
        else:
            if vaule not in vaule2en:
                need_translate_list.append(vaule)
    print(len(need_translate_list))
    # KG1 relation names are always queued; KG1 entity names map to themselves
    for vaule in rel_name_list:
        if vaule not in vaule2en:
            need_translate_list.append(vaule)
    for vaule in temp_entity_name_list:
        vaule2en[vaule] = vaule
    print("len need translate data", len(need_translate_list))
    print("len need translate data", len(need_translate_list))
    #1 step----translate.
    # translate in newline-joined batches through the Baidu API
    batch_length = 40
    for i in range(0,len(need_translate_list),batch_length):
        print("Now:---",i,r"/",len(need_translate_list))
        temp_list = need_translate_list[i:min(i+batch_length ,len(need_translate_list))]
        #string is equal to lines
        string = ""
        for one in temp_list:
            string += "\n"+one
        string = string.strip()
        #string = temp_list[0]
        trans_res_list,flag = baiduapi(string, trans_lang ,"en")
        ori_str_list = string.split('\n')
        # the API must return exactly one translation per input line
        if len(ori_str_list)!=len(trans_res_list):
            print(len(ori_str_list))
            print(len(trans_res_list))
            print(ori_str_list)
            print(trans_res_list)
            print("error?in list length")
            flag = False
        else:
            try:
                for j in range(len(ori_str_list)):
                    vaule2en[ori_str_list[j]] = trans_res_list[j]
            except Exception as e:
                print(e)
                print("ERROR except")
        if flag == False:
            print("ERROR!")
            break
        #must sleep! (API rate limiting)
        time.sleep(1)
        # periodic checkpoint of the mapping
        if i % 5000 == 0:
            with open(langague_align_dict_path, "wb") as f:
                pickle.dump(vaule2en, f)
    ####save the mapping
    with open(langague_align_dict_path, "wb") as f:
        pickle.dump(vaule2en, f)
    #2 step get_new_data!
    """
    -------------
    """
    #exit(0)
    #rel_triples_1 rel_triples_2
    write_rel_triples(new_data_path,o_rel_triples_1,o_rel_triples_2)
    #write ill
    write_train_val_test_ill_pairs(new_data_path,valid_data_path, ent_ills)
    #write predicate.
    write_predicate_local_name(new_data_path,rel2name,vaule2en,o_rels_1,o_rels_2,o_atts_1,o_atts_2)
    #write entity name
    write_entity_name(new_data_path,o_ents_1,o_ents_2,entity2name,vaule2en)
    #write att triples.
    write_att_triples_data(new_data_path,o_att_triples_1,o_att_triples_2,vaule2en)
|
from typing import Union
from objects.player import Player
from objects.const import Privileges
from config import prefix
import packets
import json
import re
# Registry: JSON-encoded {'rgx', 'perms'} key -> handler coroutine.
commands = {}
def command(rgx: str, perms: int):
    """Decorator registering a chat-command handler under its regex and
    required-permissions key."""
    def register(func):
        key = json.dumps({'rgx': rgx, 'perms': perms})
        commands[key] = func
        return func
    return register
@command(f'^\{prefix}alert\ (?P<msg>.*)$', Privileges.Normal)
async def alert(msg: dict, p: Player) -> str:
    """Broadcast the captured message to every online player as a packet
    notification.

    NOTE(review): gated on Privileges.Normal, so any user can broadcast —
    confirm this shouldn't be staff-only.
    """
    # imported lazily to avoid a circular import at module load time
    from cache import online
    msg = msg['msg']
    for key in online:
        x = online[key]
        # a literal '\n' typed in chat becomes a real newline
        x.enqueue.append(packets.notification(msg.replace(r'\n', '\n')))
    return 'Alert Sent!'
@command(f'^\{prefix}py\ (?P<args>.*)', Privileges.Normal)
async def py(msg: dict, p: Player) -> str:
    """Execute arbitrary Python typed in chat and return its repr.

    SECURITY: exec() of user-supplied input is remote code execution by
    design; it MUST be limited to trusted admins (see TODO below) — as
    written, Privileges.Normal lets any user run it.
    """
    #TODO: change from normal user to admin
    import cache
    try:
        # compile the chat text as the body of a throwaway async function
        f = {}
        exec(f'async def _py(p, msg, cache):\n {msg["args"]}', f)
        output = await f['_py'](p, msg, cache)
        if output:
            return str(output)
        else:
            return 'Success'
    except Exception as e:
        # report the error back to the invoking player
        return str(e).replace(r'\n', '\n')
async def process_cmd(msg: str, p: Player) -> str:
    """Dispatch ``msg`` to the first registered command whose regex matches
    and whose required privileges the player holds.

    Returns the handler's reply, or None when no command applies.
    """
    for key, handler in commands.items():
        meta = json.loads(key)
        match = re.compile(meta['rgx']).match(msg)
        if match is None:
            continue
        if not p.privileges & Privileges(meta['perms']):
            continue
        return await handler(match.groupdict(), p)
    return None
import os
import typing
import jk_typing
import jk_utils
import jk_json
import jk_prettyprintobj
class DrivePartitionInfo(jk_prettyprintobj.DumpMixin):
    """Information about a single disk partition as reported by
    ``lsblk --json`` (one record with ``"type": "part"``)."""

    ################################################################################################################################
    ## Constructor
    ################################################################################################################################

    #
    # Constructor method.
    #
    # @param    dict jdata_lsblk_disk    A single partition record from `lsblk --json` output.
    #
    @jk_typing.checkFunctionSignature()
    def __init__(self, jdata_lsblk_disk:dict):
        assert jdata_lsblk_disk["type"] == "part"
        self.devicePath = jdata_lsblk_disk["dev"]
        self.fsavail = jdata_lsblk_disk["fsavail"]
        self.fssize = jdata_lsblk_disk["fssize"]
        self.fsused = jdata_lsblk_disk["fsused"]
        self.fstype = jdata_lsblk_disk["fstype"]
        self.mountpoint = jdata_lsblk_disk["mountpoint"]
        self.partflags = jdata_lsblk_disk["partflags"]
        self.parttype = jdata_lsblk_disk["parttype"]
        self.partlabel = jdata_lsblk_disk["partlabel"]
        self.partuuid = jdata_lsblk_disk["partuuid"]
        self.ptuuid = jdata_lsblk_disk["ptuuid"]
        self.uuid = jdata_lsblk_disk["uuid"]
        self.size = jdata_lsblk_disk["size"]
    #

    ################################################################################################################################
    ## Public Properties
    ################################################################################################################################

    ################################################################################################################################
    ## Helper Methods
    ################################################################################################################################

    def _dumpVarNames(self) -> list:
        # Fix: the previous list also named drive-level attributes
        # (formFactor, nominalMediaRotationRate, firmwareRevision, transportHR,
        # isNCQSupported, isTRIMSupported) that this class never assigns —
        # apparently copied from a drive-info class — so dumping would hit
        # missing attributes. Only the attributes set in __init__ are listed.
        return [
            "devicePath",
            "fsavail",
            "fssize",
            "fsused",
            "fstype",
            "mountpoint",
            "partflags",
            "parttype",
            "partlabel",
            "partuuid",
            "ptuuid",
            "uuid",
            "size",
        ]
    #

    ################################################################################################################################
    ## Public Methods
    ################################################################################################################################

    def toJSON(self) -> dict:
        """Return a JSON-serialisable dict of all partition attributes."""
        return {
            "devicePath": self.devicePath,
            "fsavail": self.fsavail,
            "fssize": self.fssize,
            "fsused": self.fsused,
            "fstype": self.fstype,
            "mountpoint": self.mountpoint,
            "partflags": self.partflags,
            "parttype": self.parttype,
            "partlabel": self.partlabel,
            "partuuid": self.partuuid,
            "ptuuid": self.ptuuid,
            "uuid": self.uuid,
            "size": self.size,
        }
    #
#
|
from django.shortcuts import render
import cookie_handler
from decorate import *
import requests
import json
# Backend API endpoint and shared request settings, taken from Django settings.
api_link = settings.API_ADDRESS
headers = settings.HEADERS
# Station ids: company equipment vs our own — NOTE(review): not referenced in
# this module's visible code; confirm they are still needed.
EQUIPID_COMPANY = settings.EQUIPID_COMPANY
EQUIPID_OUR = settings.EQUIPID_OUR
# Fetch all device data
def get_Device_Data(request):
    """Fetch the full device list from the backend API.

    Forwards the caller's session cookie and returns the raw JSON text of
    POST /Api/DeviceData/List.
    """
    userid, presend_cookie = cookie_handler.get_cookie(request)
    # NOTE(review): magic request payload required by the API — confirm meaning
    data = {"id": "3"}
    # fetch all device info from the server
    result = requests.post(api_link + "/Api/DeviceData/List", headers=headers, cookies=presend_cookie,
                           json=data).text
    # print(result)
    return result
# Query device data by EquipID
def get_Device_Data_by_EquipID(request, EquipID, *args, **kwargs):
    """Fetch the devices belonging to one station (``EquipID``) from the
    backend API.

    Forwards the caller's session cookie and returns the raw JSON text of
    POST /Api/DeviceData/ListByEquipID. The leftover debug banners and
    response dump that ran on every request have been removed.
    """
    userid, presend_cookie = cookie_handler.get_cookie(request)
    data = {"EquipID": EquipID}
    result = requests.post(api_link + "/Api/DeviceData/ListByEquipID", headers=headers, cookies=presend_cookie,
                           json=data).text
    return result
# Device management (view)
@check_permiss(get_Device_Data)
def main(request, return_context):
    """Render the device-view page: only data/sensor devices (DevID < 100);
    controllable devices (DevID >= 100) are filtered out.

    Fixes: the bare ``except`` that swallowed every error is narrowed to the
    failures a malformed record can actually cause, and dead commented-out
    code is removed.
    """
    if "basedata" in return_context.keys():
        infos = return_context["basedata"]['appendData']
        only_data_devices = []
        for info in infos:
            try:
                if int(info["DevID"]) < 100:
                    only_data_devices.append(info)
            except (KeyError, TypeError, ValueError):
                # skip records with a missing or non-numeric DevID
                pass
        return_context["data"] = only_data_devices
        return_context["count"] = len(only_data_devices)
    return render(request, 'device/main.html', context=return_context)
# Device management (control)
@check_permiss(get_Device_Data)
def control(request, return_context):
    """Render the device-control page: only controllable devices
    (DevID >= 100).

    Fix: the bare ``except`` is narrowed to the failures a malformed record
    can actually cause.
    """
    if "basedata" in return_context.keys():
        infos = return_context["basedata"]['appendData']
        only_data_devices = []
        for info in infos:
            try:
                if int(info["DevID"]) >= 100:
                    only_data_devices.append(info)
            except (KeyError, TypeError, ValueError):
                # skip records with a missing or non-numeric DevID
                pass
        return_context["data"] = only_data_devices
        return_context["count"] = len(only_data_devices)
    return render(request, 'device/control.html', context=return_context)
# 获取设备数据(查看)
# def getDeviceData(request):
# print("----------ininin")
# result = get_Device_Data(request)
# result = json.loads(result)
#
# only_data_devices = []
# if "appendData" in result.keys():
# device_infos = result['appendData']
# # print(user_infos)
# only_data_device_count = len(only_data_devices)
# for device_info in device_infos:
# print(device_info)
# try:
# if int(device_info["DevID"]) < 100:
# only_data_devices.append(device_info)
# except:
# pass
#
# else:
# only_data_devices = ""
# only_data_device_count = 0
# # 用户数量
# print(only_data_device_count)
# return JsonResponse({"code": 0, "msg": "", "count": only_data_device_count, "data": only_data_devices})
# 获取设备数据 (控制)
# def getContorlDevice(request):
# result = get_Device_Data(request)
# result = json.loads(result)
#
# # print(result)
# # 设备信息
# only_data_devices = []
# if "appendData" in result.keys():
# device_infos = result['appendData']
# # print(user_infos)
# only_data_device_count = len(only_data_devices)
# for device_info in device_infos:
# try:
# if int(device_info["DevID"]) >= 100:
# only_data_devices.append(device_info)
# except:
# pass
# else:
# only_data_devices = ""
# only_data_device_count = 0
# # 用户数量
# return JsonResponse({"code": 0, "msg": "", "count": only_data_device_count, "data": only_data_devices})
# , device_id="", operate_code=""
# Update a device's state
@auth
def UpDataDevState(request, equip_id, device_id, device_status):
    """Toggle one device's state via the backend API.

    Returns JSON {"status": "true"|"false"} depending on whether the backend
    reported success.
    """
    status = "false"
    # operate_code: 1 = on, 0 = off
    userid, presend_cookie = cookie_handler.get_cookie(request)
    data = {"deviceState": device_status, "deviceID": device_id, "EquipID": equip_id}
    result = requests.post(api_link + "/Api/DeviceData/UpDataDevState", headers=headers, cookies=presend_cookie,
                           json=data).text
    result = json.loads(result)
    print(result)
    if "errorData" in result.keys():
        # the API signals success through a human-readable Chinese message
        # ("device data updated successfully")
        if result['errorData'] == "修改设备数据成功":
            status = "true"
    # NOTE(review): result is printed twice — likely leftover debugging
    print(result)
    return JsonResponse({"status": status})
@check_permiss(get_Device_Data_by_EquipID)
def api_get_device_Data(request, EquipID, readonly, return_context):
    """JSON API: list one station's devices filtered by ``readonly``.

    readonly "1" -> data devices (DevID < 100); "0" -> control devices
    (DevID >= 100); anything else yields an empty list.

    Fixes: the bare ``except`` blocks are narrowed, the duplicated filter
    loop is factored into a local helper, and the response no longer raises
    NameError when "basedata" is absent (count/data are always defined).
    """
    only_data_devices = []
    if "basedata" in return_context.keys():
        infos = return_context["basedata"]['appendData']
        def _pick(pred):
            # keep records whose DevID parses and satisfies ``pred``
            picked = []
            for info in infos:
                try:
                    if pred(int(info["DevID"])):
                        picked.append(info)
                except (KeyError, TypeError, ValueError):
                    pass
            return picked
        if readonly == "1":
            only_data_devices = _pick(lambda dev_id: dev_id < 100)
        # DevID >= 100 marks controllable devices
        elif readonly == "0":
            only_data_devices = _pick(lambda dev_id: dev_id >= 100)
    count = len(only_data_devices)
    return JsonResponse({"code": 0, "msg": "", "count": count, "data": only_data_devices})
def first(request):
    """Render the landing page of the device section."""
    return render(request, 'device/first.html')
@check_permiss(get_data_func=get_Device_Data_by_EquipID)
def renderByEquipID(request, EquipID, readonly, return_context):
    """Render one station's device panel.

    readonly "1" -> data devices (DevID < 100) on the read-only template;
    "0" -> control devices (DevID >= 100) on the control template.

    Fixes: bare ``except`` blocks narrowed, the per-request debug
    ``print(infos)`` removed, and the redundant duplicate assignment of
    return_context["readonly"] dropped.
    """
    render_template = 'device/renderByEquipID_readonly.html'
    if "basedata" in return_context.keys():
        infos = return_context["basedata"]['appendData']
        if readonly == "1":
            selected = []
            for info in infos:
                try:
                    if int(info["DevID"]) < 100:
                        selected.append(info)
                except (KeyError, TypeError, ValueError):
                    pass
            infos = selected
        # DevID >= 100 marks controllable devices
        elif readonly == "0":
            selected = []
            for info in infos:
                try:
                    if int(info["DevID"]) >= 100:
                        selected.append(info)
                except (KeyError, TypeError, ValueError):
                    pass
            infos = selected
            render_template = "device/renderByEquipID_control.html"
        return_context["data"] = infos
        return_context["count"] = len(infos)
        return_context["EquipID"] = EquipID
        return_context["readonly"] = readonly
    return render(request, render_template, context=return_context)
|
# Generated by Django 2.0.3 on 2018-05-16 07:03
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds the 'impactar' Si/No choice field
    (default 'Si') to the Estacion model."""
    dependencies = [
        ('estaciones', '0008_auto_20180515_2221'),
    ]
    operations = [
        migrations.AddField(
            model_name='estacion',
            name='impactar',
            field=models.CharField(choices=[('Si', 'Si'), ('No', 'No')], default='Si', max_length=255),
        ),
    ]
|
from django.conf.urls import patterns, url
from .views import refresh_status
from .views import CredentialView, CredentialSSLView
# URL routes for the credential / database-status endpoints.
# NOTE(review): django.conf.urls.patterns() was deprecated in Django 1.8 and
# removed in 1.10 — migrate urlpatterns to a plain list of url()/path() calls
# when upgrading Django.
urlpatterns = patterns('',
    url(r"^credential/(?P<pk>\d*)$",
        CredentialView.as_view(),
        name="credential-detail"),
    url(r"^status/(?P<database_id>\d*)$",
        refresh_status,
        name="logical_database_refresh_status"),
    url(r"^credentialssl/(?P<pk>\d*)$",
        CredentialSSLView.as_view(),
        name="credentialssl-detail"),
    )
|
# Collect names until the user submits an empty line, then echo them back.
names = []
while True:
    prompt = "Input the name of person " + str(len(names) + 1) + ", or input nothing to stop."
    print(prompt)
    entered = input()
    if entered == '':
        break
    names.append(entered)
print("The object names are: ")
for name in names:
    print("    " + name)
|
#lab8 James Dumitru
#Using built in
# Read six integers, then print them in input, ascending and descending order.
num_list = [int(input("Enter a number: ")) for _ in range(6)]
#Sort in Inputted order
print("~~~~~~ Inputted Order ~~~~~~~~")
print(num_list)
#Sort in increasing order
print("~~~~~~ Increasing Order ~~~~~~")
num_list.sort()
print(num_list)
#Sort in decreasing order
print("~~~~~~ Decreasing Order ~~~~~~")
num_list.sort(reverse=True)
print(num_list)
|
#!/usr/bin/env python
from lib.OptionsParser import *
from lib.PlummerModel import *
from lib.DiscModel import *
from lib.IsoModel import *
from lib.EtaModel import *
def plot(plot):
    """When ``plot`` is truthy, show a cumulative eccentricity histogram
    overlaid with e^2 and e^3.6 reference curves.

    NOTE(review): relies on module-level names from the wildcard lib imports
    above (``plt``, ``np``) and on ``eccs``, which is not defined anywhere in
    this file — confirm which lib module provides it. Also, the parameter
    shadows the function's own name, and ``normed=`` was removed from
    matplotlib's hist() in 3.x (use density=) — verify against the pinned
    matplotlib version.
    """
    if plot:
        fig = plt.figure()
        ax = fig.add_subplot(111)
        ax.hist(eccs,bins=40,cumulative=True,normed=True,alpha=0.5)
        ax.plot(np.arange(0.0,1.0,0.01),np.power(np.arange(0.0,1.0,0.01),2.0))
        ax.plot(np.arange(0.0,1.0,0.01),np.power(np.arange(0.0,1.0,0.01),3.6))
        plt.show()
def main():
    """Entry point: either convert an existing model file to N-body units or
    build a new model (disc/iso/eta/plummer) and print it.

    Helper functions (read_input_file, convert_to_nbody_units, print_model,
    plot_radial_profile) come from the wildcard lib imports above.
    """
    # Parse options
    op = OptionsParser()
    op = op.get_args()
    if op.command == "convert":
        mass, pos, vel = read_input_file(op.filename)
        N = len(mass)
        assert(N == len(pos))
        assert(N == len(vel))
        convert_to_nbody_units(pos, vel, mass, N)
    else:
        if op.command == "disc":
            mps = DiscModel(op)
        elif op.command == "iso":
            mps = IsoModel(op)
        elif op.command == "eta":
            mps = EtaModel(op)
        elif op.command == "plummer":
            mps = PlummerModel(op)
        # NOTE(review): an unrecognised command leaves ``mps`` unbound and
        # raises NameError on the next line — presumably OptionsParser
        # restricts the choices; confirm.
        mps.create_model()
        print_model(mps.pos, mps.vel, mps.mass, op.outfile)
        if op.plot and op.command == "iso":
            plot_radial_profile(mps.radii)
if __name__ == "__main__":
    main()
|
import urllib.request
import urllib.parse
import urllib.error
from bs4 import BeautifulSoup
import re
import ssl
import sys
import json
import ast
import os
from urllib.request import Request, urlopen
# For ignoring SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
if not os.path.exists('output'):
os.makedirs('output')
with open('song_urls.txt', 'r') as input_file:
url = input_file.readline()
while url:
# Making the website believe that you are accessing it using a mozilla browser
req = Request(url, headers = { 'User-Agent' : 'Mozilla/5.0' })
webpage = urlopen(req).read()
# Creating a BeautifulSoup object of the html page for easy extraction of data.
soup = BeautifulSoup(webpage, 'html.parser')
html = soup.prettify('utf-8')
song_json = {}
song_json["Lyrics"] = []
song_json["Comments"] = []
#Extract Title of the song
for title in soup.findAll('title'):
song_json["Title"] = title.text.strip()
# Extract the release date of the song
for span in soup.findAll('span', attrs = {'class': 'metadata_unit-info metadata_unit-info–text_only'}):
song_json["Release date"] = span.text.strip()
# Extract the Comments on the song
for div in soup.findAll('div', attrs = {'class': 'rich_text_formatting'}):
comments = div.text.strip().split("\n")
for comment in comments:
if comment!="":
song_json["Comments"].append(comment)
#Extract the Lyrics of the song
for div in soup.findAll('div', attrs = {'class': 'lyrics'}):
song_json["Lyrics"].append(div.text.strip().split("\n"))
#Save the json created with the file name as title + .json
file_title = re.sub('[^A-Za-z0-9]+', '', song_json["Title"])
with open(f'./output/{file_title}.json', 'w') as outfile:
json.dump(song_json, outfile, indent = 4, ensure_ascii = False)
# Save the html content into an html file with name as title + .html
# with open(f'./output/{file_title}.html', 'wb') as file:
# file.write(html)
print(f'———-Extraction of {url.rstrip()} is complete. Check json file.———-')
url = input_file.readline() |
from person import Person
# Example Person instance (appears to be left over from manual testing).
person_john = Person("John", "01/03/1984", "male")
class Student(Person):
    """Person specialisation that prefixes the name with "student " and
    carries a faculty."""
    def __init__(self, name, b_date, sex, faculty):
        super().__init__(name, b_date, sex)
        self._change_name()
        self.faculty = faculty
        # blabla(name, 2) repeats the raw name twice (str * int)
        self._test_value = self.blabla(name, 2)
    @property
    def test_value(self):
        """Read-only access to the value computed at construction time."""
        return self._test_value
    @staticmethod
    def blabla(name, number):
        # string repetition: the name concatenated ``number`` times
        return name * number
    def _change_name(self):
        # idempotent: only prefixes when the marker is not already present
        if "student " not in self.name:
            self.name = "student " + self.name
    def __test(self):
        # name-mangled to _Student__test; unused demonstration of "private"
        # methods
        print("TEST")
# student_john = Student("John", "01/03/1984", "male", "Math")
#
# print(student_john.name, student_john.get_age(), student_john.faculty)
#
# print(student_john.test_value)
class Employer(Person):
    """A Person acting as an employer; the name is left untouched."""

    def __repr__(self):
        """Debug representation: '(name, age)'."""
        return f"({self.name}, {self.age})"

    def _change_name(self):
        # Deliberate no-op override: unlike Student, employers get no prefix.
        pass
# Demo: two Employer instances built from identical data.
employer_jane = Employer("Jane", "21/10/2001", "female")
employer_noname = Employer("Jane", "21/10/2001", "female")
# print() uses Employer.__repr__ here since no __str__ is defined on Employer.
print(employer_jane, employer_noname)
# my_list = [employer_jane, employer_jane]
# print(my_list)
# Without a custom __eq__ (none visible in this file), '==' falls back to
# identity comparison, so this prints False despite identical field values.
print(employer_jane == employer_noname)
# --- RMG-Py auto-generated species definitions (pressure-dependent network input). ---
# Each species(...) records structure (SMILES), ground-state energy E0, vibrational
# and hindered-rotor modes, spin multiplicity, collision / energy-transfer models,
# and a two-range NASA polynomial thermo fit.
# NOTE(review): this file is machine-generated; regenerate via the producing job
# rather than editing values by hand.

# Triplet biradical C8 peroxide; radical sites per the group-additivity
# comment below: radical(C=CJO) + radical(C2CsJOOC).
species(
    label = 'C=[C]OO[C](CCCC)CCOO(29153)',
    structure = SMILES('C=[C]OO[C](CCCC)CCOO'),
    E0 = (126.125,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,360,370,350,2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,350,500,795,815,3615,1310,387.5,850,1000,200,800,1000,1200,1400,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (188.221,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.91069,0.150544,-0.000133515,3.37694e-09,6.01858e-11,15362,52.2008], Tmin=(100,'K'), Tmax=(523.036,'K')), NASAPolynomial(coeffs=[11.0698,0.0847832,-4.10233e-05,7.97731e-09,-5.61506e-13,13545.8,-6.39164], Tmin=(523.036,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(126.125,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(C2CsJOOC)"""),
)

# Ketene (singlet); transport from GRI-Mech, thermo from the
# Klippenstein_Glarborg2016 library.
species(
    label = 'CH2CO(28)',
    structure = SMILES('C=C=O'),
    E0 = (-60.8183,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,2120,512.5,787.5],'cm^-1')),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (42.0367,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(3625.12,'J/mol'), sigma=(3.97,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=2.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.13241,0.0181319,-1.74093e-05,9.35336e-09,-2.01725e-12,-7148.09,13.3808], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[5.75871,0.00635124,-2.25955e-06,3.62322e-10,-2.15856e-14,-8085.33,-4.9649], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-60.8183,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(108.088,'J/(mol*K)'), label="""CH2CO""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)

# Closed-shell C9 hydroperoxy ketone (singlet); Lennard-Jones parameters
# estimated from Joback critical properties (see collisionModel comment).
species(
    label = 'CCCCC(=O)CCOO(16766)',
    structure = SMILES('CCCCC(=O)CCOO'),
    E0 = (-419.494,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,375,552.5,462.5,1710,3615,1310,387.5,850,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (146.184,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(4468.5,'J/mol'), sigma=(7.51027,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=697.97 K, Pc=23.94 bar (from Joback method)"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.021909,0.106478,-5.11605e-05,-1.11163e-07,1.3815e-10,-50327.2,35.0186], Tmin=(100,'K'), Tmax=(471.151,'K')), NASAPolynomial(coeffs=[8.18504,0.0687158,-3.25389e-05,6.25288e-09,-4.36324e-13,-51454.8,-2.15787], Tmin=(471.151,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-419.494,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdCsCs)"""),
)
# Hydrogen atom; transport from GRI-Mech, thermo from the BurkeH2O2 library.
# FIX: spinMultiplicity corrected from 1 to 2 — atomic H has a single unpaired
# electron (doublet ground state), consistent with the other monoradicals in
# this file (npropyl and CH2OOH both carry spinMultiplicity = 2).
species(
    label = 'H(3)',
    structure = SMILES('[H]'),
    E0 = (211.792,'kJ/mol'),
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (1.00794,'amu'),
    collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25472.7,-0.459566], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
# Doublet radical; single radical site radical(C=CJO) per the group-additivity
# comment in the thermo fit below.
species(
    label = 'C=[C]OOC(=CCCC)CCOO(29190)',
    structure = SMILES('C=[C]OOC(=CCCC)CCOO'),
    E0 = (67.6788,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,350,440,435,1725,350,500,795,815,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (187.213,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.16851,0.146521,-0.000156312,9.74569e-08,-2.59486e-11,8352.72,54.6823], Tmin=(100,'K'), Tmax=(888.84,'K')), NASAPolynomial(coeffs=[13.7198,0.0750197,-3.56473e-05,6.95412e-09,-4.93423e-13,5528.27,-20.0976], Tmin=(888.84,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(67.6788,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(640.214,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO)"""),
)

# Doublet radical isomer of 29190; radical site radical(C=CJO).
species(
    label = 'C=[C]OOC(=CCOO)CCCC(29191)',
    structure = SMILES('C=[C]OOC(=CCOO)CCCC'),
    E0 = (73.2357,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,350,440,435,1725,350,500,795,815,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (187.213,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.10875,0.145643,-0.000157985,1.02273e-07,-2.84658e-11,9018.38,54.8524], Tmin=(100,'K'), Tmax=(850.619,'K')), NASAPolynomial(coeffs=[12.5279,0.0768135,-3.66083e-05,7.14324e-09,-5.06601e-13,6528.36,-13.3929], Tmin=(850.619,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(73.2357,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(640.214,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO)"""),
)

# Doublet radical with an alkynyl peroxide; radical site radical(C2CsJOOC).
species(
    label = 'C#COO[C](CCCC)CCOO(29192)',
    structure = SMILES('C#COO[C](CCCC)CCOO'),
    E0 = (117.898,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([750,770,3400,2100,2750,2800,2850,1350,1500,750,1050,1375,1000,2175,525,360,370,350,2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,350,500,795,815,3615,1310,387.5,850,1000,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (187.213,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.21713,0.172027,-0.000228501,1.80568e-07,-5.97027e-11,14427.3,51.1499], Tmin=(100,'K'), Tmax=(730.748,'K')), NASAPolynomial(coeffs=[13.6755,0.0795875,-3.88083e-05,7.56197e-09,-5.32525e-13,11957.7,-25.0533], Tmin=(730.748,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(117.898,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(636.057,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-OsCt) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Ct-CtOs) + group(Ct-CtH) + radical(C2CsJOOC)"""),
)

# n-Propyl radical (doublet); transport from GRI-Mech, thermo from the
# DFT_QCI_thermo library.
species(
    label = 'npropyl(83)',
    structure = SMILES('[CH2]CC'),
    E0 = (87.0621,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000],'cm^-1')),
        HinderedRotor(inertia=(0.0928812,'amu*angstrom^2'), symmetry=1, barrier=(2.13552,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.092914,'amu*angstrom^2'), symmetry=1, barrier=(2.13628,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (43.0877,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(2218.31,'J/mol'), sigma=(4.982,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.02815,0.0147023,2.4051e-05,-3.66738e-08,1.38611e-11,10512.1,12.4699], Tmin=(100,'K'), Tmax=(984.464,'K')), NASAPolynomial(coeffs=[6.16543,0.0184495,-6.79029e-06,1.23049e-09,-8.63866e-14,9095.06,-6.67607], Tmin=(984.464,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(87.0621,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(224.491,'J/(mol*K)'), label="""npropyl""", comment="""Thermo library: DFT_QCI_thermo"""),
)

# Doublet radical (C6 fragment); radical site radical(C=CJO).
species(
    label = 'C=[C]OOC(=C)CCOO(29193)',
    structure = SMILES('C=[C]OOC(=C)CCOO'),
    E0 = (150.13,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1685,370,2950,3000,3050,3100,1330,1430,900,1050,1000,1050,1600,1700,3615,1310,387.5,850,1000,350,440,435,1725,350,500,795,815,2750,2783.33,2816.67,2850,1425,1450,1225,1275,1270,1340,700,800,300,400,200,800,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (145.133,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.524473,0.107983,-0.000133208,9.55142e-08,-2.87137e-11,18211.8,41.8042], Tmin=(100,'K'), Tmax=(799.316,'K')), NASAPolynomial(coeffs=[11.0582,0.0500208,-2.44372e-05,4.79593e-09,-3.40309e-13,16360.1,-11.481], Tmin=(799.316,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(150.13,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(428.195,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsOsHH) + group(Cds-CdsCsOs) + group(Cds-CdsOsH) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(C=CJO)"""),
)

# Hydroperoxymethyl radical (doublet); transport from NOx2018, thermo from the
# Klippenstein_Glarborg2016 library.
species(
    label = 'CH2OOH(35)',
    structure = SMILES('[CH2]OO'),
    E0 = (52.1952,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3615,1310,387.5,850,1000,3000,3100,440,815,1455,1000],'cm^-1')),
        HinderedRotor(inertia=(2.16183,'amu*angstrom^2'), symmetry=1, barrier=(49.7048,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (47.0333,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(3467.13,'J/mol'), sigma=(3.69,'angstroms'), dipoleMoment=(1.7,'De'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=2.0, comment="""NOx2018"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[5.83127,-0.00351771,4.54551e-05,-5.66903e-08,2.21633e-11,6061.87,-0.579143], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[6.98746,0.00900484,-3.24367e-06,5.24325e-10,-3.13587e-14,5012.58,-10.2619], Tmin=(1000,'K'), Tmax=(2500,'K'))], Tmin=(200,'K'), Tmax=(2500,'K'), E0=(52.1952,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(124.717,'J/(mol*K)'), label="""CH2OOH""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)

# Doublet radical (C8 fragment, no hydroperoxy group); radical site radical(C=CJO).
species(
    label = 'C=[C]OOC(=C)CCCC(29194)',
    structure = SMILES('C=[C]OOC(=C)CCCC'),
    E0 = (190.892,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3000,3050,3100,1330,1430,900,1050,1000,1050,1600,1700,350,440,435,1725,350,500,795,815,2750,2770,2790,2810,2830,2850,1425,1437.5,1450,1225,1250,1275,1270,1305,1340,700,750,800,300,350,400,200,800,1200,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (141.188,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.274751,0.0994946,-8.07129e-05,3.64098e-08,-7.12375e-12,23108.2,41.9135], Tmin=(100,'K'), Tmax=(1159.17,'K')), NASAPolynomial(coeffs=[11.9495,0.0573113,-2.61259e-05,5.01513e-09,-3.52734e-13,20274.2,-18.8669], Tmin=(1159.17,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(190.892,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(527.969,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-O2s(Cds-Cd)) + group(O2s-O2s(Cds-Cd)) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsOsH) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(C=CJO)"""),
)

# Triplet biradical; radical sites radical(C=CJO) + radical(CCJCOOH).
species(
    label = 'C=[C]OOC([CH]CCC)CCOO(29195)',
    structure = SMILES('C=[C]OOC([CH]CCC)CCOO'),
    E0 = (140.52,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1685,370,3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,350,500,795,815,1380,1390,370,380,2900,435,200,800,1000,1200,1400,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (188.221,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.80007,0.161774,-0.000190743,1.33388e-07,-3.94911e-11,17134.7,57.5687], Tmin=(100,'K'), Tmax=(808.289,'K')), NASAPolynomial(coeffs=[13.8365,0.0794477,-3.79718e-05,7.39034e-09,-5.22351e-13,14445.1,-19.1532], Tmin=(808.289,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(140.52,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(CCJCOOH)"""),
)

# Triplet biradical isomer of 29195.
# NOTE(review): E0, modes and thermo here are byte-identical to 29195 —
# presumably the group-additivity estimate maps both isomers to the same
# groups; verify against the generating RMG job if this matters.
species(
    label = 'C=[C]OOC([CH]COO)CCCC(29196)',
    structure = SMILES('C=[C]OOC([CH]COO)CCCC'),
    E0 = (140.52,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1685,370,3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,350,500,795,815,1380,1390,370,380,2900,435,200,800,1000,1200,1400,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (188.221,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.80007,0.161774,-0.000190743,1.33388e-07,-3.94911e-11,17134.7,57.5687], Tmin=(100,'K'), Tmax=(808.289,'K')), NASAPolynomial(coeffs=[13.8365,0.0794477,-3.79718e-05,7.39034e-09,-5.22351e-13,14445.1,-19.1532], Tmin=(808.289,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(140.52,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(CCJCOOH)"""),
)

# Triplet biradical; radical sites radical(Cds_P) + radical(C2CsJOOC).
species(
    label = '[CH]=COO[C](CCCC)CCOO(29197)',
    structure = SMILES('[CH]=COO[C](CCCC)CCOO'),
    E0 = (133.477,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3120,650,792.5,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,3010,987.5,1337.5,450,1655,360,370,350,2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,350,500,795,815,3615,1310,387.5,850,1000,200,800,1066.67,1333.33,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (188.221,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.95743,0.165066,-0.000193074,1.31345e-07,-3.7582e-11,16293.6,53.0332], Tmin=(100,'K'), Tmax=(836.846,'K')), NASAPolynomial(coeffs=[15.2203,0.0781772,-3.73269e-05,7.26676e-09,-5.13981e-13,13251.2,-31.4255], Tmin=(836.846,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(133.477,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(Cds_P) + radical(C2CsJOOC)"""),
)

# Triplet biradical; radical sites radical(C=CJO) + radical(RCCJCC).
species(
    label = 'C=[C]OOC(C[CH]CC)CCOO(29198)',
    structure = SMILES('C=[C]OOC(C[CH]CC)CCOO'),
    E0 = (134.564,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1685,370,3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,350,500,795,815,1380,1390,370,380,2900,435,200,800,1000,1200,1400,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (188.221,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.68063,0.145497,-0.000118801,-2.06804e-08,7.66847e-11,16368.4,54.0326], Tmin=(100,'K'), Tmax=(513.892,'K')), NASAPolynomial(coeffs=[10.6119,0.0842171,-4.03458e-05,7.80875e-09,-5.48043e-13,14650.7,-1.50791], Tmin=(513.892,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(134.564,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(RCCJCC)"""),
)
species(
label = 'C=[C]OOC(C[CH]OO)CCCC(29199)',
structure = SMILES('C=[C]OOC(C[CH]OO)CCCC'),
E0 = (128.688,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,350,500,795,815,1380,1390,370,380,2900,435,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.83976,0.162706,-0.00019246,1.35274e-07,-4.02613e-11,15712.9,55.5552], Tmin=(100,'K'), Tmax=(804.227,'K')), NASAPolynomial(coeffs=[13.7877,0.0800119,-3.82342e-05,7.43749e-09,-5.25433e-13,13038.2,-21.0413], Tmin=(804.227,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(128.688,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(CCsJOOH)"""),
)
species(
label = 'C=[C]OOC(CC[CH]C)CCOO(29200)',
structure = SMILES('C=[C]OOC(CC[CH]C)CCOO'),
E0 = (134.552,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,350,500,795,815,1380,1390,370,380,2900,435,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.15169,0.152391,-0.000155204,6.4245e-08,9.1551e-12,16388.1,55.7781], Tmin=(100,'K'), Tmax=(570.291,'K')), NASAPolynomial(coeffs=[11.4229,0.0827798,-3.94473e-05,7.64075e-09,-5.37591e-13,14423.5,-5.73769], Tmin=(570.291,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(134.552,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(RCCJC) + radical(C=CJO)"""),
)
species(
label = 'C=COO[C]([CH]CCC)CCOO(29201)',
structure = SMILES('C=COO[C]([CH]CCC)CCOO'),
E0 = (86.7962,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.89196,0.163979,-0.00019384,1.35274e-07,-3.99048e-11,10676.3,54.6991], Tmin=(100,'K'), Tmax=(811.493,'K')), NASAPolynomial(coeffs=[14.1864,0.0797991,-3.8243e-05,7.45082e-09,-5.26931e-13,7904.43,-24.1272], Tmin=(811.493,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(86.7962,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C2CsJOOC) + radical(CCJCOOH)"""),
)
species(
label = 'C=COO[C]([CH]COO)CCCC(29202)',
structure = SMILES('C=COO[C]([CH]COO)CCCC'),
E0 = (86.7962,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.89199,0.163979,-0.000193841,1.35276e-07,-3.99054e-11,10676.3,54.6992], Tmin=(100,'K'), Tmax=(811.455,'K')), NASAPolynomial(coeffs=[14.1863,0.0797992,-3.8243e-05,7.45084e-09,-5.26932e-13,7904.44,-24.127], Tmin=(811.455,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(86.7962,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(CCJCOOH) + radical(C2CsJOOC)"""),
)
species(
label = '[CH]=[C]OOC(CCCC)CCOO(29203)',
structure = SMILES('[CH]=[C]OOC(CCCC)CCOO'),
E0 = (187.202,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,2750,2800,2850,1350,1500,750,1050,1375,1000,1685,370,3615,1310,387.5,850,1000,2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,350,500,795,815,1380,1390,370,380,2900,435,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.86428,0.162844,-0.000189904,1.29341e-07,-3.71075e-11,22751.9,55.8985], Tmin=(100,'K'), Tmax=(834.252,'K')), NASAPolynomial(coeffs=[14.8668,0.0778324,-3.70598e-05,7.20728e-09,-5.09487e-13,19793.3,-26.4316], Tmin=(834.252,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(187.202,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(Cds_P) + radical(C=CJO)"""),
)
species(
label = '[CH2]CCCC(CCOO)OO[C]=C(29204)',
structure = SMILES('[CH2]CCCC(CCOO)OO[C]=C'),
E0 = (145.352,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,350,500,795,815,3000,3100,440,815,1455,1000,1380,1390,370,380,2900,435,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.72068,0.159652,-0.000182207,1.22152e-07,-3.469e-11,17713.4,56.7677], Tmin=(100,'K'), Tmax=(840.252,'K')), NASAPolynomial(coeffs=[14.3169,0.0785487,-3.74286e-05,7.28794e-09,-5.15784e-13,14850.1,-22.4639], Tmin=(840.252,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(145.352,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(RCCJ)"""),
)
species(
label = 'C=[C]OOC(CCCC)CCO[O](29205)',
structure = SMILES('C=[C]OOC(CCCC)CCO[O]'),
E0 = (92.1101,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,1380,1390,370,380,2900,435,2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,350,500,795,815,492.5,1135,1000,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.61534,0.156851,-0.00018225,1.27434e-07,-3.78729e-11,11306.3,55.3557], Tmin=(100,'K'), Tmax=(804.584,'K')), NASAPolynomial(coeffs=[13.0477,0.0789859,-3.70909e-05,7.16239e-09,-5.03977e-13,8785.77,-16.8043], Tmin=(804.584,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(92.1101,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(665.158,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(ROOJ) + radical(C=CJO)"""),
)
species(
label = 'C=COO[C](C[CH]CC)CCOO(29206)',
structure = SMILES('C=COO[C](C[CH]CC)CCOO'),
E0 = (80.8396,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.83866,0.148822,-0.000128268,-4.18208e-09,6.47265e-11,9912.84,51.3828], Tmin=(100,'K'), Tmax=(521.379,'K')), NASAPolynomial(coeffs=[10.9233,0.0846423,-4.06631e-05,7.8808e-09,-5.53625e-13,8123.63,-6.27099], Tmin=(521.379,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(80.8396,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C2CsJOOC) + radical(RCCJCC)"""),
)
species(
label = 'C=COO[C](C[CH]OO)CCCC(29207)',
structure = SMILES('C=COO[C](C[CH]OO)CCCC'),
E0 = (74.9638,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.93139,0.164908,-0.000195541,1.37136e-07,-4.06627e-11,9254.53,52.6846], Tmin=(100,'K'), Tmax=(807.611,'K')), NASAPolynomial(coeffs=[14.137,0.0803644,-3.8506e-05,7.49812e-09,-5.30026e-13,6497.79,-26.0122], Tmin=(807.611,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(74.9638,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C2CsJOOC) + radical(CCsJOOH)"""),
)
species(
label = 'C=COO[C](CC[CH]C)CCOO(29208)',
structure = SMILES('C=COO[C](CC[CH]C)CCOO'),
E0 = (80.8276,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.31399,0.155702,-0.000164009,7.78164e-08,5.51133e-13,9932.78,53.1465], Tmin=(100,'K'), Tmax=(580.354,'K')), NASAPolynomial(coeffs=[11.7631,0.0831472,-3.97275e-05,7.70334e-09,-5.42343e-13,7886.75,-10.6582], Tmin=(580.354,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(80.8276,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(RCCJC) + radical(C2CsJOOC)"""),
)
species(
label = '[CH2]CCC[C](CCOO)OOC=C(29209)',
structure = SMILES('[CH2]CCC[C](CCOO)OOC=C'),
E0 = (91.6276,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.81391,0.161876,-0.000185386,1.2417e-07,-3.51727e-11,11255.1,53.9027], Tmin=(100,'K'), Tmax=(842.826,'K')), NASAPolynomial(coeffs=[14.6714,0.0788916,-3.76945e-05,7.34712e-09,-5.20253e-13,8307.72,-27.4637], Tmin=(842.826,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(91.6276,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C2CsJOOC) + radical(RCCJ)"""),
)
species(
label = 'C=COO[C](CCCC)CCO[O](29210)',
structure = SMILES('C=COO[C](CCCC)CCO[O]'),
E0 = (38.386,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.70719,0.159055,-0.000185341,1.29308e-07,-3.82797e-11,4847.97,52.486], Tmin=(100,'K'), Tmax=(808.074,'K')), NASAPolynomial(coeffs=[13.3967,0.0793389,-3.73631e-05,7.22312e-09,-5.08579e-13,2245.4,-21.7738], Tmin=(808.074,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(38.386,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(665.158,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(ROOJ) + radical(C2CsJOOC)"""),
)
species(
label = 'C=[C][O](173)',
structure = SMILES('[CH2][C]=O'),
E0 = (160.185,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,539.612,539.669],'cm^-1')),
HinderedRotor(inertia=(0.000578908,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (42.0367,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.39563,0.0101365,2.30741e-06,-8.97566e-09,3.68242e-12,19290.3,10.0703], Tmin=(100,'K'), Tmax=(1068.9,'K')), NASAPolynomial(coeffs=[6.35055,0.00638951,-2.69368e-06,5.4221e-10,-4.02476e-14,18240.9,-6.33602], Tmin=(1068.9,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(160.185,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(153.818,'J/(mol*K)'), comment="""Thermo library: Klippenstein_Glarborg2016 + radical(CsCJ=O) + radical(CJC=O)"""),
)
species(
label = 'CCCC[C]([O])CCOO(16758)',
structure = SMILES('CCCC[C]([O])CCOO'),
E0 = (-69.4491,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,360,370,350,3615,1310,387.5,850,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (146.184,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.38813,0.12719,-0.000146358,1.00264e-07,-2.90472e-11,-8166.49,40.7595], Tmin=(100,'K'), Tmax=(826.048,'K')), NASAPolynomial(coeffs=[11.9547,0.0625799,-2.90356e-05,5.57995e-09,-3.91711e-13,-10370.9,-21.0624], Tmin=(826.048,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-69.4491,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(548.755,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-CsH) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(C2CsJOH)"""),
)
species(
label = 'C=COOC(=CCCC)CCOO(29211)',
structure = SMILES('C=COOC(=CCCC)CCOO'),
E0 = (-172.065,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.4249,0.145703,-0.000134366,6.69904e-08,-1.39318e-11,-20467,52.446], Tmin=(100,'K'), Tmax=(1129.12,'K')), NASAPolynomial(coeffs=[19.5343,0.0679103,-3.10198e-05,5.97099e-09,-4.21223e-13,-25425.9,-56.1607], Tmin=(1129.12,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-172.065,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(665.158,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Cds-CdsHH)"""),
)
species(
label = 'C=COOC(=CCOO)CCCC(29212)',
structure = SMILES('C=COOC(=CCOO)CCCC'),
E0 = (-166.508,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.26551,0.143628,-0.000131766,6.61377e-08,-1.39652e-11,-19805.6,52.2605], Tmin=(100,'K'), Tmax=(1107.52,'K')), NASAPolynomial(coeffs=[18.085,0.0701284,-3.22193e-05,6.21535e-09,-4.3891e-13,-24313.3,-47.9971], Tmin=(1107.52,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-166.508,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(665.158,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Cds-CdsHH)"""),
)
species(
label = 'CH2(S)(23)',
structure = SMILES('[CH2]'),
E0 = (419.862,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1369.36,2789.41,2993.36],'cm^-1')),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (14.0266,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19195,-0.00230793,8.0509e-06,-6.60123e-09,1.95638e-12,50484.3,-0.754589], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.28556,0.00460255,-1.97412e-06,4.09548e-10,-3.34695e-14,50922.4,8.67684], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(419.862,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = 'C=[C]OO[C](CCC)CCOO(9247)',
structure = SMILES('C=[C]OO[C](CCC)CCOO'),
E0 = (149.906,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,360,370,350,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,350,500,795,815,3615,1310,387.5,850,1000,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (174.194,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.15068,0.13357,-0.000105401,-4.14549e-08,9.40625e-11,18194.5,47.2692], Tmin=(100,'K'), Tmax=(499.005,'K')), NASAPolynomial(coeffs=[10.3979,0.0758908,-3.69073e-05,7.16921e-09,-5.03296e-13,16607.5,-4.77101], Tmin=(499.005,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(149.906,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(590.328,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(C2CsJOOC)"""),
)
species(
label = 'OH(5)',
structure = SMILES('[OH]'),
E0 = (28.372,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3287.46],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (17.0073,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.4858,0.00133397,-4.70043e-06,5.64379e-09,-2.06318e-12,3411.96,1.99788], Tmin=(100,'K'), Tmax=(1005.25,'K')), NASAPolynomial(coeffs=[2.88225,0.00103869,-2.35652e-07,1.40229e-11,6.34581e-16,3669.56,5.59053], Tmin=(1005.25,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(28.372,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""OH""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'C=[C]OOC1(CCCC)CCO1(29213)',
structure = SMILES('C=[C]OOC1(CCCC)CCO1'),
E0 = (0.292657,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (171.214,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-2.38489,0.117208,-4.28104e-05,-4.38706e-08,3.06e-11,286.463,47.5085], Tmin=(100,'K'), Tmax=(929.249,'K')), NASAPolynomial(coeffs=[29.7559,0.0380911,-1.0717e-05,1.70305e-09,-1.16204e-13,-8244.37,-118.955], Tmin=(929.249,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(0.292657,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(627.743,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(Cs-CsCsOsOs) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + ring(Oxetane) + radical(C=CJO)"""),
)
species(
label = '[CH2]C(CCC)(CCOO)OO[C]=C(29214)',
structure = SMILES('[CH2]C(CCC)(CCOO)OO[C]=C'),
E0 = (142.779,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,350,500,795,815,3000,3100,440,815,1455,1000,180,180,180,180,680.106,1528.38,1600,1800,3000,3200],'cm^-1')),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.12172,0.166991,-0.000194217,1.27556e-07,-3.47254e-11,17419.8,56.1784], Tmin=(100,'K'), Tmax=(882.804,'K')), NASAPolynomial(coeffs=[17.6879,0.0727017,-3.40062e-05,6.56816e-09,-4.62879e-13,13745.7,-41.6218], Tmin=(882.804,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(142.779,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsCsOs) + group(Cs-CsCsHH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(CJCOOH) + radical(C=CJO)"""),
)
species(
label = '[CH2]C(CCCC)(COO)OO[C]=C(29215)',
structure = SMILES('[CH2]C(CCCC)(COO)OO[C]=C'),
E0 = (139.432,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,3615,1310,387.5,850,1000,2750,2764.29,2778.57,2792.86,2807.14,2821.43,2835.71,2850,1425,1433.33,1441.67,1450,1225,1241.67,1258.33,1275,1270,1293.33,1316.67,1340,700,733.333,766.667,800,300,333.333,366.667,400,350,500,795,815,3000,3100,440,815,1455,1000,180,180,180,180,680.106,1528.38,1600,1800,3000,3200],'cm^-1')),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.153703,'amu*angstrom^2'), symmetry=1, barrier=(3.53393,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.12172,0.166991,-0.000194217,1.27556e-07,-3.47254e-11,17017.2,56.1784], Tmin=(100,'K'), Tmax=(882.804,'K')), NASAPolynomial(coeffs=[17.6879,0.0727017,-3.40062e-05,6.56816e-09,-4.62879e-13,13343.1,-41.6218], Tmin=(882.804,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(139.432,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(661.001,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-CsCsCsOs) + group(Cs-CsCsHH) + longDistanceInteraction_noncyclic(CsCs-ST) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=CJO) + radical(CJCOOH)"""),
)
species(
label = 'C=C1OOC1(CCCC)CCOO(29216)',
structure = SMILES('C=C1OOC1(CCCC)CCOO'),
E0 = (-183.714,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.08983,0.138954,-0.000108742,4.27832e-08,-6.75225e-12,-21826.6,51.657], Tmin=(100,'K'), Tmax=(1499.21,'K')), NASAPolynomial(coeffs=[29.6934,0.0514863,-2.12275e-05,3.8673e-09,-2.62835e-13,-31656.4,-119.778], Tmin=(1499.21,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-183.714,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(673.472,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-O2s(Cds-Cd)) + group(O2s-OsH) + group(Cs-(Cds-Cds)CsCsOs) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsCsOs) + group(Cds-CdsHH) + ring(Cyclobutane)"""),
)
species(
label = 'C=[C]OOC(O)(CC[O])CCCC(29217)',
structure = SMILES('C=[C]OOC(O)(CC[O])CCCC'),
E0 = (-117.862,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (188.221,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-3.18524,0.156748,-0.000154872,8.05549e-08,-1.69648e-11,-13915.6,56.5425], Tmin=(100,'K'), Tmax=(1138.72,'K')), NASAPolynomial(coeffs=[24.7736,0.0585367,-2.55023e-05,4.81578e-09,-3.36796e-13,-20283.1,-81.9749], Tmin=(1138.72,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-117.862,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(665.158,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-CsH) + group(O2s-O2s(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsCsOsOs) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(CCOJ) + radical(C=CJO)"""),
)
species(
label = 'C=[C]O[O](110)',
structure = SMILES('C=[C]O[O]'),
E0 = (339.001,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,492.5,1135,1000,1685,370],'cm^-1')),
HinderedRotor(inertia=(0.0942477,'amu*angstrom^2'), symmetry=1, barrier=(2.16694,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (58.0361,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.10787,0.026676,-5.52143e-05,6.05409e-08,-2.36332e-11,40797.8,12.0288], Tmin=(100,'K'), Tmax=(872.797,'K')), NASAPolynomial(coeffs=[1.34334,0.0171001,-8.40162e-06,1.59781e-09,-1.08413e-13,41778.6,24.1556], Tmin=(872.797,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(339.001,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(174.604,'J/(mol*K)'), comment="""Thermo library: Klippenstein_Glarborg2016 + radical(ROOJ) + radical(C=CJO)"""),
)
species(
label = 'CCCC[C]CCOO(28384)',
structure = SMILES('CCCC[C]CCOO'),
E0 = (144.248,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,3615,1310,387.5,850,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (130.185,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.602271,0.106692,-9.65495e-05,4.98095e-08,-1.103e-11,17510.2,36.2426], Tmin=(100,'K'), Tmax=(1049.61,'K')), NASAPolynomial(coeffs=[12.3607,0.0572903,-2.59494e-05,4.96716e-09,-3.49172e-13,14789,-26.9238], Tmin=(1049.61,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(144.248,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(523.812,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(CCJ2_triplet)"""),
)
species(
label = 'H2CC(41)',
structure = SMILES('[C]=C'),
E0 = (401.202,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650],'cm^-1')),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (26.0373,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(2480.69,'J/mol'), sigma=(4.48499,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=387.48 K, Pc=62.39 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.28155,0.00697643,-2.38528e-06,-1.21078e-09,9.82042e-13,48319.2,5.92036], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[4.27807,0.00475623,-1.63007e-06,2.54623e-10,-1.4886e-14,48014,0.639979], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(401.202,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(83.1447,'J/(mol*K)'), label="""H2CC""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
species(
label = 'CCCC[C](CCOO)O[O](28369)',
structure = SMILES('CCCC[C](CCOO)O[O]'),
E0 = (-66.0289,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2761.11,2772.22,2783.33,2794.44,2805.56,2816.67,2827.78,2838.89,2850,1425,1431.25,1437.5,1443.75,1450,1225,1237.5,1250,1262.5,1275,1270,1287.5,1305,1322.5,1340,700,725,750,775,800,300,325,350,375,400,2750,2800,2850,1350,1500,750,1050,1375,1000,360,370,350,3615,1310,387.5,850,1000,492.5,1135,1000,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (162.184,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-1.9638,0.14211,-0.000191303,1.58369e-07,-5.45582e-11,-7737.14,45.385], Tmin=(100,'K'), Tmax=(778.213,'K')), NASAPolynomial(coeffs=[10.1578,0.0698748,-3.29281e-05,6.29716e-09,-4.37612e-13,-9323.06,-8.1229], Tmin=(778.213,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-66.0289,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(569.541,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-OsH) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(C2CsJOOH) + radical(ROOJ)"""),
)
# Inert bath-gas collider (50% of the bath mixture; see network() below).
# Auto-generated RMG-Py data — numeric values are machine-produced.
species(
    label = 'N2',
    structure = SMILES('N#N'),
    E0 = (-8.69489,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (28.0135,'amu'),
    collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43376e-09,2.58636e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.9759,0.00164141,-7.19722e-07,1.25378e-10,-7.91526e-15,-1025.84,5.53757], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
# Inert bath-gas collider (other 50% of the bath mixture; see network() below).
# NOTE(review): sigma is given in metres (3.758e-10 m) while every other
# species in this file uses angstroms — this comes from the fallback
# Lennard-Jones estimator per the comment; verify the generator handles the
# unit consistently before reusing these parameters.
species(
    label = 'Ne',
    structure = SMILES('[Ne]'),
    E0 = (-6.19738,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (20.1797,'amu'),
    collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
# --- Transition states -------------------------------------------------------
# Auto-generated by RMG-Py: one transitionState() stanza per saddle point
# referenced by the reaction() stanzas below.  E0 is the transition-state
# ground-state energy; numeric values are machine-produced — do not hand-edit.
transitionState(
    label = 'TS1',
    E0 = (126.125,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS2',
    E0 = (283.718,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS3',
    E0 = (289.274,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS4',
    E0 = (344.8,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS5',
    E0 = (254.912,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS6',
    E0 = (276.204,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS7',
    E0 = (272.832,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS8',
    E0 = (272.832,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS9',
    E0 = (238.914,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS10',
    E0 = (283.305,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS11',
    E0 = (271.362,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS12',
    E0 = (181.046,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS13',
    E0 = (326.321,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS14',
    E0 = (278.67,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS15',
    E0 = (323.182,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS16',
    E0 = (199.761,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS17',
    E0 = (163.238,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS18',
    E0 = (326.321,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS19',
    E0 = (262.439,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS20',
    E0 = (278.67,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS21',
    E0 = (268.695,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS22',
    E0 = (277.785,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS23',
    E0 = (126.125,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS24',
    E0 = (186.793,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS25',
    E0 = (186.793,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS26',
    E0 = (569.768,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS27',
    E0 = (201.981,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS28',
    E0 = (300.097,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS29',
    E0 = (296.75,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS30',
    E0 = (134.41,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS31',
    E0 = (236.164,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS32',
    E0 = (483.249,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
transitionState(
    label = 'TS33',
    E0 = (335.173,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
)
# --- Reactions ---------------------------------------------------------------
# Auto-generated RMG-Py reaction stanzas.  Each kinetics `comment` string
# records rate-rule estimation provenance and is runtime data — the spelling
# "Euclidian" is RMG's own output and must not be "corrected".
reaction(
    label = 'reaction1',
    reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
    products = ['CH2CO(28)', 'CCCCC(=O)CCOO(16766)'],
    transitionState = 'TS1',
    kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['H(3)', 'C=[C]OOC(=CCCC)CCOO(29190)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(137.12,'m^3/(mol*s)'), n=1.63155, Ea=(4.2466,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds-CsH_Cds;HJ] for rate rule [Cds-CsH_Cds-OsCs;HJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction3',
reactants = ['H(3)', 'C=[C]OOC(=CCOO)CCCC(29191)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(137.12,'m^3/(mol*s)'), n=1.63155, Ea=(4.2466,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds-CsH_Cds;HJ] for rate rule [Cds-CsH_Cds-OsCs;HJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction4',
reactants = ['H(3)', 'C#COO[C](CCCC)CCOO(29192)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(4278.27,'m^3/(mol*s)'), n=1.383, Ea=(15.1097,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Ct_Ct;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction5',
reactants = ['npropyl(83)', 'C=[C]OOC(=C)CCOO(29193)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(0.00238412,'m^3/(mol*s)'), n=2.47216, Ea=(17.7199,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds-HH_Cds;CsJ-CsHH] for rate rule [Cds-HH_Cds-OsCs;CsJ-CsHH]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction6',
reactants = ['CH2OOH(35)', 'C=[C]OOC(=C)CCCC(29194)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(182.434,'m^3/(mol*s)'), n=0.88, Ea=(33.1163,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds-HH_Cds;CsJ-OsHH] for rate rule [Cds-HH_Cds-OsCs;CsJ-OsHH]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction7',
reactants = ['C=[C]OOC([CH]CCC)CCOO(29195)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(1557.48,'s^-1'), n=2.79033, Ea=(132.312,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;C_rad_out_H/NonDeC;Cs_H_out] for rate rule [R2H_S;C_rad_out_H/NonDeC;Cs_H_out_OOH/Cs]
Euclidian distance = 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction8',
reactants = ['C=[C]OOC([CH]COO)CCCC(29196)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(1557.48,'s^-1'), n=2.79033, Ea=(132.312,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;C_rad_out_H/NonDeC;Cs_H_out] for rate rule [R2H_S;C_rad_out_H/NonDeC;Cs_H_out_OOH/Cs]
Euclidian distance = 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction9',
reactants = ['[CH]=COO[C](CCCC)CCOO(29197)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(1.08e+06,'s^-1'), n=1.99, Ea=(105.437,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 17 used for R2H_D;Cd_rad_out_singleH;Cd_H_out_singleNd
Exact match found for rate rule [R2H_D;Cd_rad_out_singleH;Cd_H_out_singleNd]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction10',
reactants = ['C=[C]OOC(C[CH]CC)CCOO(29198)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(1.05815e+09,'s^-1'), n=0.95, Ea=(148.741,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R3H_SS_Cs;C_rad_out_H/NonDeC;Cs_H_out] for rate rule [R3H_SS_Cs;C_rad_out_H/NonDeC;Cs_H_out_OOH/Cs]
Euclidian distance = 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction11',
reactants = ['C=[C]OOC(C[CH]OO)CCCC(29199)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(2e-15,'s^-1'), n=8.23, Ea=(142.674,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""Estimated using template [R3H_SS_Cs;C_rad_out_H/NonDeO;Cs_H_out] for rate rule [R3H_SS_Cs;C_rad_out_H/NonDeO;Cs_H_out_OOH/Cs]
Euclidian distance = 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction12',
reactants = ['C=[C]OOC(CC[CH]C)CCOO(29200)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(0.0013312,'s^-1'), n=4.0075, Ea=(46.4947,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_SSS;C_rad_out_H/NonDeC;Cs_H_out] for rate rule [R4H_SSS;C_rad_out_H/NonDeC;Cs_H_out_OOH/Cs]
Euclidian distance = 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction13',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=COO[C]([CH]CCC)CCOO(29201)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(1.9054e+11,'s^-1'), n=0.853, Ea=(200.196,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;Cd_rad_out_Cd;Cs_H_out_H/(NonDeC/Cs)] for rate rule [R5HJ_3;Cd_rad_out_Cd;Cs_H_out_H/(NonDeC/Cs)]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction14',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=COO[C]([CH]COO)CCCC(29202)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(2.54505e+10,'s^-1'), n=0.959062, Ea=(152.545,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;Cd_rad_out_Cd;Cs_H_out_H/NonDeC] for rate rule [R5HJ_3;Cd_rad_out_Cd;Cs_H_out_H/NonDeC]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction15',
reactants = ['[CH]=[C]OOC(CCCC)CCOO(29203)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(6.04e+10,'s^-1'), n=0.59, Ea=(135.98,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [RnH;Cd_rad_out_singleH;Cs_H_out_Cs2] for rate rule [R5HJ_1;Cd_rad_out_singleH;Cs_H_out_Cs2]
Euclidian distance = 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction16',
reactants = ['[CH2]CCCC(CCOO)OO[C]=C(29204)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(7877.47,'s^-1'), n=1.85167, Ea=(54.4094,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5H_CCC;C_rad_out_2H;Cs_H_out] for rate rule [R5H_CCC;C_rad_out_2H;Cs_H_out_OOH/Cs]
Euclidian distance = 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction17',
reactants = ['C=[C]OOC(CCCC)CCO[O](29205)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(57.9,'s^-1'), n=2.9, Ea=(71.128,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 298 used for R5H_SSSS_OCC;O_rad_out;Cs_H_out_OOH/Cs
Exact match found for rate rule [R5H_SSSS_OCC;O_rad_out;Cs_H_out_OOH/Cs]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction18',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=COO[C](C[CH]CC)CCOO(29206)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(1.9054e+11,'s^-1'), n=0.853, Ea=(200.196,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;Cd_rad_out_Cd;Cs_H_out_H/(NonDeC/Cs)] for rate rule [R6HJ_3;Cd_rad_out_Cd;Cs_H_out_H/(NonDeC/Cs)]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction19',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=COO[C](C[CH]OO)CCCC(29207)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(2.22732e+09,'s^-1'), n=1.14213, Ea=(136.313,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;Cd_rad_out_Cd;Cs_H_out] for rate rule [R6HJ_3;Cd_rad_out_Cd;Cs_H_out_OOH/H]
Euclidian distance = 2.82842712475
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction20',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=COO[C](CC[CH]C)CCOO(29208)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(2.54505e+10,'s^-1'), n=0.959062, Ea=(152.545,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;Cd_rad_out_Cd;Cs_H_out_H/NonDeC] for rate rule [R7HJ_3;Cd_rad_out_Cd;Cs_H_out_H/NonDeC]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction21',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['[CH2]CCC[C](CCOO)OOC=C(29209)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(5.59786e+07,'s^-1'), n=1.58088, Ea=(142.57,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;Cd_rad_out_Cd;Cs_H_out_2H] for rate rule [R8Hall;Cd_rad_out_Cd;Cs_H_out_2H]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction22',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=COO[C](CCCC)CCO[O](29210)'],
transitionState = 'TS22',
kinetics = Arrhenius(A=(4.81182e+08,'s^-1'), n=1.25566, Ea=(151.659,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;Cd_rad_out_Cd;XH_out] for rate rule [R8Hall;Cd_rad_out_Cd;O_H_out]
Euclidian distance = 1.41421356237
family: intra_H_migration"""),
)
reaction(
label = 'reaction23',
reactants = ['C=[C][O](173)', 'CCCC[C]([O])CCOO(16758)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS23',
kinetics = Arrhenius(A=(7.46075e+06,'m^3/(mol*s)'), n=0.027223, Ea=(35.3895,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -14.4 to 35.4 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction24',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=COOC(=CCCC)CCOO(29211)'],
transitionState = 'TS24',
kinetics = Arrhenius(A=(6.42e+09,'s^-1'), n=0.137, Ea=(60.668,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R5;Y_rad_De;XH_Rrad_NDe] for rate rule [R5radEndo;Y_rad_De;XH_Rrad_NDe]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction25',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=COOC(=CCOO)CCCC(29212)'],
transitionState = 'TS25',
kinetics = Arrhenius(A=(6.42e+09,'s^-1'), n=0.137, Ea=(60.668,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R5;Y_rad_De;XH_Rrad_NDe] for rate rule [R5radEndo;Y_rad_De;XH_Rrad_NDe]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction26',
reactants = ['CH2(S)(23)', 'C=[C]OO[C](CCC)CCOO(9247)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS26',
kinetics = Arrhenius(A=(1.31021e+06,'m^3/(mol*s)'), n=0.189, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [carbene;C_pri] for rate rule [carbene;C_pri/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: 1,2_Insertion_carbene
Ea raised from -1.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction27',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['OH(5)', 'C=[C]OOC1(CCCC)CCO1(29213)'],
transitionState = 'TS27',
kinetics = Arrhenius(A=(3.31e+11,'s^-1','*|/',1.74), n=0, Ea=(75.8559,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R3OO_SS;C_rad/NonDeC_intra;OOH] for rate rule [R3OO_SS;C_rad/NDMustO_intra;OOH]
Euclidian distance = 1.0
family: Cyclic_Ether_Formation"""),
)
reaction(
label = 'reaction28',
reactants = ['[CH2]C(CCC)(CCOO)OO[C]=C(29214)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS28',
kinetics = Arrhenius(A=(1.33e+08,'s^-1'), n=1.36, Ea=(157.318,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [cCs(-R!HR!H)CJ;CsJ-HH;C]
Euclidian distance = 0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction29',
reactants = ['[CH2]C(CCCC)(COO)OO[C]=C(29215)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS29',
kinetics = Arrhenius(A=(1.33e+08,'s^-1'), n=1.36, Ea=(157.318,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [cCs(-R!HR!H)CJ;CsJ-HH;C]
Euclidian distance = 0
family: 1,2_shiftC"""),
)
reaction(
label = 'reaction30',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=C1OOC1(CCCC)CCOO(29216)'],
transitionState = 'TS30',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_SSS;C_rad_out_single;Ypri_rad_out] for rate rule [R4_SSS;C_rad_out_Cs2;Ypri_rad_out]
Euclidian distance = 3.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction31',
reactants = ['C=[C]OO[C](CCCC)CCOO(29153)'],
products = ['C=[C]OOC(O)(CC[O])CCCC(29217)'],
transitionState = 'TS31',
kinetics = Arrhenius(A=(4.79e+10,'s^-1'), n=0, Ea=(110.039,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R3OOH_SS;C_rad_out_NonDe] for rate rule [R3OOH_SS;C_rad_out_NDMustO]
Euclidian distance = 1.0
family: intra_OH_migration"""),
)
reaction(
label = 'reaction32',
reactants = ['C=[C]O[O](110)', 'CCCC[C]CCOO(28384)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS32',
kinetics = Arrhenius(A=(1355.7,'m^3/(mol*s)'), n=1.40819, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [O_rad/NonDe;Birad]
Euclidian distance = 0
family: Birad_R_Recombination
Ea raised from -12.0 to 0 kJ/mol."""),
)
reaction(
label = 'reaction33',
reactants = ['H2CC(41)', 'CCCC[C](CCOO)O[O](28369)'],
products = ['C=[C]OO[C](CCCC)CCOO(29153)'],
transitionState = 'TS33',
kinetics = Arrhenius(A=(1355.7,'m^3/(mol*s)'), n=1.40819, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [O_rad/NonDe;Birad]
Euclidian distance = 0
family: Birad_R_Recombination
Ea raised from -12.0 to 0 kJ/mol."""),
)
# Pressure-dependent network: the single unimolecular isomer explored and its
# bimolecular product channel, thermalized in an equimolar N2/Ne bath gas
# (the two bath species defined above).
network(
    label = '4663',
    isomers = [
        'C=[C]OO[C](CCCC)CCOO(29153)',
    ],
    reactants = [
        ('CH2CO(28)', 'CCCCC(=O)CCOO(16766)'),
    ],
    bathGas = {
        'N2': 0.5,
        'Ne': 0.5,
    },
)
# Master-equation solver settings for network 4663: 8 temperatures over
# 300-2000 K and 5 pressures over 0.01-100 bar, solved with the modified
# strong collision approximation and fitted to a 6x4 Chebyshev polynomial.
pressureDependence(
    label = '4663',
    Tmin = (300,'K'),
    Tmax = (2000,'K'),
    Tcount = 8,
    Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
    Pmin = (0.01,'bar'),
    Pmax = (100,'bar'),
    Pcount = 5,
    Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
    maximumGrainSize = (0.5,'kcal/mol'),
    minimumGrainCount = 250,
    method = 'modified strong collision',
    interpolationModel = ('Chebyshev', 6, 4),
    activeKRotor = True,
    activeJRotor = True,
    rmgmode = True,
)
|
# Main program for whole thing
from hero import hero
from minion import minion
import random
def main():
    """Drive the interactive flow: collect both heroes' health and minions,
    offer health edits, then simulate random damage and print kill chances."""
    print("Welcome to heartstone chance calculator.")
    print("Initializing teams.")
    # Class initialization
    hero1 = hero()
    hero2 = hero()
    # We will keep these as numbers right now
    hero1Minions = 0
    hero2Minions = 0
    # These will be the lists of minions of each hero
    hero1List = []
    hero2List = []
    damageCount = 0 # Amount of random damage we want to do
    # Initializing hero's info
    # Hero1 health part
    hero1H = heroInput("one")
    hero1.health = hero1H
    # Hero1 minion part
    hero1Minions = minionInput("one")
    minionInitializer("one", hero1Minions, hero1List)
    # Hero2 health part
    hero2H = heroInput("two")
    hero2.health = hero2H
    # Hero2 minion part
    hero2Minions = minionInput("two")
    minionInitializer("two", hero2Minions, hero2List)
    # Prints summary of data entered for both heroes' board states
    summary(hero1, hero2, hero1List, hero2List)
    # For editing minion health
    edit(hero1List, hero2List)
    damageCount = dmg()
    # NOTE(review): chosenHero is hard-wired to hero1; the result labels in
    # heroPercentage ("Hero 1 dies ...") rely on that convention.
    heroPercentage(hero1, hero2, hero1List, hero2List, hero1, damageCount)
# Takes in team name and returns the input,
# an int for the hero's health
def heroInput(team):
    """Prompt until the user enters an integer hero health for `team`.

    BUG FIX: the original retried by recursing into itself on every invalid
    input, which grows the call stack without bound; a loop is equivalent and
    safe.
    """
    while True:
        try:
            return int(input("Enter hero " + team + "'s health: "))
        except ValueError:
            print("Sorry not an integer.")
# Takes in team name and returns the input,
# an int for the amount of minion's for that hero
def minionInput(team):
    """Prompt until the user enters an integer minion count for `team`.

    BUG FIX: same unbounded-recursion retry as heroInput, replaced by a loop.
    """
    while True:
        try:
            return int(input("Enter amount of hero " + team + "'s minions: "))
        except ValueError:
            print("Sorry not an integer.")
# Takes in team and number of minions, and
# initializes those number of minions for that team
def minionInitializer(team, number, minionList):
    """Append `number` minions (1 health each, named team+index) to minionList.

    BUG FIX: the original compared strings with `is`, which tests object
    identity and only works by accident of CPython string interning; use `==`.
    The two branches were also byte-identical, so they are merged.
    """
    if team == "one" or team == "two":
        for x in range(1, number + 1):
            teamMinion = team + str(x)
            minionList.append(minion(1, teamMinion))
# Takes in 2 heros, and 2 sets of minions in lists
# and displays a summary of entered
def summary(hero1, hero2, minionList1, minionList2):
    """Print a board-state summary: each hero's health followed by its minions."""
    print("\n")
    print("----------Summary----------")
    print("Team One-------------------")
    print("Hero 1: " + str(hero1.health) + " health")
    for position, entry in enumerate(minionList1, start=1):
        print("Minion " + str(position) + ": " + str(entry))
    print("Team Two-------------------")
    print("Hero 2: " + str(hero2.health) + " health")
    for position, entry in enumerate(minionList2, start=1):
        print("Minion " + str(position) + ": " + str(entry))
# Asks if person wants to edit minion health's
# Answer is y/n
def edit(minionList1, minionList2):
    """Interactively let the user overwrite individual minion health values on
    either team, looping (via the nested helpers) until they decline."""
    # this is for going through the list and editing the specific minion healths
    def listIterate(minionList):
        for x in minionList:
            try:
                minionChoice = int(input(
                    "Enter minion " + str(minionList.index(x) + 1) + "'s health: "))
            except ValueError:
                print("Sorry not an integer.")
                # NOTE(review): this single retry is not itself validated — a
                # second non-integer input raises ValueError out of edit().
                minionChoice = int(input(
                    "Enter minion " + str(minionList.index(x) + 1) + "'s health: "))
            x.health = minionChoice
    # this is for choosing the team to edit
    def team():
        teamChoice = input("Which team do you want to edit? (1/2): ")
        if teamChoice == "1":
            print("Editing team 1.")
            listIterate(minionList1)
        elif teamChoice == "2":
            print("Editing team 2.")
            listIterate(minionList2)
        else:
            # recurse until the user supplies "1" or "2"
            print("Please enter 1 or 2.")
            team()
    # for choosing whether or not you want to edit again
    def again():
        doAgain = input("Do you want to edit again? (y/n): ")
        if doAgain == "y":
            team()
        elif doAgain == "n":
            print("n chose.")
        else:
            print("Please enter y or n.")
            again()
    # Main portion of the edit
    print("\n")
    answer = input("Do you want to edit minion health? (y/n): ")
    if answer == "y":
        team()
        again()
    elif answer == "n":
        print("Proceeding to chance query.")
    else:
        # invalid answer: restart the whole edit dialogue
        print("Please enter y or n.")
        edit(minionList1, minionList2)
# Takes in two heroes, two lists, chosen hero, and random damage, and outputs
# percentage time hero is killed and percentage each time each minion is killed
# over n amount of trials (in this case 100000)
def heroPercentage(hero1, hero2, minionList1, minionList2, chosenHero, damage):
    """Monte-Carlo: deal `damage` single points of damage to random targets per
    trial and report how often each hero and each minion dies over n trials.

    NOTE(review): the target dict mixes hero OBJECTS and minion NAME STRINGS as
    keys. The string test `"one" in randomTarget` below only avoids a TypeError
    on a hero-object key because a dead hero always breaks out of the inner
    loop first (health is decremented by exactly 1, so it hits 0, never skips
    past it). The printed labels also assume chosenHero is hero1.
    """
    # We will put the potential targets in a dictionary
    # With this set we will run n amount of trials to figure out
    # the percentage of times a chosen hero is killed
    # The original dictionary where everything is added
    # Keys will be the minion name and values will be the minion's health
    ourDict = {}
    k = 0 # Amount of times the chosen hero is killed
    d = 0 # Amount of times the other hero is killed
    n = 100000 # Amound of trials we will perform
    # These 2 lists will hold counters for each time a minion has died
    list1 = [] # For hero 1
    list2 = [] # For hero 2
    # Initialize minion death counters to zero for each list
    for x in minionList1:
        list1.append(0)
    for x in minionList2:
        list2.append(0)
    # Add everything to set
    ourDict[hero1] = hero1.health
    ourDict[hero2] = hero2.health
    for x in minionList1:
        ourDict[x.name] = x.health
    for x in minionList2:
        ourDict[x.name] = x.health
    # NOTE(review): this assignment is redundant — trialDict is re-copied at
    # the top of every trial below.
    trialDict = ourDict # The set we will be performing trials on
    # Actual trials
    for x in range(n):
        trialDict = ourDict.copy()
        # Do damage to random targets
        for y in range(1, damage + 1):
            # Get a random minion in dictionary
            randomTarget = random.choice(list(trialDict))
            # Subtract one from the target's health
            trialDict[randomTarget] = trialDict[randomTarget] - 1
            # Check if chosen hero is dead. If they are break and add one to k
            if trialDict[chosenHero] == 0:
                k = k + 1
                break
            # Break if other hero is dead
            if trialDict[hero1] == 0 or trialDict[hero2] == 0:
                d = d + 1
                break
            # If target is dead remove from the dict
            if trialDict[randomTarget] == 0:
                # If target is minion from list1
                if "one" in randomTarget:
                    # Get number at the end of the minion name ("one<idx>")
                    number = int(randomTarget[3:len(randomTarget)+1])
                    # Add one to its counter
                    list1[number-1] = list1[number-1] + 1
                    trialDict.pop(randomTarget, None)
                # If target is minion from list2
                else:
                    # Get number at end of minion name ("two<idx>")
                    number = int(randomTarget[3:len(randomTarget)+1])
                    # Add one to its counter
                    list2[number-1] = list2[number-1] + 1
                    trialDict.pop(randomTarget, None)
    # Return the percentage of k/n
    print("\n")
    print("Results--------------------")
    print("Hero 1 dies " + str((k/n) * 100) + "% of the time with " +
          str(damage) + " random damage.")
    print("Hero 1 Minions-------------")
    for x in range(0, len(list1)):
        print("Minion " + str(x + 1) + " dies " +
              str((list1[x]/n) * 100) + "% of the time.")
    print("---------------------------")
    print("Hero 2 dies " + str((d/n) * 100) + "% of the time with " +
          str(damage) + " random damage.")
    print("Hero 2 Minions-------------")
    for x in range(0, len(list2)):
        print("Minion " + str(x + 1) + " dies " +
              str((list2[x]/n) * 100) + "% of the time.")
# Asks how much damage and returns that number
def dmg():
    """Prompt until the user enters an integer amount of random damage.

    BUG FIX: the original retried by recursing into itself on every invalid
    input; a loop is equivalent and cannot exhaust the call stack.
    """
    while True:
        try:
            print("\n")
            count = int(input("How much random damage do you want to do?: "))
            print("Proceeding to chance query.")
            return count
        except ValueError:
            print("Sorry not an integer.")
# Run the interactive calculator only when executed as a script.
if __name__ == "__main__":
    main()
|
# -*- coding: utf-8 -*-
from odoo import models, fields, api
from calendar import monthrange
class PosOrder(models.Model):
    """POS order extension: attaches the customer's CRM rank and applies
    VIP rank-based discounts to the order lines."""
    _inherit = 'pos.order'
    # Customer's VIP rank, defaulted from the selected partner.
    x_rank_id = fields.Many2one('crm.customer.rank', "Rank")
    @api.onchange('partner_id')
    def onchange_partner(self):
        # Mirror the partner's rank onto the order (False when no partner).
        self.x_rank_id = self.partner_id.x_rank_id.id if self.partner_id else False
    # VIP discount on the order
    def action_compute_order_discount(self):
        """Apply the partner's rank discounts to every eligible order line.

        Exception-listed products get their own discount percentage with an
        optional absolute cap; otherwise service/product lines get a flat
        extra percentage from the rank.
        """
        if self.lines:
            # for line in self.lines:
            #     if line.discount != 0 and not line.x_promotion_reason_id:
            #         line.update({'discount': 0})
            # Collect the exception products discounted specially for the
            # customer's VIP rank; keyed by product id, with '<id>_amount'
            # holding the per-line absolute discount cap.
            except_dict = {}
            for product in self.partner_id.x_rank_id.except_product_ids:
                except_dict[product.product_id.id] = product.discount
                except_dict['%s_amount' % product.product_id.id] = product.max_amount
            discount_service = self.partner_id.x_rank_id.discount_service
            discount_product = self.partner_id.x_rank_id.discount_product
            discount_except = len(self.partner_id.x_rank_id.except_product_ids)
            for line in self.lines:
                # if line.x_promotion_reason_id:
                #     continue
                # Skip gift/loyalty card lines and zero-value lines.
                if line.product_id.x_card_type != 'none':
                    continue
                if line.price_subtotal_incl == 0:
                    continue
                # Product is on the exception list
                if discount_except and line.product_id.id in except_dict:
                    key = '%s_amount' % line.product_id.id
                    x_discount = except_dict[line.product_id.id] * (line.price_subtotal_incl) / 100.0
                    if key in except_dict:
                        # Enforce the maximum discount amount cap
                        max_amount = except_dict[key]
                        if max_amount and max_amount < x_discount:
                            x_discount = max_amount
                    # Convert the discount amount into a percentage of the line
                    line.discount = round(x_discount * 100.0 / (line.price_unit * line.qty), 4)
                # Service lines: add the rank's flat service discount
                elif discount_service > 0 and line.product_id.type == 'service':
                    line.discount += discount_service
                # Stockable product lines: add the rank's flat product discount
                elif discount_product > 0 and line.product_id.type == 'product':
                    line.discount += discount_product
    # def add_revenue(self, partner_id, revenue, date_order):
    #     ResPartnerRevenue = self.env['res.partner.revenue']
    #     # Create or update in res_partner_revenue
    #     last_date = monthrange(date_order.year, date_order.month)[1]
    #     date_revenue = date_order.strftime('%Y-%m')
    #     partner_revenue = ResPartnerRevenue.search([('partner_id', '=', partner_id),
    #                                                 ('revenue_date', '>=', date_revenue + '-01'),
    #                                                 ('revenue_date', '<=', date_revenue + '-' + str(last_date))],
    #                                                limit=1)
    #     if partner_revenue:
    #         new_revenue = partner_revenue.revenue + revenue
    #         partner_revenue.write({'revenue': new_revenue,
    #                                'revenue_date': date_order})
    #     else:
    #         ResPartnerRevenue.create({'partner_id': partner_id,
    #                                   'revenue': revenue,
    #                                   'revenue_date': date_order})
    #     self.env['partner.expected.revenue'].add_cumulative_revenue(partner_id)
    # Revenue generation (disabled)
    # @api.multi
    # def action_customer_confirm_order(self):
    #     res = super(PosOrder, self).action_customer_confirm_order()
    #     self.add_revenue(self.partner_id.id, self.x_revenue, self.date_order)
    #     return res
    # Refund handling (disabled)
    # @api.multi
    # def done_refund(self):
    #     res = super(PosOrder, self).done_refund()
    #     amount_revenue = self.x_revenue if self.x_revenue < 0 else -self.x_revenue
    #     self.add_revenue(self.partner_id.id, amount_revenue, self.date_order)
    #     return res
|
import tornado.web
class ImgSlide(tornado.web.UIModule):
    """Tornado UI module rendering the image-slideshow template with the
    given post info bound as `post_info`."""
    def render(self,info):
        return self.render_string('modules/img_slide.html', post_info=info)
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
from django.db.models.base import Model
from django.db.models.deletion import DO_NOTHING
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.http import Http404
import datetime as dt
import geocoder
#from django_google_maps import fields as map_fields
# Create your models here.
class Profile(models.Model):
    """Per-user profile, created automatically by the post_save signal below."""
    user=models.OneToOneField(User,on_delete=models.CASCADE)
    # BUG FIX: ImageField's first positional argument is verbose_name, not the
    # storage path; 'profile_image/' was clearly intended as upload_to.
    # (Requires a migration.)
    profile_image=models.ImageField(upload_to='profile_image/',blank=True)
    bio=models.TextField( blank=True, null=True, default='My Bio')
    # location=models.PointField()
    def __str__(self):
        return f'{self.user.username} Profile'
        #return self.user.username
    def save_profile(self):
        """Persist this profile."""
        self.save()
    def delete_profile(self):
        """Remove this profile."""
        self.delete()
@receiver(post_save,sender=User)
def create_user_profile(sender,instance,created,**kwargs):
    """Create a Profile automatically when a new User row is inserted."""
    if created:
        Profile.objects.create(user=instance)
@receiver(post_save,sender=User)
def save_user_profile(sender,instance,**kwargs):
    """Persist the related Profile whenever its User is saved."""
    print("signal activated---->>>", dir(instance))  # leftover debug output; consider removing
    # BUG FIX: the original referenced the bound method `instance.profile.save`
    # without calling it, so the profile was never actually saved.
    instance.profile.save()
class Post(models.Model):
    """User post with a name, description, optional image and publish date."""
    name=models.CharField(max_length=50)
    description=models.TextField()
    # NOTE(review): as with Profile, the first positional argument here is
    # verbose_name, not upload_to — confirm intent before changing the schema.
    image=models.ImageField('post_image/',blank=True)
    # location=models.ForeignKey(Profile, related_name='profile',on_delete=DO_NOTHING,null=True,blank=True)
    pub_date=models.DateTimeField(auto_now_add=True)
    def save_post(self):
        """Persist this post."""
        self.save()
    def delete_post(self):
        """Remove this post."""
        self.delete()
    @classmethod
    def get_posts(cls):
        """Return all posts.

        BUG FIX: this was also named get_post, so it was silently shadowed by
        the get_post(id) classmethod below and could never be called.
        """
        return cls.objects.all()
    @classmethod
    def search_post(cls,search_term):
        """Case-insensitive search of posts by name.

        BUG FIX: 'post_name__icontain' is not a valid lookup — the field is
        `name` and the lookup suffix is `icontains`; the original raised
        FieldError on every call.
        """
        return cls.objects.filter(name__icontains=search_term)
    @classmethod
    def get_by_author(cls,author):
        # NOTE(review): Post declares no `author` field; this raises FieldError
        # as written — confirm the intended relation before relying on it.
        post=cls.objects.filter(author=author)
        return post
    @classmethod
    def today_riot(cls):
        """Return posts published today."""
        today=dt.date.today()
        posts=cls.objects.filter(pub_date__date=today)
        return posts
    @classmethod
    def get_post(request,id):
        """Return the post with primary key `id`, or raise Http404."""
        try:
            post=Post.objects.get(pk=id)
            return post
        except ObjectDoesNotExist:
            raise Http404()
    def __str__(self):
        return self.name
class Location(models.Model):
    """Named, unique location; listed alphabetically by default."""
    name = models.CharField(max_length=60,unique=True)
    class Meta:
        ordering = ["name"]
    @classmethod
    def get_locations(cls):
        """Return all locations."""
        locations = Location.objects.all()
        return locations
    @classmethod
    def update_location(cls, id, value):
        # NOTE(review): this updates an `image` field that Location does not
        # define, so it raises FieldError as written — probably meant
        # `name=value`; confirm before use.
        cls.objects.filter(id=id).update(image=value)
    def save_location(self):
        """Persist this location."""
        self.save()
    def delete_location(self):
        """Remove this location."""
        self.delete()
    def __str__(self):
        return self.name
class Hotspot(models.Model):
    """Point of interest tied to an optional Location."""
    name= models.CharField(max_length=200)
    location = models.ForeignKey(Location, related_name='location', on_delete=models.DO_NOTHING, null=True, blank=True)
    city=models.CharField(max_length=100)
    # NOTE(review): first positional argument of ImageField is verbose_name,
    # not upload_to — confirm intent before changing the schema.
    image=models.ImageField('hotspot_image',blank=True)
    def save_hotspot(self):
        """Persist this hotspot."""
        self.save()
    def delete_hotspot(self):
        """Remove this hotspot."""
        self.delete()
    @classmethod
    def get_hotspot(cls):
        """Return all hotspots."""
        hotspot=cls.objects.all()
        return hotspot
    @classmethod
    def search_hotspot(cls,search_term):
        """Case-insensitive search of hotspots by name.

        BUG FIX: 'hotspot_name__icontain' is not a valid lookup — the field is
        `name` and the lookup suffix is `icontains`; the original raised
        FieldError on every call.
        """
        return cls.objects.filter(name__icontains=search_term)
    def __str__(self):
        return self.name
class Work(models.Model):
    """Workplace directory entry (name, address, city)."""
    name=models.CharField(max_length=50)
    address=models.CharField(max_length=30)
    city=models.CharField(max_length=50)
    def __str__(self):
        return self.name
class Hospital(models.Model):
    """Hospital directory entry (name, address, city)."""
    name=models.CharField(max_length=50)
    address=models.CharField(max_length=30)
    city=models.CharField(max_length=50)
    def __str__(self):
        return self.name
class Police(models.Model):
    """Police-station directory entry (name, address, city)."""
    name=models.CharField(max_length=50)
    address=models.CharField(max_length=30)
    city=models.CharField(max_length=50)
    def __str__(self):
        return self.name
class Data(models.Model):
    """Geocoded country/town record; lat/lng are filled on save."""
    country=models.CharField(max_length=100, null=True)
    town=models.CharField(max_length=100,null=True)
    location=models.PositiveBigIntegerField(null=True)
    latitude=models.FloatField(default=0)
    longitude=models.FloatField(default=0)
    def __str__(self):
        return self.country
    # BUG FIX: save() was nested inside a stray `class meta:` block, so it was
    # never attached to the model and the geocoding never ran. Define it
    # directly on Data and geocode once instead of twice.
    def save(self, *args, **kwargs):
        result = geocoder.osm(self.country)
        self.latitude = result.lat
        self.longitude = result.lng
        return super().save(*args, **kwargs)
|
import csv
import re
from cassandra.cluster import Cluster
from datetime import *
# Bulk-load gun_ownership.csv into Cassandra in batches of 100 inserts.
cluster = Cluster(['172.17.0.2'])
session = cluster.connect('test')
sql = "BEGIN BATCH \n"
with open('gun_ownership.csv') as tsvfile:
    reader = csv.reader(tsvfile)
    header = False  # becomes True after the header row is skipped
    counter = 0
    for row in reader:
        if not header:
            header = True
        else:
            state = row[0]
            gun_ownership = row[1]
            population = row[2]
            # NOTE(review): values are spliced into the CQL string directly;
            # prefer prepared statements / bind parameters for untrusted input.
            sql_single = 'INSERT INTO gun_ownership(state,gun_ownership,population) VALUES (' + '\''+state+'\'' + ','+str(gun_ownership)+','+str(population)+');\n'
            sql += sql_single
            counter += 1
            # Flush a full batch of 100 inserts and start a new one.
            if counter % 100 == 0:
                sql += "APPLY BATCH;"
                session.execute(sql)
                sql = "BEGIN BATCH \n"
    # Flush the final partial batch, if any rows remain.
    if sql != "BEGIN BATCH \n":
        sql += "APPLY BATCH;"
        session.execute(sql)
    # BUG FIX: removed an unconditional second session.execute(sql) that either
    # re-ran the final batch (duplicating inserts) or sent an incomplete
    # "BEGIN BATCH" statement when the row count was a multiple of 100.
    print(counter)
cluster.shutdown()
# try and except -- exception handling: walk the denominator down through zero
# so both the ZeroDivisionError branch and the finally clause are exercised.
a = 2
for x in range(2, -2, -1):
    try:
        a/x
    except ZeroDivisionError:
        print("This is inside the exception")
        print("{0},{1} - 0 division error".format(a,x))
    finally:
        print("this is executed always")
        print("{0},{1} - 0 - always executes".format(a,x))
# enumerate the key and value in collections
"""
collections:
1. sequences : List and Tuple
2.sets : set
3.mappings: Dictionary
"""
lis = [1,25,6,8889,77,"agg"]
# Build "key<i>" -> element using enumerate inside a dict comprehension.
dic = {"key" + str(pos): item for pos, item in enumerate(lis)}
print(dic)
# class and objects, operator/method overloading
class ComputePower:
    """Toy class demonstrating __str__/__repr__/__gt__ overloading."""

    def __init__(self, ram, cpu):
        self.ram = ram
        self.cpu = cpu

    def isEnough(self):
        """Verdict string: enough only when ram > 8 and cpu > 2."""
        sufficient = self.ram > 8 and self.cpu > 2
        return "yes, it can handle!!" if sufficient else "Rebuild the system!!"

    # operator/method overloading
    def __str__(self):
        return "computing power : {0},{1}".format(self.ram,self.cpu)

    def __repr__(self):
        return "ComputePower({0},{1})".format(self.ram,self.cpu)

    def __gt__(self, other):
        """Compare by RAM only; returns a descriptive string, not a bool."""
        if self.ram > other.ram:
            return "{0} is better than the {1}".format(self,other)
        return "{1} is better than the {0}".format(self,other)
obj = ComputePower(1,8)
obj.isEnough()  # return value discarded; demonstration only
str(obj) # this will execute __str__()
obj # this will execute the __repr__() -- representation (only echoed in a REPL)
dir(obj)
Asus = ComputePower(1,4)
HP = ComputePower(8,2)
print(Asus>HP)
# memory management
var1 = 10
var2 = var1
print("address of var1 = {0}\n address of var2 = {1}".format(id(var1),id(var2)))
print(var1 is var2) # check the address and return boolean
print(var1 == var2)# check the value and return true if equal
"""
Mutable objects or structures : List, Sets ,Dict
Immputable objects or structures : Int, Float, Bool,Tuple, String
"""
# Immutable objects and shared references
text = "Data is big."
data = text
def check_sharedref():
    """Print the addresses of module-level `text` and `data` and whether they
    currently share one object (note: returns a string only in the else case)."""
    print("address of text = {0}\n address of data = {1}".format(id(text),id(data)))
    if text is data:
        print("shared reference")
    else:
        return "No shared reference"
check_sharedref()
data = text + "but can be interpreted wasily with the help of anlytical tools!!!"
check_sharedref()
# List is mutable and no shared reference when same values assigned to different variable names/identifier
li = [1,3,11]
li_dup = [1,3,11]
print("address of text = {0}\n address of data = {1}".format(id(li),id(li_dup)))
li = [1,1]
li_dup = li
print("address of text = {0}\n address of data = {1}".format(id(li),id(li_dup)))
li_dup.append(10) # this will effect both li and li_dup as they are sharing reference to the same memory location where the list object is stored
print(li,li_dup)
# Tuple is immutable sequence but sometimes elements of tuple are mutable
tu = (1,1)
# BUG FIX: the bare `tu[0] = 10` raised an uncaught TypeError and aborted the
# entire script, so nothing after this line ever ran; demonstrate the
# immutability without crashing.
try:
    tu[0] = 10 #integers are immutable, it should throw an error
except TypeError as err:
    print("tuple item assignment failed:", err)
tu = ([1,1],'a')
tu[0].append(20) # first element is list and it can be mutable
print(tu) #
# function use shared reference while passing a parameter
import sys
import ctypes
def concat(string):
    """Show that the argument arrives by shared reference: print its refcounts
    and compare the addresses of the original and the newly-built string."""
    print('reference count of string - sys',sys.getrefcount(string))
    print('reference count of string - ctype',ctypes.c_long.from_address(id(string)))
    new = string + "hello"
    print("address of string = {0}\n address of new = {1}".format(id(string),id(new)))
    print(new)
    return
s = "hey!"
concat(s) # id(string) is copied and passed to the function's local variable string.
"""
module space: s --->hey(address = 111)
function space: string -----hey(address = 111)
new = 'hey! hello' ---->(address = 115)
"""
a = "abcd"
b = a # and b have shared references
sys.getrefcount(a)-1
b = a+ "abcds" # pointing to a new memory location
sys.getrefcount(a)-1
a = None
print(sys.getrefcount(a)-1 and a is None) # true
""" Sorted is a built in function which takes iterable and returns list in sorted order -->Ascending
"""
#custom sorting
def order_(a:str):
"""accept the string and assign some order to it"""
if a == 'a':
a = 100
elif a == 'b':
a = 99
elif a == 'z':
a = 0
return a
l = ['a','b','a','z']
sorted(l, key = lambda a: order_(a))
# randomized sorting using the random module
import random
lis = [2,1,4,785,445,56,4,56,885,44,588,6631,4475,4477888955547]
# IDIOM FIX: use a context manager so the file handle is closed even if a
# write raises.
# NOTE(review): hard-coded absolute Windows path; parameterize for portability.
with open(r"C:\Users\Naren\Desktop\random_sort.txt",'w+') as file:
    for i in range(10000):
        file.write(str(sorted(lis,key = lambda a:random.random())) + '\n')
# customized sorting using the function
lis = [1,2,3,4]
def sort_(a):
    """Sort key: values >= 3 rank first (key 0), others keep their own value."""
    # CLEANUP: the original repeated this constant-time check len(lis) times in
    # a loop that never changed the outcome; one check is equivalent (lis is
    # non-empty here).
    return 0 if a >= 3 else a
sorted(lis,key = sort_)
def grade(x):
    """Map an integer score in [0, 100] to a letter grade; None when out of range.

    CLEANUP: the original redefined this function on every pass of the input
    loop; defining it once at module level is equivalent and importable.
    """
    if x in range(0, 101):
        if x >= 90:
            if x < 94:
                return 'A-'
            elif x < 97:
                return 'A'
            elif x <= 100:
                return 'A+'
        elif x >= 80:
            if x < 84:
                return 'B-'
            elif x < 87:
                return 'B'
            elif x <= 89:
                return 'B+'
        elif x >= 70:
            if x < 74:
                return 'C-'
            elif x < 78:
                return 'C'
            elif x <= 79:
                return 'C+'
        elif x >= 60:
            if x < 64:
                return 'D-'
            elif x < 68:
                return 'D'
            elif x <= 69:
                return 'D+'
        else:
            return 'F'

def _prompt_loop():
    """Prompt until a valid 0-100 score is entered, then print its grade."""
    while True:
        try:
            score = int(input('Please enter your grade between 0 and 100\n'))
            # BUG FIX: the original used `score > 0`, rejecting a valid score
            # of 0 even though grade() maps it to 'F'.
            if 0 <= score <= 100:
                print(grade(score))
                break
        except Exception as exc:
            print(exc)

if __name__ == '__main__':
    _prompt_loop()
"""
function helpers for user checkups
"""
from bson.objectid import ObjectId
from src.commons.errors import InvalidUser
def is_active(mongo, user_id):
    """
    Check if a current user is active or not
    :param mongo: Database connection
    :param user_id: User ID (hex string convertible to ObjectId)
    :return: bool
    :raises InvalidUser: when no user matches (via get_user)
    """
    user = get_user(mongo, {"_id": ObjectId(user_id)})
    # KeyError if the record lacks an "active" field — assumed always present;
    # confirm against the users schema.
    return user["active"]
def is_master(mongo, user_id):
    """
    Check whether `user_id` identifies the master admin — taken to be the
    first user record in the collection.
    :param mongo: Database connection
    :param user_id: User id
    :return: truthy when the ids match (None when no user exists at all)
    """
    master = mongo.users.find_one()
    return master and str(master["_id"]) == user_id
def ensure_manager(mongo, user_id, manager_id):
    """
    Ensure that the user's recorded manager matches the given manager id.
    :param mongo: Database connection
    :param user_id: Current user whose manager is being checked
    :param manager_id: Manager ID to compare against
    :return: bool (False when the user has no manager recorded)
    """
    record = get_user(mongo, {"_id": ObjectId(user_id)})
    if "manager_id" in record:
        return str(record["manager_id"]) == manager_id
    return False
def get_user(mongo, filters=None, qry_fields=None):
    """
    Fetch a single user record.
    :param mongo: Database connection
    :param filters: Query filters (defaults to match-anything)
    :param qry_fields: Projection of fields to return, when truthy
    :return: user record
    :raises InvalidUser: when no matching record exists
    """
    if not filters:
        filters = {}
    find_one = mongo.users.find_one
    record = find_one(filters) if not qry_fields else find_one(filters, qry_fields)
    if not record:
        raise InvalidUser()
    return record
def get_all_users(mongo, filters=None, qry_fields=None):
    """
    Return all available users matching the filters.
    :param mongo: Database Connection
    :param filters: Data filters (defaults to match-anything)
    :param qry_fields: Projection of fields to include/exclude, when truthy
    :return: cursor/list of users
    """
    query = filters if filters else {}
    if qry_fields:
        return mongo.users.find(query, qry_fields)
    return mongo.users.find(query)
|
#!/usr/bin/env python2.7
import pandas as pd
import os
def topXbinTrans(df,top):
    '''
    Bin a single-column pandas Series into three groups by value: the `top`
    largest entries become 2, the `top` smallest become 0, and everything in
    between becomes 1. (The original docstring said 3/2/1; the code has always
    produced 2/1/0.)
    :param df: pandas Series of numeric scores
    :param top: number of entries in each extreme bin
    :return: pandas Series, sorted descending, with original index labels
    '''
    total = df.shape[0]
    ranked = df.sort_values(ascending=False)
    ranked.iloc[:top] = 2
    ranked.iloc[total - top:total] = 0
    ranked.iloc[top:-top] = 1
    return pd.Series(ranked)
def fromNodejs(parm):
    # Thin pass-through to reflection(); presumably the entry point invoked
    # from the Node.js bridge — confirm with the caller.
    return reflection(parm)
def reflection(parm):
'''
:param parm: {
"datapath": relfection_data_file_name
"toMapId" : 'sample' or 'feature'
"node_ids" = [ id1,...,idn ]
"out_file"=outputfile_path
}
:return: writes tab delimited output file, describing the highest and lowest node reflection
'''
#bigger matrix requires read in chunks haven't implemented yet.
#TODO: implement 'read in chunks' for large dataframes.
bigboy = (parm['toMapId'] == 'GtexFeature' or parm['toMapId'] == 'GtexSample')
if (bigboy):
print "Error: reflection not implemented for GTEx"
return 0
TOP = 150 #this should be an input later, need to talk to Yulia and Josh before getting fancy
fpath = str(parm['datapath'])
outpath = parm['out_file']
node_ids = parm['node_ids']
if not os.path.isfile(fpath):
print "Error:", fname, "not found, so reflection could not be computed\n"
return 0
#
'''
fpath = '/home/duncan/data/featureSpace/pancan12/reflection/clrscoresGtex_reflection.csv'
res.head()
ref_dat.head()
'''
# read in data to perform query on
ref_dat = pd.read_pickle(fpath)
#if going from features to samples then need to transpose matrix
if (parm['toMapId'] == 'sample' or parm['toMapId'] == 'GtexSample'):
ref_dat = ref_dat.transpose()
node_ids = parm['node_ids']
'''
node_ids = ref_dat.columns.values.tolist()[22:45]
'''
#grab row wise means and standard deviation for querry normalization
rowmu = ref_dat.mean(axis=1)
#take sd of each row
rowstd = ref_dat.std(axis=1)
#write the result to out_file name
res = ( (ref_dat[node_ids].mean(axis=1) - rowmu) / rowstd)#
#grab highest and lowest values and turn into: 3 highest , 2 middle, 1 lowest. 3 and 1 are of particular interest
res = topXbinTrans(res,TOP).to_dict()
#outpath='reflect_ex.tab'
#res.to_csv(outpath,sep='\t') #output is without header
return res
#return 0
if __name__ == "__main__" :
    # BUG FIX: sys and traceback were used below without ever being imported,
    # so any invocation raised NameError.
    import sys
    import traceback
    try:
        # Get the return code to return
        # Don't just exit with it because sys.exit works by exceptions.
        return_code = reflection(sys.argv[1])
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
        traceback.print_exc()
        # Return a definite number and not some unspecified error code.
        return_code = 1
    sys.exit(return_code)
|
# Quiz description URL:
#https://leetcode.com/problems/palindrome-linked-list/
# Point:
# 1.Use list
# Result
# Runtime: 2612 ms, faster than 5.00% of Python3 online submissions for Palindrome Linked List.
# Memory Usage: 47.3 MB, less than 37.11% of Python3 online submissions for Palindrome Linked List.
# class ListNode:
# def __init__(self, val=0, next=None):
# self.val = val
# self.next = next
class Solution:
    def isPalindrome(self, head: ListNode) -> bool:
        """Return True if the linked list reads the same forwards and backwards.

        PERF FIX: the original popped from the front of a Python list
        (q.pop(0)), which is O(n) per pop and O(n^2) overall — hence the 2612ms
        runtime noted above. Comparing against the reversed copy is O(n).
        An empty list is trivially a palindrome.
        """
        values = []
        node = head
        # convert the linked list into a list of values
        while node is not None:
            values.append(node.val)
            node = node.next
        # palindrome check in one pass
        return values == values[::-1]
|
class Friend:
    """Party guest; the class-level dict maps each friend's name to the text
    of the most recent invite they received."""
    friends_dict = {}

    def __init__(self, name):
        Friend.friends_dict[name] = 'No party...'
        self.name = name

    # IDIOM FIX (PEP 8 E731): the original bound a lambda to a name, producing
    # an anonymous, undocumentable method; a def is equivalent and clearer.
    def show_invite(self):
        """Return the last invite this person received (place, day and time)."""
        return Friend.friends_dict[self.name]
class Party:
    '''Sends the invites to all friends.
    Each friend an instance of class Friend.'''

    def __init__(self, place):
        self.place = place
        self.observers = []

    def add_friend(self, friend):
        '''Subscribe `friend` to future invitations.'''
        self.observers.append(friend)

    def del_friend(self, friend):
        '''Unsubscribe `friend` from future invitations.'''
        self.observers.remove(friend)

    def send_invites(self, date):
        '''Record a place+date invite for every subscribed friend.'''
        for guest in self.observers:
            Friend.friends_dict[guest.name] = f'{self.place}: {date}'
# Examples
party = Party("Midnight Pub")
nick = Friend("Nick")
john = Friend("John")
lucy = Friend("Lucy")
chuck = Friend("Chuck")
party.add_friend(nick)
party.add_friend(john)
party.add_friend(lucy)
party.send_invites("Friday, 9:00 PM")
party.del_friend(nick)
party.send_invites("Saturday, 10:00 AM")
party.add_friend(chuck)
# BUG FIX: these were bare comparison expressions whose results were silently
# discarded; assert them so the example actually verifies the behavior.
assert john.show_invite() == "Midnight Pub: Saturday, 10:00 AM"
assert lucy.show_invite() == "Midnight Pub: Saturday, 10:00 AM"
assert nick.show_invite() == "Midnight Pub: Friday, 9:00 PM"
assert chuck.show_invite() == "No party..."
|
import requests
from twilio.rest import Client
# --- Configuration: placeholder credentials/keys to be filled in by the user ---
VIRTUAL_TWILIO_NUMBER = "your virtual twilio number"
VERIFIED_NUMBER = "your own phone number verified with Twilio"
STOCK_NAME = "TSLA"
COMPANY_NAME = "Tesla Inc"
STOCK_ENDPOINT = "https://www.alphavantage.co/query"
NEWS_ENDPOINT = "https://newsapi.org/v2/everything"
STOCK_API_KEY = "YOUR OWN API KEY FROM ALPHAVANTAGE"
NEWS_API_KEY = "YOUR OWN API KEY FROM NEWSAPI"
TWILIO_SID = "YOUR TWILIO ACCOUNT SID"
TWILIO_AUTH_TOKEN = "YOUR TWILIO AUTH TOKEN"
#Get yesterday's closing stock price
stock_params = {
    "function": "TIME_SERIES_DAILY",
    "symbol": STOCK_NAME,
    "apikey": STOCK_API_KEY,
}
response = requests.get(STOCK_ENDPOINT, params=stock_params)
data = response.json()["Time Series (Daily)"]
# NOTE(review): assumes the JSON object lists dates newest-first; dict order
# here is the API's serialization order — confirm against the Alpha Vantage
# response format.
data_list = [value for (key, value) in data.items()]
yesterday_data = data_list[0]
yesterday_closing_price = yesterday_data["4. close"]
print(yesterday_closing_price)
#Get the day before yesterday's closing stock price
day_before_yesterday_data = data_list[1]
day_before_yesterday_closing_price = day_before_yesterday_data["4. close"]
print(day_before_yesterday_closing_price)
#Signed price move between the two closes; the emoji encodes the direction.
difference = float(yesterday_closing_price) - float(day_before_yesterday_closing_price)
up_down = None
if difference > 0:
    up_down = "🔺"
else:
    up_down = "🔻"
# NOTE(review): the percentage is computed relative to YESTERDAY's close;
# percent change is conventionally relative to the older (day-before) close —
# confirm which is intended.
diff_percent = round((difference / float(yesterday_closing_price)) * 100)
print(diff_percent)
#If the absolute percentage move exceeds 1%, fetch related news.
#(An earlier comment said 5%, but the code has always used 1.)
if abs(diff_percent) > 1:
    news_params = {
        "apiKey": NEWS_API_KEY,
        "qInTitle": COMPANY_NAME,
    }
    news_response = requests.get(NEWS_ENDPOINT, params=news_params)
    articles = news_response.json()["articles"]
    #Keep only the first 3 articles.
    three_articles = articles[:3]
    print(three_articles)
    #Format each article's headline and description into one SMS body.
    formatted_articles = [f"{STOCK_NAME}: {up_down}{diff_percent}%\nHeadline: {article['title']}. \nBrief: {article['description']}" for article in three_articles]
    print(formatted_articles)
    #Send each article as a separate message via Twilio.
    client = Client(TWILIO_SID, TWILIO_AUTH_TOKEN)
    for article in formatted_articles:
        message = client.messages.create(
            body=article,
            from_=VIRTUAL_TWILIO_NUMBER,
            to=VERIFIED_NUMBER
        )
|
# Read n and print the n-th Fibonacci number (0-indexed) via a bottom-up table.
n = int(input())
fibo = [0, 1]
for i in range(2, n + 1):
    fibo.append(fibo[i - 1] + fibo[i - 2])
print(fibo[n])
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic.base import RedirectView
from obs.views import CreateObservationView, ObservationDetailView, \
CreateObservationPhotoView
urlpatterns = [
    # Root redirects to the "add observation" form.
    url(r'^$', RedirectView.as_view(pattern_name="add"), name="home"),
    url(r'^add/$', CreateObservationView.as_view(), name="add"),
    # Observation detail page, keyed by alphanumeric slug.
    url(r'^ob/(?P<slug>[\w\d]+)/$', ObservationDetailView.as_view(),
        name='ob'),
    # Photo upload for an existing observation.
    url(r'^ob/(?P<slug>[\w\d]+)/add-photo/$',
        CreateObservationPhotoView.as_view(), name="upload"),
    url(r'^admin/', include(admin.site.urls)),
]
# Serve uploaded media during development.
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
class BinaryTreeNode:
    """Minimal binary-tree node with helpers that attach and return children."""

    def __init__(self, data):
        self.data = data
        self.left = None
        self.right = None

    def insert_left(self, value):
        """Attach a new left child holding `value` and return it."""
        # BUG FIX: the original constructed BinaryTreeNode(data), referencing
        # an undefined name instead of the `value` parameter — NameError on
        # every call.
        self.left = BinaryTreeNode(value)
        return self.left

    def insert_right(self, value):
        """Attach a new right child holding `value` and return it."""
        # BUG FIX: same undefined-name bug as insert_left.
        self.right = BinaryTreeNode(value)
        return self.right
# Sentinel bounds for the initial BST range check.
MAX_INT = 4294967296
MIN_INT = -4294967296

def bst_checker(root):
    """Return True when the tree rooted at `root` satisfies the BST property."""
    return bst_util(root, MIN_INT, MAX_INT)

def bst_util(node, mini, maxi):
    """Recursively verify that every node's data lies in [mini, maxi)."""
    if node is None:
        return True
    if not (mini <= node.data < maxi):
        return False
    return bst_util(node.left, mini, node.data) and bst_util(node.right, node.data, maxi)
# Build a small valid BST and report whether the checker agrees.
root = BinaryTreeNode(4)
root.left = BinaryTreeNode(2)
root.right = BinaryTreeNode(5)
root.left.left = BinaryTreeNode(1)
root.left.right = BinaryTreeNode(3)
# BUG FIX: `print "..."` is Python 2-only syntax and a SyntaxError on
# Python 3; the parenthesized form behaves identically on both versions.
if bst_checker(root):
    print("Is BST")
else:
    print("Not a BST")
from IPython import display
import matplotlib
import matplotlib.pyplot as plt
import gym
import numpy as np
def distant_render(env, agent, state_size):
    """Render a gym episode inline (e.g. in a Jupyter notebook) while the
    agent picks actions until the episode terminates.

    :param env: gym environment supporting render(mode='rgb_array')
    :param agent: object exposing next_action(state) -> action
    :param state_size: flat state dimension used to reshape observations
    """
    s = env.reset()
    done = False
    img = plt.imshow(env.render(mode='rgb_array')) # only call this once
    while not done:
        img.set_data(env.render(mode='rgb_array')) # just update the data
        display.display(plt.gcf())
        display.clear_output(wait=True)
        action = agent.next_action(np.reshape(s, [1, state_size]))
        s, _, done, _ = env.step(action)
        if done:
            break  # redundant with the loop condition; kept for clarity
# Sublime Text plugin for Parinfer
# v0.8.0
# https://github.com/oakmac/sublime-text-parinfer
#
# More information about Parinfer can be found here:
# http://shaunlebron.github.io/parinfer/
#
# Copyright (c) 2015, Chris Oakman and other contributors
# Released under the ISC license
# https://github.com/oakmac/sublime-text-parinfer/blob/master/LICENSE.md
import sublime
import sublime_plugin
import functools
import re
import pprint
# pretty-printer kept around for debugging output
pp = pprint.PrettyPrinter(indent=4)
try:
    # Python 2
    from parinfer import indent_mode, paren_mode
except ImportError:
    # Python 3: package-relative import
    from .parinfer import indent_mode, paren_mode
try:
    basestring
except NameError:
    # Python 3 has no basestring; fall back to str for isinstance checks
    basestring = str
# constants
DEBOUNCE_INTERVAL_MS = 50
STATUS_KEY = 'parinfer'
PAREN_STATUS = 'Parinfer: Paren'
INDENT_STATUS = 'Parinfer: Indent'
# matches a line that opens a top-level form, e.g. "(defn ..."
PARENT_EXPRESSION_RE = re.compile(r"^\([a-zA-Z]")
# captures the language name out of a "<Name>.sublime-syntax" setting value
SYNTAX_LANGUAGE_RE = r"([\w\d\s]*)(\.sublime-syntax)"
def get_syntax_language(view):
    """Return the language name from the view's syntax file, or None when the
    syntax setting is not a .sublime-syntax file."""
    regex_res = re.search(SYNTAX_LANGUAGE_RE, view.settings().get("syntax"))
    if regex_res:
        return regex_res.group(1)
    else:
        # BUG FIX: this branch was a bare `None` expression (missing
        # `return`); the function only returned None by falling off the end.
        return None
# Cache of syntax language -> comment character, so the shellVariables lookup
# only happens once per language. (TODO: still an ugly linear scan per view.)
comment_chars = {}

def get_comment_char(view):
    """Return the line-comment token for *view*'s syntax, defaulting to ';'."""
    lang = get_syntax_language(view)
    if lang in comment_chars:
        return comment_chars[lang]
    token = ';'
    # Scan the view's shellVariables for the TM_COMMENT_START definition.
    # Position 0 is sufficient: we never nest Clojure inside other syntaxes.
    for var in view.meta_info("shellVariables", 0):
        if var['name'] == 'TM_COMMENT_START':
            token = var['value'].strip()
            comment_chars[lang] = token
            break
    return token
def get_setting(view, key):
    """Look up *key* in the view-local 'Parinfer' settings, falling back to
    the plugin's Parinfer.sublime-settings file."""
    per_view = view.settings().get('Parinfer')
    if per_view is not None:
        return per_view.get(key)
    return sublime.load_settings('Parinfer.sublime-settings').get(key)
def is_parent_expression(txt):
    """True when *txt* opens a top-level form (starts with '(' + a letter)."""
    return bool(PARENT_EXPRESSION_RE.match(txt))
def find_start_parent_expression(lines, line_no):
    """Scan upward, starting a few lines above *line_no*, for the nearest line
    that opens a top-level expression; return 0 when none is found."""
    probe = line_no - 4
    if probe < 0:
        return 0
    idx = probe - 1
    while idx > 0:
        if is_parent_expression(lines[idx]):
            return idx
        idx -= 1
    return 0
def find_end_parent_expression(lines, line_no):
    """Mirror of find_start_parent_expression: scan downward, starting a few
    lines below *line_no*, for the next top-level expression; return the last
    line index when none is found."""
    last = len(lines) - 1
    probe = line_no + 4
    if probe > last:
        return last
    idx = probe + 1
    while idx < last:
        if is_parent_expression(lines[idx]):
            return idx
        idx += 1
    return last
# this command applies the parinfer changes to the buffer
# NOTE: this needs to be in its own command so we can override "undo"
class ParinferApplyCommand(sublime_plugin.TextCommand):
    def run(self, edit, start_line = 0, end_line = 0, cursor_row = 0, cursor_col = 0, result_text = ''):
        """Replace buffer rows [start_line, end_line) with *result_text* and
        restore the selection from (cursor_row, cursor_col) to the previous
        selection end."""
        # get the current selection (its end is preserved across the edit)
        current_sel = self.view.sel()
        end_cursor = current_sel[0].end()
        end_row, end_col = self.view.rowcol(end_cursor)
        # update the buffer
        start_point = self.view.text_point(start_line, 0)
        end_point = self.view.text_point(end_line, 0)
        region = sublime.Region(start_point, end_point)
        self.view.replace(edit, region, result_text)
        # re-apply their selection
        pt1 = self.view.text_point(cursor_row, cursor_col)
        pt2 = self.view.text_point(end_row, end_col)
        self.view.sel().clear()
        self.view.sel().add(sublime.Region(pt1, pt2))
# NOTE: This command inspects the text around the cursor to determine if we need
# to run Parinfer on it. It does not modify the buffer directly.
class ParinferInspectCommand(sublime_plugin.TextCommand):
    def __init__(self, *args, **kwargs):
        # NOTE(review): argument-less super() is Python 3 only, although the
        # import shims at the top of the file suggest Python 2 support was
        # intended -- confirm the supported Sublime/Python versions.
        super().__init__(*args, **kwargs)
        # holds the text of the last update
        self.last_update_text = None
        self.comment_char = get_comment_char(self.view)
    def run(self, edit):
        """Run Parinfer over the top-level expression(s) around the cursor and
        schedule a 'parinfer_apply' command when the text needs changing."""
        current_view = self.view
        current_status = current_view.get_status(STATUS_KEY)
        # exit if Parinfer is not enabled on this view
        if current_status not in (INDENT_STATUS, PAREN_STATUS):
            return
        whole_region = sublime.Region(0, current_view.size())
        all_text = current_view.substr(whole_region)
        lines = all_text.split("\n")
        # add a newline at the end of the file if there is not one
        if lines[-1] != "":
            lines.append("")
        selections = current_view.sel()
        first_cursor = selections[0].begin()
        cursor_row, cursor_col = current_view.rowcol(first_cursor)
        # only hand Parinfer the slice of the buffer around the cursor
        start_line = find_start_parent_expression(lines, cursor_row)
        end_line = find_end_parent_expression(lines, cursor_row)
        start_point = current_view.text_point(start_line, 0)
        end_point = current_view.text_point(end_line, 0)
        region = sublime.Region(start_point, end_point)
        text = current_view.substr(region)
        # cursor line expressed relative to the slice
        modified_cursor_row = cursor_row - start_line
        # exit early if there has been no change since our last update
        if text == self.last_update_text:
            return
        parinfer_options = {
            'cursorLine': modified_cursor_row,
            'cursorX': cursor_col,
            'commentChar': self.comment_char,
        }
        # specify the Parinfer mode
        parinfer_fn = indent_mode
        if current_status == PAREN_STATUS:
            # TODO: add parinfer_options.cursorDx here
            parinfer_fn = paren_mode
        # run Parinfer on the text
        result = parinfer_fn(text, parinfer_options)
        if result['success']:
            # save the text of this update so we don't have to process it again
            self.last_update_text = result['text']
            # update the buffer in a separate command if the text needs to be changed
            if result['text'] != text:
                cmd_options = {
                    'cursor_row': cursor_row,
                    'cursor_col': cursor_col,
                    'start_line': start_line,
                    'end_line': end_line,
                    'result_text': result['text'],
                }
                sublime.set_timeout(lambda: current_view.run_command('parinfer_apply', cmd_options), 1)
class ParinferParenOnOpen(sublime_plugin.TextCommand):
    """On file open: normalize the whole buffer with Paren Mode, then enable
    Indent Mode for subsequent edits."""
    def run(self, edit):
        # run Paren Mode on the whole file
        whole_region = sublime.Region(0, self.view.size())
        all_text = self.view.substr(whole_region)
        result = paren_mode(all_text, None)
        # TODO:
        # - what to do when paren mode fails on a new file?
        #   show them a message?
        # - warn them before applying Paren Mode changes?
        if result['success']:
            # update the buffer if we need to
            if all_text != result['text']:
                self.view.replace(edit, whole_region, result['text'])
            # drop them into Indent Mode
            self.view.set_status(STATUS_KEY, INDENT_STATUS)
class Parinfer(sublime_plugin.EventListener):
    """Wires editor events to the 'parinfer_inspect' command, debounced."""
    def __init__(self):
        # stateful debounce counter: number of timeouts still in flight
        self.pending = 0
    # Should we automatically start Parinfer on this file?
    def should_start(self, view):
        # False if filename is not a string
        filename = view.file_name()
        if isinstance(filename, basestring) is not True:
            return False
        # check the extensions in settings
        for extension in get_setting(view, 'file_extensions'):
            if filename.endswith(extension):
                return True
        # didn't find anything; do not automatically start Parinfer
        return False
    # debounce intermediary: only the most recently queued timeout
    # (the one that decrements pending to zero) triggers the inspection
    def handle_timeout(self, view):
        self.pending = self.pending - 1
        if self.pending == 0:
            view.run_command('parinfer_inspect')
    # fires everytime the editor is modified; basically calls a
    # debounced run_parinfer
    def on_modified(self, view):
        self.pending = self.pending + 1
        sublime.set_timeout(
            functools.partial(self.handle_timeout, view), DEBOUNCE_INTERVAL_MS)
    # fires everytime the cursor is moved
    def on_selection_modified(self, view):
        self.on_modified(view)
    # fires when a file is finished loading
    def on_load(self, view):
        # exit early if we do not recognize this file extension
        if not self.should_start(view):
            return
        # run Paren Mode on the whole file
        view.run_command('parinfer_paren_on_open')
class ParinferToggleOnCommand(sublime_plugin.TextCommand):
    """Enable Parinfer for this view, toggling Indent <-> Paren mode via the
    view's status-bar entry."""
    def run(self, edit):
        # Indent -> Paren; anything else (off, or already Paren) -> Indent.
        if self.view.get_status(STATUS_KEY) == INDENT_STATUS:
            new_status = PAREN_STATUS
        else:
            new_status = INDENT_STATUS
        self.view.set_status(STATUS_KEY, new_status)
class ParinferToggleOffCommand(sublime_plugin.TextCommand):
    """Disable Parinfer for this view by clearing its status-bar entry;
    the inspect command treats a missing status as 'Parinfer off'."""
    def run(self, edit):
        # remove from the status bar
        self.view.erase_status(STATUS_KEY)
# override undo
class ParinferUndoListener(sublime_plugin.EventListener):
    """Makes undo/redo skip over automatic 'parinfer_apply' edits so that one
    user action is undone/redone per keystroke."""
    def on_text_command(self, view, command_name, args):
        # TODO: Only run in parinfer views?
        # TODO: Simplify duplicated logic?
        if command_name == 'undo':
            # check to see if the last command was a 'parinfer_apply'
            # (command_history returns a (name, args, repeat) tuple)
            cmd_history = view.command_history(0)
            # if so, run an extra "undo" to erase the changes
            if cmd_history[0] == 'parinfer_apply':
                view.run_command('undo')
            # run "undo" as normal
        elif command_name == 'redo':
            # check to see if the command after next was a 'parinfer_apply'
            # NOTE(review): index 2 reads as "after next" -- confirm against
            # the Sublime API, where 1 is the next redo entry.
            cmd_history = view.command_history(2)
            # if so, run an extra "redo" to erase the changes
            if cmd_history[0] == 'parinfer_apply':
                view.run_command('redo')
            # run "redo" as normal
|
#!/usr/bin/env python
import os
import sys
def main(argv):
    """Walk the source directory given in *argv* and append one
    'conf/project-name/<rel>=>/opt/apache-tomcat6/conf/project-name/<rel>'
    deployment-mapping line per file to the local file "file-name".

    argv -- argument list holding exactly one element, the source directory.
    Exits with status 2 on bad usage.
    """
    # FIX: validate the list that was actually passed in -- the parameter was
    # previously unused and sys.argv was re-read instead. Also use print()
    # calls so the script runs on Python 3 (it used py2 print statements).
    if len(argv) != 1:
        print('Usage: prout.py <src_directory>')
        sys.exit(2)
    src_dir = str(argv[0])
    # Mapping lines to append, one per file found under src_dir.
    lines_to_insert = []
    for root, dirs, files in os.walk(src_dir, topdown=True):
        for xml_file in files:
            # path of the file relative to src_dir
            direc_and_file_name_concat = os.path.join(root, xml_file).replace(src_dir + "/", "")
            line_to_ins = 'conf/project-name/' + direc_and_file_name_concat + '=>/opt/apache-tomcat6/conf/project-name/' + direc_and_file_name_concat
            lines_to_insert.append(line_to_ins)
    # FIX: context manager guarantees the file is closed on every path
    # (the old code leaked the handle if the walk raised).
    with open("file-name", "a+") as file_name:
        for line_ins in lines_to_insert:
            file_name.write(line_ins + "\n")

if __name__ == "__main__":
    main(sys.argv[1:])
|
from argparse import ArgumentParser
from datetime import datetime
from secrets import MYSQL_USER, MYSQL_PASSWD
import MySQLdb
import sys
# QnA database connection, created at import time.
# NOTE(review): importing this module opens a live DB connection; credentials
# come from the local `secrets` module (shadowing the stdlib name).
HOST = "192.168.1.195"
DATABASE_NAME = "QnA"
db = MySQLdb.connect(host=HOST, user=MYSQL_USER, passwd=MYSQL_PASSWD, db=DATABASE_NAME)
cur = db.cursor()
class QnAArgParser(ArgumentParser):
    """ArgumentParser that also prints the full help text (and exits with
    status 2) whenever argument parsing fails."""
    def error(self, message):
        sys.stderr.write('error: %s\n' %message)
        self.print_help()
        sys.exit(2)
def get_args():
    """Build the CLI parser and return the parsed options; with no arguments
    at all, print the help text and exit with status 1."""
    parser = QnAArgParser()
    parser.add_argument(
        '-t', dest='question_text',
        help='The text needed to add a new question or change an existing one.')
    parser.add_argument(
        '-c', dest='changed_question_text',
        help='The text need to change an existing question.')
    parser.add_argument(
        '-d', action="store_true", dest='show_diff',
        help='Display diff of changes to question text over time.')
    # Bare invocation: show usage instead of silently doing nothing.
    if len(sys.argv) == 1:
        parser.print_help()
        sys.exit(1)
    return parser.parse_args()
def addNewQuestion(new_text):
    """Insert *new_text* as a new question; duplicates (IntegrityError from
    the UNIQUE constraint) are reported instead of raised."""
    try:
        sql = "INSERT INTO questions (question) VALUES (%s);"
        cur.execute(sql, (new_text,))
    except MySQLdb.IntegrityError:
        # FIX: py2 print statement -> print() call (valid on Python 2 and 3)
        print("This question already exists in the QnA database.")
def getQuestionID(question_text):
    """Return the id of the question matching *question_text*, or False when
    no such question exists."""
    cur.execute("SELECT id FROM questions WHERE question=%s", (question_text,))
    rows = cur.fetchall()
    if rows:
        return rows[0][0]
    return False
def addQuestionChange(original_question_text, changed_text):
    """Record *changed_text* as a revision of an existing question."""
    original_question_id = getQuestionID(original_question_text)
    # FIX: compare with `is False` -- `== False` would also match a question
    # id of 0. Prints converted to print() calls (valid on Python 2 and 3).
    if original_question_id is False:
        print("ERROR: Cannot add a change for a non-existant question. Question \'%s\' does not exist." % (original_question_text))
    else:
        try:
            sql = "INSERT INTO question_changes (question_id, changed_text) VALUES (%s, %s);"
            cur.execute(sql, (original_question_id, changed_text))
        except MySQLdb.IntegrityError:
            print("This question change already exists in the QnA database.")
def printQuestionDiff(question_id):
    """Print the stored question row(s) for *question_id* (debug helper)."""
    sql = "SELECT * FROM questions WHERE id=%s;"
    # FIX: execute() parameters must be a sequence -- (question_id) is just a
    # parenthesized scalar, not a 1-tuple.
    cur.execute(sql, (question_id,))
    questions_results = cur.fetchall()
    if len(questions_results) == 0:
        print("No results for question ID %s" % (question_id))
    # FIX: the original called cur.fetchall() a second time here, which always
    # returned () because the cursor was already drained; reuse the rows
    # fetched above. Prints converted to print() calls.
    print("results: " + str(questions_results))
def printQuestionDiffStr(question_text):
    """Print the change history of *question_text*: a header, then one
    timestamped line per recorded revision."""
    CHANGEDTEXT_INDEX = 0
    TIMESTAMP_INDEX = 1
    question_id = getQuestionID(question_text)
    # FIX: `is False` -- `== False` would also match a question id of 0.
    # Prints converted to print() calls (valid on Python 2 and 3).
    if question_id is False:
        print("ERROR: Cannot display diff for the non-existant question \'%s\' ." % (question_text))
    else:
        sql = "SELECT changed_text, timestamp FROM question_changes WHERE question_id=%s"
        cur.execute(sql, (question_id,))
        results = cur.fetchall()
        # Print a nice header for the output. :)
        print(question_text)
        print('-' * len(question_text))
        if len(results) == 0:
            print("There are no changes for this question.")
        else:
            for result in results:
                print(str(result[TIMESTAMP_INDEX]) + "\t" + result[CHANGEDTEXT_INDEX])
def main():
    """Dispatch on parsed CLI options: add a question, record a change, or
    show a question's diff; commits the DB transaction once at the end."""
    opts = get_args()
    if not opts.question_text:
        # FIX: py2 print statement -> print() call (valid on Python 2 and 3)
        print("No question text provided. Please use the -t switch.")
        sys.exit(1)
    if opts.changed_question_text:
        addQuestionChange(opts.question_text, opts.changed_question_text)
    elif opts.show_diff:
        printQuestionDiffStr(opts.question_text)
    else:
        addNewQuestion(opts.question_text)
    db.commit()

if __name__ == '__main__':
    main()
import pygame
import sys
import os
from pygame.locals import *
#from Maingame import *
windowWidth = 1280
windowHeight = 720
pygame.init()
Display = pygame.display.set_mode((windowWidth,windowHeight))
#screen
# playable screen area inside the window borders
leftBorder = 1
rightBorder = 1
topBorder = 5
downBorder = 5
# NOTE(review): the screen size is derived from 1024x768 although the display
# window above is 1280x720 -- confirm which resolution is intended.
screenHeight = 768-topBorder-downBorder
screenWidth = 1024-rightBorder-leftBorder
#block
# the board is blockColumn x blockRow cells; walls are half a cell tall and a
# quarter cell thick
blockColumn = 18
blockRow = 10
blockHeight = screenHeight/blockRow
blockWidth = screenWidth/blockColumn
wallHeight = blockHeight/2
wallThickness = blockWidth/4
#map
# load every map asset and scale it to the computed cell sizes
Map = pygame.image.load(os.path.join('House_Arrest_Map','Floor.png'))
Map = pygame.transform.scale(Map,(int(screenWidth),int(screenHeight)))
Block = pygame.image.load(os.path.join('House_Arrest_Map','Crate_Wood_01.png'))
Block = pygame.transform.scale(Block,(int(blockWidth),int(blockHeight)))
bed = pygame.image.load(os.path.join('House_Arrest_Map','bed.png'))
bed = pygame.transform.scale(bed,(int(3*blockWidth),int(2*blockHeight)))
bWall = pygame.image.load(os.path.join('House_Arrest_Map','B.png'))
bWall = pygame.transform.scale(bWall,(int(wallThickness),int(blockHeight)))
mWall = pygame.image.load(os.path.join('House_Arrest_Map','M.png'))
mWall = pygame.transform.scale(mWall,(int(wallThickness),int(blockHeight)))
tWall = pygame.image.load(os.path.join('House_Arrest_Map','T.png'))
tWall = pygame.transform.scale(tWall,(int(wallThickness),int(wallHeight)))
lWall = pygame.image.load(os.path.join('House_Arrest_Map','L.png'))
lWall = pygame.transform.scale(lWall,(int(blockWidth),int(wallHeight)))
cWall = pygame.image.load(os.path.join('House_Arrest_Map','C.png'))
cWall = pygame.transform.scale(cWall,(int(blockWidth),int(wallHeight)))
rWall = pygame.image.load(os.path.join('House_Arrest_Map','R.png'))
rWall = pygame.transform.scale(rWall,(int(blockWidth),int(wallHeight)))
# Room layout: one character per board cell; drawMap blits one image per
# character (space = bare floor, b = crate, B/M/T = vertical wall pieces,
# L/C/R = horizontal wall pieces, < and | = combined pieces).
level =[" T T bM ",
" bbb MTMbb TbMb ",
" bbb MMMbb MbBbbb ",
" CCC BM<CT M CCCC",
" CC CB Bb< T ",
"CC CCCCCC Mbb ",
" bb TCCCT |CBCCC",
" T<CT B M ",
" bb M <CC Bbb ",
" bb Bb b "]
class drawMap:
    def drawMap(self):
        """Blit the floor, then one image per `level` cell, then the bed.

        Character -> image: b=Block, B=bWall, M=mWall, T=tWall, L=lWall,
        C=cWall, R=rWall, '<' = cWall + bWall, '|' = tWall + cWall.
        NOTE(review): the vWallRect/hWallRect/bedRect locals are assigned but
        never used or stored -- presumably intended for collision detection;
        confirm before removing.
        """
        Display.blit(Map,(0+leftBorder,0+topBorder))
        for x in range(0,blockRow,1):
            for y in range(0,blockColumn,1):
                if level[x][y] == "b":
                    Display.blit(Block,((blockWidth*y+leftBorder),(blockHeight*x+topBorder)))
                if level[x][y] == "B":
                    # vertical walls are centred on the cell's left edge
                    Display.blit(bWall,((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+topBorder)))
                    vWallRect = pygame.Rect((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+topBorder),wallThickness,blockHeight)
                if level[x][y] == "M":
                    Display.blit(mWall,((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+topBorder)))
                    vWallRect = pygame.Rect((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+topBorder),wallThickness,blockHeight)
                if level[x][y] == "T":
                    Display.blit(tWall,((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+wallHeight+topBorder)))
                    vWallRect = pygame.Rect((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+topBorder),wallThickness,blockHeight)
                if level[x][y] == "L":
                    # horizontal walls sit on the cell's vertical midline
                    Display.blit(lWall,((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder)))
                    hWallRect = pygame.Rect((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder),blockWidth,wallHeight)
                if level[x][y] == "C":
                    Display.blit(cWall,((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder)))
                    hWallRect = pygame.Rect((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder),blockWidth,wallHeight)
                if level[x][y] == "R":
                    Display.blit(rWall,((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder)))
                    hWallRect = pygame.Rect((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder),blockWidth,wallHeight)
                if level[x][y] == "<":
                    # corner: horizontal centre piece plus vertical piece
                    Display.blit(cWall,((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder)))
                    hWallRect = pygame.Rect((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder),blockWidth,wallHeight)
                    Display.blit(bWall,((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+topBorder)))
                    vWallRect = pygame.Rect((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+topBorder),wallThickness,blockHeight)
                if level[x][y] == "|":
                    # junction: vertical top piece plus horizontal centre piece
                    Display.blit(tWall,((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+wallHeight+topBorder)))
                    vWallRect = pygame.Rect((blockWidth*y+leftBorder-wallThickness/2),(blockHeight*x+topBorder),wallThickness,blockHeight)
                    Display.blit(cWall,((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder)))
                    hWallRect = pygame.Rect((blockWidth*y+leftBorder),(blockHeight*x+wallHeight+topBorder),blockWidth,wallHeight)
        # the bed occupies a fixed 3x2-cell area
        Display.blit(bed,(1*blockWidth+leftBorder,1*blockHeight+topBorder))
        bedRect = pygame.Rect(1*blockWidth+leftBorder,1*blockHeight+topBorder,3*blockWidth,2*blockHeight)
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import render

from recordtype.models import RecordType, RecordTypeForm
# Create your views here.
def index(request):
    """List all record types alphabetically by label, paginated 10 per page."""
    all_types = RecordType.objects.all().order_by('label')
    paginator = Paginator(all_types, 10)
    requested_page = request.GET.get('page')
    try:
        page_obj = paginator.page(requested_page)
    except PageNotAnInteger:
        # Non-numeric (or missing) page parameter: show the first page.
        page_obj = paginator.page(1)
    except EmptyPage:
        # Out-of-range page: clamp to the last page.
        page_obj = paginator.page(paginator.num_pages)
    return render(request, 'recordtype/index.html', {
        'recordtypes': page_obj,
    })
def add(request):
    """Create a new RecordType from the submitted form, upper-casing its
    label; on GET (or invalid POST) render the empty/bound form."""
    if request.method == 'POST':
        form = RecordTypeForm(request.POST)
        if form.is_valid():
            new_rt = form.save(commit=False)
            # NOTE(review): .decode('utf-8') implies Python 2 byte strings;
            # under Python 3 cleaned_data yields str and this raises
            # AttributeError -- confirm the target Python/Django version.
            new_rt.label = form.cleaned_data.get('label').decode('utf-8').upper()
            new_rt.save()
            return HttpResponseRedirect('/recordtype/')
    else:
        form = RecordTypeForm()
    return render( request, 'recordtype/add.html', {
        'form': form,
    })
def delete(request, recordtype_id):
    """Delete the RecordType with *recordtype_id*; 404 when it doesn't exist."""
    try:
        recordtype = RecordType.objects.get(pk=recordtype_id)
    except RecordType.DoesNotExist:
        # BUG FIX: Http404 was never imported, so this line raised NameError
        # instead of producing a 404 response (import added at the top).
        raise Http404
    # NOTE(review): deleting on a plain GET is CSRF-prone -- consider
    # restricting this view to POST.
    recordtype.delete()
    return HttpResponseRedirect('/recordtype/')
|
# -*- coding: utf-8 -*-
"""Visibility settings for use with entities."""
from verta._internal_utils import documentation
from ._org_custom import OrgCustom
from ._org_default import OrgDefault
from ._private import Private
from ._workspace_default import _WorkspaceDefault
# Re-home the imported visibility classes' documentation under this module's
# name. NOTE(review): _WorkspaceDefault is imported above but deliberately
# not listed here -- presumably because it is private; confirm.
documentation.reassign_module(
    [
        OrgCustom,
        OrgDefault,
        Private,
    ],
    module_name=__name__,
)
|
import sys
import ipdb
import os
# Make the project importable and point Django at its settings before any
# project modules are imported.
sys.path.append("/home/m/MediaMonitor")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "MediaMonitor.settings")
from monitor.lib import share_getter
from monitor.models import Link, LinkStats
from django.utils import timezone
import json
import django
# NOTE(review): django.setup() runs only here, AFTER monitor.models was
# imported above; on modern Django importing models before setup() fails --
# confirm the intended ordering.
django.setup()
import datetime
def get_all_valid_links():
    """
    Get all links with valid Time To Live value
    """
    # FIX: snapshot now() once -- the old code called timezone.now() for every
    # link, wasting work and letting the cutoff drift while filtering.
    now = timezone.now()
    return [link for link in Link.objects.all() if link.ttl > now]
def get_facebook_and_twitter_stats():
    """
    For all valid links get statistics from Facebook and
    Twitter APIs and save them as JSON into DB
    """
    django.setup()
    json_en = json.JSONEncoder()
    links = get_all_valid_links()
    # One batched StatsGetter call for all URIs.
    # NOTE(review): lag=1 presumably throttles requests -- confirm in
    # share_getter.
    stats_getter = share_getter.StatsGetter([link.uri for link in links], lag=1)
    stats = stats_getter.get_stats()
    # stats[i] corresponds to links[i] by position.
    for i, link in enumerate(links):
        ls = LinkStats()
        ls.link_id = link.id
        ls.link_uri = link.uri
        ls.time = stats[i]['time']
        ls.fb_21 = json_en.encode(stats[i]['fb_21'])
        ls.fb_rest = '{}' #json_en.encode(stats[i]['fb_rest'])
        ls.twitter = json_en.encode(stats[i]['twitter'])
        ls.save()

if __name__ == "__main__":
    get_facebook_and_twitter_stats()
|
from django.apps import AppConfig
class SystemauthenticationConfig(AppConfig):
    """Django application configuration for the systemAuthentication app."""
    name = 'systemAuthentication'
|
#coding=utf-8
# View handling user registration
from control.base.base_handler import BaseHandler
from models.models import PersonalTable
from models.db.dbsession import dbSession
class RegisterHandler(BaseHandler):
    """User registration: GET renders the form, POST creates the account."""
    def get(self):
        self.render("user/register.html")

    def post(self):
        username = self.get_argument("name", "")
        password = self.get_argument("pass", "")
        # BUG FIX: was `and` (rejecting only when BOTH fields were empty) and
        # had no `return`, so invalid requests fell through and registered a
        # user with an empty name or password anyway.
        if not username or not password:
            self.write(u"用户名或密码输入有错误")
            return
        # Reject duplicate user names.
        search_name = dbSession.query(PersonalTable).filter_by(username=username).first()
        if search_name:
            self.write(u"该用户名已经存在,不能重复注册")
        else:
            # NOTE(review): the password is stored in plain text -- it should
            # be hashed. Also, the duplicate check uses dbSession while the
            # insert uses self.db; confirm these are the same session.
            user = PersonalTable()
            user.username = username
            user.password = password
            self.db.add(user)
            self.db.commit()
            self.write(u"注册成功")
from math import sqrt
def miller_test(m, base):
    """One Miller-Rabin round for odd m > 2: return True when *base* witnesses
    that m is composite, False when m passes this round."""
    # Write m - 1 as q * 2**k with q odd.
    k, q = 0, m - 1
    while q % 2 == 0:
        q >>= 1
        k += 1
    r, i = pow(base, q, m), 0
    while True:
        # base**q == 1 (mod m) or base**(q*2**i) == m-1: base is not a witness.
        if (i == 0 and r == 1) or (i >= 0 and r == m - 1):
            return False
        i += 1
        r = pow(r, 2, m)
        if i >= k:
            return True

def is_prime(p):
    """Primality test via Miller-Rabin with the first ten primes as bases
    (deterministic well beyond 32-bit inputs -- confirm the exact bound before
    relying on it for very large p)."""
    # BUG FIX: p < 2 previously fell through to miller_test, which loops
    # forever for p == 1 (m - 1 == 0 stays even) and errors for p <= 0.
    if p < 2:
        return False
    for i in [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]:
        if p == i:
            return True
        if miller_test(p, i):
            return False
    return True
def is_trimo(n):
    """Return 1 when n is the square of a prime number, else 0.

    NOTE(review): int(sqrt(n)) can be off by one for very large n due to
    float rounding; math.isqrt would be exact.
    """
    root = int(sqrt(n))
    return 1 if (root * root == n and is_prime(root)) else 0

if __name__ == '__main__':
    print(is_trimo(91))
|
#! /usr/bin/env python
import sys, re, os, socket, time, shutil, string, optparse
# NOTE(review): Python 2-only script (print statements, dict.iteritems,
# string-module functions); it will not parse under Python 3.
# 'set' is new in Python 2.4, but almost exists in 2.3
version = string.split(string.split(sys.version)[0], ".")
if '2' == version[0] and '3' == version[1]:
    from sets import Set as set
# allowed options for config file
allowed = set (['dir', 'host', 'hostequiv', 'backupcmd', 'user'])
allowedMultiple = set (['hostequiv']) # allowed multiple entries
# extentions that end in this should not be broken up from the
# previous term (e.g. keep "tar.gz" together)
notSufficientExt = set (['gz'])
# extentions that should be sent as text (when 'txt' is 'auto')
txtExtentions = set (['cc', 'c', 'hh', 'h', 'py', 'pl', 'pm', 'tcl'])
# Image extentions (files that will get <img src> tag)
imageExtentions = set (['png', 'gif', 'jpg'])
# convertedToImage extentions (files that will be matched with an
# image)
convertedToImageExtentions = set (['eps', 'ai', 'xls', 'pdf'])
# initialize variables for command line parcing
dest = ""
files = []
flags = {}
# default options
# Have script decide if it should append '.txt' or not
flags['txt'] = 'auto'
# If true, it uses this command to backup files. If you do not have
# this script, just set this to "" instead.
flags['backup'] = "backupNeeded"
#################
# Parce options #
#################
parser = optparse.OptionParser ("Usage: %prog [options] dest file1 [file2]")
parser.add_option ("--tag", dest="tag", type="string",
                   help="Adds '_TAG' to filenames")
parser.add_option ("--keepPath", dest="keepPath", action="store_true",
                   default=False, help="Keeps the full path of files")
parser.add_option ("--list", dest="asList", action="store_true",
                   default=False, help="Outputs hrefs as list")
parser.add_option ("--ignoreIf", dest="iif", type="string",
                   help="Will not copy file if matches regex in 'IF'. " +
                   "Comma separated, '%' as wildcard")
parser.add_option ("--dirMatch", dest="match", type="string",
                   help="Calls self wrapped with 'find' and 'xargs. Matches "+
                   "file extentions in 'MATCH'")
parser.add_option ("--noCopy", dest="noCopy", action="store_true",
                   default=False, help="Does everything except copy files " +
                   "- Useful with --dirMatch and debugging")
parser.add_option ("--txt", dest="txt", action="store_true", default=False,
                   help="add '.txt' to copied file")
parser.add_option ("--notxt", dest="notxt", action="store_true",
                   default=False,
                   help="prevents '.txt' to copied file")
# --dm marks a self-invocation made through find/xargs (see below)
parser.add_option ("--dm", dest="dm", action="store_true", default=False)
(options, args) = parser.parse_args()
neededArgs = 2
# if we are using the dirMatch option, then we do not need to list
# files as that will be done for us.
if options.match and not options.dm:
    neededArgs = 1
if len(args) < neededArgs:
    parser.error("Must provide destination and at least one file.")
dest = args[0]
files = args[1:]
# setup find/xargs command: re-run this same command line through find,
# appending --dm so the child invocation doesn't recurse again
if options.match and not options.dm:
    extentions = options.match.split(",")
    command = "find . -regex '.+\\(" + "\\|".join (extentions) + \
              "\\)' | xargs "+ " ".join(sys.argv) + " --dm "
    print command
    os.system ( command )
    sys.exit(0)
# translate the --ignoreIf patterns ('%' wildcard) into anchored regexes
ignoreList = []
if options.iif:
    pieces = options.iif.split(",")
    for piece in pieces:
        ignoreList.append( '^' + re.sub( r'%', r'.*', piece ) + '$' )
if options.txt:
    flags['txt'] = True
elif options.notxt:
    flags['txt'] = False
else:
    flags['txt'] = 'auto'
# get the environment variable $HOME
config = "%s/.moveFiles.config" % os.environ.get ('HOME', ".")
# Try opening the file
try:
    configFile = open (config, "r")
except:
    print "Could not open configuration file '%s'. Aborting." % config
    sys.exit(1)
##############
# parce file #
##############
# Config format: "+ <name>" starts a destination section; "- <attr> <value>"
# adds an attribute to the current section.
name = ""
configInfo = {}
for line in configFile:
    line = re.sub (r'#.+$', '', line) # remove comment characters
    line = line.strip() # remove inner and outer spaces
    if not line: continue # don't bother with empty lines
    # + name definition
    nameMatch = re.search (r'^\+\s+(\S+)', line)
    if nameMatch:
        name = nameMatch.group (1)
        configInfo[name] = {}
        continue
    attribMatch = re.search (r'^\-\s+(\S+)\s+(\S.+)', line)
    # - attribute definition
    if attribMatch:
        if not name:
            # NOTE(review): "'%'" should be "'%s'" -- as written this %-format
            # raises ValueError instead of printing the message.
            print "Problem with '%': Must define name before attributes:" % \
                  config
            print "%s\n" % line
            sys.exit(1)
        attribName = attribMatch.group (1).lower()
        value = attribMatch.group (2)
        if attribName not in allowed :
            print "Attribute '%s' not allowed in %s" % (attribName, config)
            print "%s\n" % line
            sys.exit(1)
        # multi-valued attributes (hostequiv) become whitespace-split lists
        if attribName in allowedMultiple:
            configInfo[name][attribName] = re.split (r'\s+', value)
        else:
            configInfo[name][attribName] = value
        continue
##################################################################
# check to make sure that we have what we need for the requested #
# definition                                                     #
##################################################################
if dest not in configInfo:
    print "Information about destination '%s' is not "\
          "available in '%s'. Aborting." % (dest, config)
    sys.exit(1)
hostInfo = configInfo[dest]
if 'dir' not in hostInfo or 'host' not in hostInfo:
    # NOTE(review): two bugs here -- "'%'" should be "'%s'" (twice), so this
    # print raises ValueError; and there is no sys.exit(1) afterwards, so
    # execution continues with incomplete hostInfo and fails later.
    print "Sufficient information ('dir', 'host') are not available "\
          "for destination '%' in '%'. Aborting." % (dest, config)
# get current host name
thisComputer = socket.gethostname()
hostComputer = hostInfo['host']
# are we on the target computer
onHost = False
if thisComputer == hostComputer:
    onHost = True
else:
    # do we have equivalent host matching
    equivs = hostInfo.get ('hostequiv')
    if equivs:
        for regex in equivs:
            if re.match (regex, thisComputer):
                onHost = True
                break
######################
# Prepare The Files! #
######################
# Files land in <dir>/<YYMM>/ and get a YYMMDD_ date prefix.
monthKey = time.strftime("%y%m")
dateKey = time.strftime("%y%m%d_")
targetDir = hostInfo['dir'] + "/" + monthKey
filesMap = {}
for sourceFile in files:
    if not os.path.exists (sourceFile):
        print "File '%s' does not exist. Aborting." % sourceFile
        sys.exit(1)
    ignoreFile = False
    for ignoreRegex in ignoreList:
        if re.match (ignoreRegex, sourceFile):
            print "Ignoring file '%s'" % sourceFile
            ignoreFile = True
            break
    if ignoreFile: continue
    if options.keepPath:
        # trim leading './' if it is there
        # NOTE(review): the '.' is unescaped, so r'^./' strips ANY first
        # character followed by '/'; r'^\./' was probably intended.
        displayName = re.sub (r'^./', '', sourceFile)
        targetFile = dateKey + re.sub (r'/', '__', displayName)
    else:
        displayName = os.path.basename (sourceFile)
        targetFile = dateKey + displayName
    ######################
    # Add Additional Tag #
    ######################
    tag = options.tag
    if tag:
        # break up the filename
        parts = targetFile.split (".")
        # should we consider combining the last two pieces?
        # (keeps e.g. "tar.gz" together so the tag lands before it)
        last = len (parts) - 1
        if last >= 2 and parts[last].lower() in notSufficientExt:
            parts[last -1:] = [parts[last - 1] + "." + parts[last]]
            last = len (parts) - 1
        # do we have enough pieces to bother messing with this
        if last >= 1:
            parts[last - 1] = parts[last - 1] + "_" + tag
        else:
            # if we're here, then we can't find any extention to
            # remove before placing the tag, so we'll just put it on
            # the end.
            parts[last] = parts[last] + "_" + tag
        targetFile = ".".join (parts)
    #######################
    # Should we add .txt? #
    #######################
    txt = flags.get ('txt')
    if 'auto' == txt:
        extMatch = re.search (r'\.([^\.]+)$', targetFile)
        if extMatch:
            if extMatch.group(1).lower() in txtExtentions:
                txt = True
            else:
                txt = False
        else:
            txt = False
    if txt:
        targetFile = targetFile + ".txt"
    # Add the full path
    fullTargetFile = targetDir + "/" + targetFile
    # store the results
    filesMap[displayName] = (sourceFile, fullTargetFile, targetFile)
################################
## Copy Files to New Location ##
################################
for displayName, Tuple in filesMap.iteritems():
    if options.noCopy: continue
    sourceFile = Tuple[0]
    fullTargetFile = Tuple[1]
    targetFile = Tuple[2]
    #################################
    # Copy the file to the log area #
    #################################
    if onHost:
        # We are on the host, so everything is local
        backup = flags.get ('backup')
        if backup:
            # back up any existing target before overwriting it
            cmd = "cd %s; %s %s >/dev/null 2>&1" % \
                  (targetDir, backup, targetFile)
            os.system (cmd)
        shutil.copy2 (sourceFile, fullTargetFile)
    else:
        # Remote copy via ssh/scp
        user = hostInfo.get ('user', "")
        if user:
            user = "-l " + user
        backup = hostInfo.get ('backupcmd', 0)
        if backup:
            cmd = "ssh %s %s \"cd %s; %s %s\" >/dev/null 2>&1" \
                  % (user, hostComputer, \
                     targetDir, \
                     backup, targetFile)
            os.system (cmd)
        cmd = "scp %s %s %s:%s" % (user, sourceFile, \
                                   hostComputer, fullTargetFile)
        os.system (cmd)
###############################
## Print out HTML references ##
###############################
print
if (options.asList):
    print "<ul>"
for displayName, Tuple in filesMap.iteritems():
    sourceFile = Tuple[0]
    fullTargetFile = Tuple[1]
    targetFile = Tuple[2]
    if (options.asList):
        print " <li>",
    # Get the extention
    extMatch = re.search (r'\.([^\.]+)$', targetFile)
    printed = False
    if extMatch:
        extention = extMatch.group(1).lower()
        if extention in imageExtentions:
            # images get an inline <img> tag
            print '<p><img src="%s"><br>' % targetFile
            printed = True
        elif extention in convertedToImageExtentions:
            # Do we have a file of the same name that was converted to an image?
            extReg = "%s%s" % (extention, "$")
            for imgExt in imageExtentions:
                # NOTE(review): re.sub's 4th positional argument is *count*,
                # not flags -- re.IGNORECASE is silently used as a replacement
                # count here; it should be passed as flags=re.IGNORECASE.
                imageFile = re.sub (extReg, imgExt, displayName, re.IGNORECASE)
                if imageFile in filesMap:
                    print '<p><a href="%s"><img src="%s"></a><br>' % \
                          (targetFile, imageFile)
                    printed = True
    if not printed:
        print '<a href="%s"><tt>%s</tt></a>' % (targetFile, displayName)
if (options.asList):
    print "</ul>"
|
import ret
import pandas
# coding=utf-8
import xml.dom.minidom
# NOTE(review): `import ret` is never used and looks like a typo (for `re`?);
# it raises ImportError unless a module named `ret` happens to be installed.
# NOTE(review): the `# coding=utf-8` declaration only takes effect on the
# first or second line of a file -- here it is inert.
# Collect every <string> element's text from string0..string2.xml.
L = []
for i in range(3):
    path = '../static/string' + str(i) + '.xml'
    # parse the xml document
    dom = xml.dom.minidom.parse(path)
    # document root element (unused below)
    root = dom.documentElement
    cc = dom.getElementsByTagName('string')
    for c in cc:
        L.append(c.firstChild.data)
# print(len(L))
# for l in L:
#     print(l)
# print("div---")
# Deduplicate the Android strings.
s = set(L)
# print(len(s))
# for tmp in s:
#     print(tmp)
# Merge in the iOS keyword list (one keyword per line).
with open('../static/keywords', encoding='utf-8') as f:
    result = f.read()
Lios = result.split('\n')
s_ios = set(Lios)
s = s | s_ios
L = list(s)
# Build a string index column "0".."N-1" for the DataFrame.
L2 = []
for i in range(len(L)):
    L2.append(str(i))
Lios = []
df = pandas.DataFrame(L, index=L2, columns=['中文'])
print(df)
# NOTE(review): hard-coded Windows output path.
df.to_excel('e:/tmp.xlsx')
|
def find_peak(nums):
    """Return the index of the largest element of a rotated ascending array
    (the last index when the array is not rotated)."""
    lo, hi = 0, len(nums) - 1
    # Not rotated (or a single element): the peak is the last entry.
    if nums[hi] >= nums[lo]:
        return hi
    while hi > lo:
        mid = (lo + hi) // 2
        # The peak lies in whichever half still starts below nums[mid].
        if nums[mid] >= nums[lo]:
            lo = mid
        else:
            hi = mid
        if hi - lo == 1:
            return hi if nums[hi] > nums[lo] else lo
    return lo
class Solution:
    """Search in a rotated sorted array (O(log n))."""
    def search(self, nums, target):
        """Return the index of *target* in the rotated ascending array *nums*,
        or -1 when absent.

        BUG FIX: the original signature annotated `nums: List[int]`, but
        `List` was never imported, so defining the class raised NameError at
        import time; the annotations are dropped (call signature unchanged).
        """
        if nums == []:
            return -1
        n = len(nums)
        start, end = 0, n-1
        # Rotated array: restrict the search to the half that can hold target.
        if nums[start] > nums[end]:
            peak = find_peak(nums)
            if target < nums[start]:
                start = peak + 1
            else:
                end = peak
        # Target outside the remaining sorted range cannot be present.
        if target < nums[start] or target > nums[end]:
            return -1
        # Biased binary search; the final two candidates are checked directly.
        while start < end:
            mid = (start + end) // 2
            if nums[mid] > target:
                end = mid
            else:
                start = mid
            if end - start == 1:
                if nums[start] == target:
                    return start
                if nums[end] == target:
                    return end
                return -1
        return start
|
#1 Problem: you have many small data-structure classes and don't want to write a repetitive __init__ for each.
#2 Solution:
import math
class Structure1:
    """Base class whose __init__ binds positional arguments to the attribute
    names a subclass declares in _fields, in order."""
    _fields = []

    def __init__(self, *args):
        if len(args) != len(self._fields):
            raise TypeError('Expected {} arguments'.format(len(self._fields)))
        # Pair each field name with its positional argument and assign it.
        for attr, val in zip(self._fields, args):
            setattr(self, attr, val)
# Subclass the base class to inherit the auto-generated __init__.
class Stock(Structure1):
    _fields = ['name', 'shares', 'price']
class Point(Structure1):
    _fields = ['x', 'y']
class Circle(Structure1):
    _fields = ['radius']
#使用这些类的示例
s = Stock('ACME', 50, 91.1)
p = Point(2,4)
c = Circle(4.5)
s2 = Stock('ACME',50) # 会抛出异常
|
import pygame
import pygame.rect
# Citation: https://www.pygame.org/docs/ref/sprite.html#pygame.sprite.Sprite
# I am using this for solving the collide problem, I learnt how to use it from this website.
class Tile(pygame.sprite.Sprite):
    """A draggable board tile backed by an image sprite.

    Position is tracked through ``self.rect``; the previous position is
    remembered so an invalid move (collision) can be rolled back.
    Board bounds of 512x640 px and a 128-px snap step are hard-coded below.
    """
    def __init__(self, color=0, width=0, height=0,position_x = 0,position_y = 0,file = None):
        # Load the tile image from *file* and place it at the given position.
        pygame.sprite.Sprite.__init__(self)
        self.width = width
        self.height = height
        self.color = color
        self.image = pygame.image.load(file)
        self.rect = self.image.get_rect()
        self.rect.x = position_x
        self.rect.y = position_y
        # Last known-good position, used to undo colliding moves.
        self.previous_x = 0
        self.previous_y = 0
    def check_collision(self,previous_pos,tile_group):
        # Roll the tile back to its previous position if it overlaps another
        # tile in *tile_group*.  Returns False on collision, None otherwise.
        # NOTE(review): the `previous_pos` parameter is unused — the rollback
        # uses self.previous_x/y instead; confirm whether it can be dropped.
        # Citation: https://www.pygame.org/docs/ref/sprite.html#pygame.sprite.spritecollide
        # learning how to use
        collision_list = pygame.sprite.spritecollide(self, tile_group, False)
        # The tile always collides with itself, so >1 means a real overlap.
        if len(collision_list) > 1:
            self.rect.x = self.previous_x
            self.rect.y = self.previous_y
            return False
    def update_pos(self, new_x, new_y,current_offset_x,current_offset_y):
        # Move to (new_x, new_y), then snap one 128-px step in the direction
        # implied by the click offset, clamped to the 512x640 board.
        self.previous_x = self.rect.x
        self.previous_y = self.rect.y
        self.rect.x = new_x
        self.rect.y = new_y
        # check for clicked position
        if current_offset_x < -self.width/4*3 and -self.height/4*3 < current_offset_y < -self.height/4:
            self.rect.x = self.previous_x + 128
        elif current_offset_x > -self.width/4 and -self.height/4*3 < current_offset_y < -self.height/4:
            self.rect.x = self.previous_x - 128
        elif current_offset_y < -self.height/4*3 and -self.width/4*3 < current_offset_x < -self.width/4 :
            self.rect.y = self.previous_y + 128
        # NOTE(review): this branch compares current_offset_y against
        # self.width/4 while the mirror branch above uses self.height —
        # looks like a copy-paste slip; confirm intended behavior.
        elif current_offset_y > -self.width/4 and -self.width/4*3 < current_offset_x < -self.width/4:
            self.rect.y = self.previous_y - 128
        # Clamp the tile inside the board.
        if self.rect.x + self.width >= 512:
            self.rect.x = 512 - self.width
        elif self.rect.x <= 0:
            self.rect.x = 0
        if self.rect.y + self.height >= 640:
            self.rect.y = 640 - self.height
        elif self.rect.y<=0:
            self.rect.y = 0
|
import os
from datetime import datetime
from flask import current_app
from flask import Blueprint
from flask import flash
from flask import g
from flask import redirect
from flask import render_template
from flask import request
from flask import url_for
from werkzeug.exceptions import abort
from rtracker.db import get_db
bp = Blueprint("reports", __name__, url_prefix='/reports')
@bp.route("/view")
def view():
    """List every report file found under REPORT_PATH and render the page."""
    reports = []
    for root, dirs, files in os.walk(current_app.config["REPORT_PATH"]):
        for file in files:
            report = {"full_path": os.path.join(root, file), "name": file}
            reports.append(report)
    # NOTE(review): debug print left in; consider logging instead.
    print(reports)
    return render_template("/reports/view.html", reports=reports)
@bp.route("/create")
def create():
    """Generate today's checked-out report, then redirect to the report list.

    Writes one ``item_id,location`` line per checked-out item to a dated
    file under ``REPORT_PATH``.  If nothing is checked out, no file is
    written and an explanatory message is flashed instead.
    """
    file_name = "checkedout-{}.txt".format(datetime.now().strftime("%Y%m%d"))
    report_file = os.path.join(current_app.config["REPORT_PATH"], file_name)
    error = None
    db = get_db()
    items = db.execute("select item_id, location from items where location != ''").fetchall()
    if not items:
        error = "Nothing checked out. No report generated."
    else:
        with open(report_file, "w") as f:
            for item in items:
                f.write("{},{}\n".format(item["item_id"], item["location"]))
    # Bug fix: only flash when there is an actual message — the original
    # unconditionally called flash(error), pushing a useless None entry
    # into the flashed-message queue on every successful report.
    if error is not None:
        flash(error)
    return redirect(url_for("reports.view"))
|
import bluetooth
# Scan for nearby discoverable Bluetooth devices (~8 second inquiry)
# and print each device's address and name.
print("performing inquiry...")
nearby_devices = bluetooth.discover_devices(
    duration=8, lookup_names=True, flush_cache=True, lookup_class=False)
print("found %d devices" % len(nearby_devices))
for addr, name in nearby_devices:
    try:
        print(" %s - %s" % (addr, name))
    except UnicodeEncodeError:
        # Terminal cannot encode the device name; fall back to UTF-8 bytes.
        print(" %s - %s" % (addr, name.encode('utf-8', 'replace')))
import logging
import time
import os
import pytest
from helpers.cluster import ClickHouseCluster
from helpers.utility import generate_values
from helpers.wait_for_helpers import wait_for_delete_inactive_parts
from helpers.wait_for_helpers import wait_for_delete_empty_parts
from pyhdfs import HdfsClient
# Absolute directory of this test module.
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
# Storage configuration the ClickHouse instance is started with.
CONFIG_PATH = os.path.join(
    SCRIPT_DIR, "./_instances/node/configs/config.d/storage_conf.xml"
)
def create_table(cluster, table_name, additional_settings=None):
    """Create a MergeTree table named *table_name* on the 'hdfs' storage policy.

    *additional_settings*, when given, is appended verbatim to the SETTINGS
    clause (e.g. ``"min_rows_for_wide_part=0"``).
    """
    node = cluster.instances["node"]
    statement = """
        CREATE TABLE {} (
            dt Date, id Int64, data String,
            INDEX min_max (id) TYPE minmax GRANULARITY 3
        ) ENGINE=MergeTree()
        PARTITION BY dt
        ORDER BY (dt, id)
        SETTINGS
            storage_policy='hdfs',
            old_parts_lifetime=0,
            index_granularity=512,
            temporary_directories_lifetime=1
        """.format(
        table_name
    )
    # Extra settings join the SETTINGS list, comma-separated.
    if additional_settings:
        statement += "," + additional_settings
    node.query(statement)
# Expected object counts on HDFS: a fixed per-table overhead plus a fixed
# number of files per data part, depending on the part format.  The
# assertions in the tests below are built from these constants.
FILES_OVERHEAD = 1
FILES_OVERHEAD_PER_COLUMN = 2  # Data and mark files
FILES_OVERHEAD_DEFAULT_COMPRESSION_CODEC = 1
FILES_OVERHEAD_METADATA_VERSION = 1
# Wide parts store each of the table's 3 columns in separate files.
FILES_OVERHEAD_PER_PART_WIDE = (
    FILES_OVERHEAD_PER_COLUMN * 3
    + 2
    + 6
    + FILES_OVERHEAD_DEFAULT_COMPRESSION_CODEC
    + FILES_OVERHEAD_METADATA_VERSION
)
# Compact parts store all columns together in a single file.
FILES_OVERHEAD_PER_PART_COMPACT = (
    10 + FILES_OVERHEAD_DEFAULT_COMPRESSION_CODEC + FILES_OVERHEAD_METADATA_VERSION
)
@pytest.fixture(scope="module")
def cluster():
    """Start a single-node ClickHouse cluster with HDFS and yield it.

    The cluster is shut down once all tests in the module have run.
    """
    try:
        cluster = ClickHouseCluster(__file__)
        cluster.add_instance(
            "node", main_configs=["configs/config.d/storage_conf.xml"], with_hdfs=True
        )
        logging.info("Starting cluster...")
        cluster.start()
        logging.info("Cluster started")
        # Pre-create the HDFS directory used by the storage policy.
        fs = HdfsClient(hosts=cluster.hdfs_ip)
        fs.mkdirs("/clickhouse")
        logging.info("Created HDFS directory")
        yield cluster
    finally:
        cluster.shutdown()
def wait_for_delete_hdfs_objects(cluster, expected, num_tries=30):
    """Poll until /clickhouse on HDFS holds exactly *expected* objects.

    Checks once per second, up to *num_tries* times, then asserts the
    final count.
    """
    fs = HdfsClient(hosts=cluster.hdfs_ip)
    for _ in range(num_tries):
        if len(fs.listdir("/clickhouse")) == expected:
            break
        time.sleep(1)
    assert len(fs.listdir("/clickhouse")) == expected
@pytest.fixture(autouse=True)
def drop_table(cluster):
    """Per-test cleanup: drop the test table and ensure HDFS is empty.

    Runs before each test (no yield): drops a leftover hdfs_test table,
    waits for its objects to disappear, and — if some remain — removes
    them manually so one failing test does not cascade into the rest.
    """
    node = cluster.instances["node"]
    fs = HdfsClient(hosts=cluster.hdfs_ip)
    hdfs_objects = fs.listdir("/clickhouse")
    print("Number of hdfs objects to delete:", len(hdfs_objects), sep=" ")
    node.query("DROP TABLE IF EXISTS hdfs_test SYNC")
    try:
        wait_for_delete_hdfs_objects(cluster, 0)
    finally:
        hdfs_objects = fs.listdir("/clickhouse")
        if len(hdfs_objects) == 0:
            return
        print(
            "Manually removing extra objects to prevent tests cascade failing: ",
            hdfs_objects,
        )
        for path in hdfs_objects:
            fs.delete(path)
@pytest.mark.parametrize(
    "min_rows_for_wide_part,files_per_part",
    [(0, FILES_OVERHEAD_PER_PART_WIDE), (8192, FILES_OVERHEAD_PER_PART_COMPACT)],
)
def test_simple_insert_select(cluster, min_rows_for_wide_part, files_per_part):
    """INSERT/SELECT round-trip; checks the HDFS object count per part format."""
    create_table(
        cluster,
        "hdfs_test",
        additional_settings="min_rows_for_wide_part={}".format(min_rows_for_wide_part),
    )
    node = cluster.instances["node"]
    values1 = generate_values("2020-01-03", 4096)
    node.query("INSERT INTO hdfs_test VALUES {}".format(values1))
    assert (
        node.query("SELECT * FROM hdfs_test order by dt, id FORMAT Values") == values1
    )
    fs = HdfsClient(hosts=cluster.hdfs_ip)
    hdfs_objects = fs.listdir("/clickhouse")
    print(hdfs_objects)
    assert len(hdfs_objects) == FILES_OVERHEAD + files_per_part
    # A second insert creates a second part with the same file overhead.
    values2 = generate_values("2020-01-04", 4096)
    node.query("INSERT INTO hdfs_test VALUES {}".format(values2))
    assert (
        node.query("SELECT * FROM hdfs_test ORDER BY dt, id FORMAT Values")
        == values1 + "," + values2
    )
    hdfs_objects = fs.listdir("/clickhouse")
    assert len(hdfs_objects) == FILES_OVERHEAD + files_per_part * 2
    assert (
        node.query("SELECT count(*) FROM hdfs_test where id = 1 FORMAT Values") == "(2)"
    )
def test_alter_table_columns(cluster):
    """ADD/MODIFY/DROP COLUMN; verifies data and HDFS object counts after each."""
    create_table(cluster, "hdfs_test")
    node = cluster.instances["node"]
    fs = HdfsClient(hosts=cluster.hdfs_ip)
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-03", 4096))
    )
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(
            generate_values("2020-01-03", 4096, -1)
        )
    )
    node.query("ALTER TABLE hdfs_test ADD COLUMN col1 UInt64 DEFAULT 1")
    # To ensure parts have merged
    node.query("OPTIMIZE TABLE hdfs_test")
    assert node.query("SELECT sum(col1) FROM hdfs_test FORMAT Values") == "(8192)"
    assert (
        node.query("SELECT sum(col1) FROM hdfs_test WHERE id > 0 FORMAT Values")
        == "(4096)"
    )
    # The new column adds one column's worth of files to the merged part.
    wait_for_delete_hdfs_objects(
        cluster,
        FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE + FILES_OVERHEAD_PER_COLUMN,
    )
    node.query(
        "ALTER TABLE hdfs_test MODIFY COLUMN col1 String",
        settings={"mutations_sync": 2},
    )
    assert node.query("SELECT distinct(col1) FROM hdfs_test FORMAT Values") == "('1')"
    # and file with mutation
    wait_for_delete_hdfs_objects(
        cluster,
        FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE + FILES_OVERHEAD_PER_COLUMN + 1,
    )
    node.query("ALTER TABLE hdfs_test DROP COLUMN col1", settings={"mutations_sync": 2})
    # and 2 files with mutations
    wait_for_delete_hdfs_objects(
        cluster, FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE + 2
    )
def test_attach_detach_partition(cluster):
    """DETACH/ATTACH/DROP PARTITION; HDFS object counts must follow the data."""
    create_table(cluster, "hdfs_test")
    node = cluster.instances["node"]
    fs = HdfsClient(hosts=cluster.hdfs_ip)
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-03", 4096))
    )
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-04", 4096))
    )
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(8192)"
    hdfs_objects = fs.listdir("/clickhouse")
    assert len(hdfs_objects) == FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE * 2
    # Detaching keeps the part's data files, minus its metadata-version file.
    node.query("ALTER TABLE hdfs_test DETACH PARTITION '2020-01-03'")
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(4096)"
    wait_for_delete_empty_parts(node, "hdfs_test")
    wait_for_delete_inactive_parts(node, "hdfs_test")
    wait_for_delete_hdfs_objects(
        cluster,
        FILES_OVERHEAD
        + FILES_OVERHEAD_PER_PART_WIDE * 2
        - FILES_OVERHEAD_METADATA_VERSION,
    )
    node.query("ALTER TABLE hdfs_test ATTACH PARTITION '2020-01-03'")
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(8192)"
    hdfs_objects = fs.listdir("/clickhouse")
    assert len(hdfs_objects) == FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE * 2
    node.query("ALTER TABLE hdfs_test DROP PARTITION '2020-01-03'")
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(4096)"
    wait_for_delete_empty_parts(node, "hdfs_test")
    wait_for_delete_inactive_parts(node, "hdfs_test")
    wait_for_delete_hdfs_objects(cluster, FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE)
    # Dropping a detached partition must also free its objects.
    node.query("ALTER TABLE hdfs_test DETACH PARTITION '2020-01-04'")
    node.query(
        "ALTER TABLE hdfs_test DROP DETACHED PARTITION '2020-01-04'",
        settings={"allow_drop_detached": 1},
    )
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(0)"
    wait_for_delete_empty_parts(node, "hdfs_test")
    wait_for_delete_inactive_parts(node, "hdfs_test")
    wait_for_delete_hdfs_objects(cluster, FILES_OVERHEAD)
def test_move_partition_to_another_disk(cluster):
    """MOVE PARTITION between hdfs and hdd disks; data stays readable throughout."""
    create_table(cluster, "hdfs_test")
    node = cluster.instances["node"]
    fs = HdfsClient(hosts=cluster.hdfs_ip)
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-03", 4096))
    )
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-04", 4096))
    )
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(8192)"
    hdfs_objects = fs.listdir("/clickhouse")
    assert len(hdfs_objects) == FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE * 2
    # Moving a partition off HDFS removes that part's objects there.
    node.query("ALTER TABLE hdfs_test MOVE PARTITION '2020-01-04' TO DISK 'hdd'")
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(8192)"
    hdfs_objects = fs.listdir("/clickhouse")
    assert len(hdfs_objects) == FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE
    # Moving it back restores them.
    node.query("ALTER TABLE hdfs_test MOVE PARTITION '2020-01-04' TO DISK 'hdfs'")
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(8192)"
    hdfs_objects = fs.listdir("/clickhouse")
    assert len(hdfs_objects) == FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE * 2
def test_table_manipulations(cluster):
    """RENAME/CHECK/DETACH/ATTACH/TRUNCATE must preserve (then free) HDFS data."""
    create_table(cluster, "hdfs_test")
    node = cluster.instances["node"]
    fs = HdfsClient(hosts=cluster.hdfs_ip)
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-03", 4096))
    )
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-04", 4096))
    )
    node.query("RENAME TABLE hdfs_test TO hdfs_renamed")
    assert node.query("SELECT count(*) FROM hdfs_renamed FORMAT Values") == "(8192)"
    hdfs_objects = fs.listdir("/clickhouse")
    assert len(hdfs_objects) == FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE * 2
    node.query("RENAME TABLE hdfs_renamed TO hdfs_test")
    assert node.query("CHECK TABLE hdfs_test FORMAT Values") == "(1)"
    node.query("DETACH TABLE hdfs_test")
    node.query("ATTACH TABLE hdfs_test")
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(8192)"
    hdfs_objects = fs.listdir("/clickhouse")
    assert len(hdfs_objects) == FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE * 2
    # TRUNCATE removes all parts; only the per-table overhead remains.
    node.query("TRUNCATE TABLE hdfs_test")
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(0)"
    wait_for_delete_empty_parts(node, "hdfs_test")
    wait_for_delete_inactive_parts(node, "hdfs_test")
    wait_for_delete_hdfs_objects(cluster, FILES_OVERHEAD)
def test_move_replace_partition_to_another_table(cluster):
    """MOVE/REPLACE PARTITION across tables; HDFS counts must track shared data."""
    create_table(cluster, "hdfs_test")
    node = cluster.instances["node"]
    fs = HdfsClient(hosts=cluster.hdfs_ip)
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-03", 4096))
    )
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-04", 4096))
    )
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(
            generate_values("2020-01-05", 4096, -1)
        )
    )
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(
            generate_values("2020-01-06", 4096, -1)
        )
    )
    # Positive and negative partitions cancel out: sum(id) == 0.
    assert node.query("SELECT sum(id) FROM hdfs_test FORMAT Values") == "(0)"
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(16384)"
    hdfs_objects = fs.listdir("/clickhouse")
    assert len(hdfs_objects) == FILES_OVERHEAD + FILES_OVERHEAD_PER_PART_WIDE * 4
    create_table(cluster, "hdfs_clone")
    node.query("ALTER TABLE hdfs_test MOVE PARTITION '2020-01-03' TO TABLE hdfs_clone")
    node.query("ALTER TABLE hdfs_test MOVE PARTITION '2020-01-05' TO TABLE hdfs_clone")
    assert node.query("SELECT sum(id) FROM hdfs_test FORMAT Values") == "(0)"
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(8192)"
    assert node.query("SELECT sum(id) FROM hdfs_clone FORMAT Values") == "(0)"
    assert node.query("SELECT count(*) FROM hdfs_clone FORMAT Values") == "(8192)"
    # Number of objects in HDFS should be unchanged.
    hdfs_objects = fs.listdir("/clickhouse")
    for obj in hdfs_objects:
        print("Object in HDFS after move", obj)
    wait_for_delete_hdfs_objects(
        cluster,
        FILES_OVERHEAD * 2
        + FILES_OVERHEAD_PER_PART_WIDE * 4
        - FILES_OVERHEAD_METADATA_VERSION * 2,
    )
    # Add new partitions to source table, but with different values and replace them from copied table.
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(
            generate_values("2020-01-03", 4096, -1)
        )
    )
    node.query(
        "INSERT INTO hdfs_test VALUES {}".format(generate_values("2020-01-05", 4096))
    )
    assert node.query("SELECT sum(id) FROM hdfs_test FORMAT Values") == "(0)"
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(16384)"
    hdfs_objects = fs.listdir("/clickhouse")
    for obj in hdfs_objects:
        print("Object in HDFS after insert", obj)
    wait_for_delete_hdfs_objects(
        cluster,
        FILES_OVERHEAD * 2
        + FILES_OVERHEAD_PER_PART_WIDE * 6
        - FILES_OVERHEAD_METADATA_VERSION * 2,
    )
    node.query("ALTER TABLE hdfs_test REPLACE PARTITION '2020-01-03' FROM hdfs_clone")
    node.query("ALTER TABLE hdfs_test REPLACE PARTITION '2020-01-05' FROM hdfs_clone")
    assert node.query("SELECT sum(id) FROM hdfs_test FORMAT Values") == "(0)"
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(16384)"
    assert node.query("SELECT sum(id) FROM hdfs_clone FORMAT Values") == "(0)"
    assert node.query("SELECT count(*) FROM hdfs_clone FORMAT Values") == "(8192)"
    # Wait for outdated partitions deletion.
    wait_for_delete_hdfs_objects(
        cluster,
        FILES_OVERHEAD * 2
        + FILES_OVERHEAD_PER_PART_WIDE * 4
        - FILES_OVERHEAD_METADATA_VERSION * 2,
    )
    node.query("DROP TABLE hdfs_clone SYNC")
    assert node.query("SELECT sum(id) FROM hdfs_test FORMAT Values") == "(0)"
    assert node.query("SELECT count(*) FROM hdfs_test FORMAT Values") == "(16384)"
    # Data should remain in hdfs
    hdfs_objects = fs.listdir("/clickhouse")
    for obj in hdfs_objects:
        print("Object in HDFS after drop", obj)
    wait_for_delete_hdfs_objects(
        cluster,
        FILES_OVERHEAD
        + FILES_OVERHEAD_PER_PART_WIDE * 4
        - FILES_OVERHEAD_METADATA_VERSION * 2,
    )
|
# coding: utf-8
# data_sender_node.py
import multiprocessing as mp
from node import Node
class DataSenderNode(Node):
    """
    Base class for nodes that read and send data.
    """
    def __init__(self, process_manager, msg_queue):
        """Constructor."""
        super().__init__(process_manager, msg_queue)
        # Process that handles this node's input.
        self.initialize_process_handler()
    def initialize_process_handler(self):
        """Create the process that handles this node's input."""
        # The dictionary holding the node state (state_dict) is shared
        # between processes, so it is not passed as a method argument.
        self.process_handler = mp.Process(target=self.update, args=())
        # Mark as a daemon process so the child is terminated
        # when the parent process exits.
        self.process_handler.daemon = True
    def update(self):
        """Process input and update the node state (must be overridden)."""
        raise NotImplementedError()
    def run(self):
        """Start running the node."""
        self.process_handler.start()
|
# Declare a few floating-point values.
x = 7.0
y = 21.0
z = 45.0

# Perform the basic arithmetic operations on the floats.
addInt = x + z
subtractInt = z - y
multiplyInt = x * y
divideInt = y / x
modInt = z % x
powerInt = y ** z
everythingInt = (y ** 7) / ((z + y) - (-(y * x)))

# Show each computed value on its own line.
for value in (addInt, subtractInt, multiplyInt, divideInt,
              modInt, powerInt, everythingInt):
    print(value)
print()

# Demonstrate how many digits a float division keeps.
result = 444 / 777
print(result)
# Width 1, 3 decimal places; the ' ' flag prints a space before positives.
print("The rounded result is{r: 1.3f}".format(r = result))
def rot13(mess):
    """Encrypt *mess* with the ROT13 cipher.

    Only lowercase a-z and spaces are supported: spaces pass through
    unchanged and each letter is shifted 13 places with wrap-around.
    Any other character raises ValueError (from str.index).

    The source was truncated mid-statement; the wrap-around branch and
    the return statement are completed here.
    """
    alphabet = 'abcdefghijklmnopqrstuvwxyz'
    encrypted = ''
    for char in mess:
        if char == ' ':
            encrypted = encrypted + ' '
        else:
            rotated_index = alphabet.index(char) + 13
            if rotated_index < 26:
                encrypted = encrypted + alphabet[rotated_index]
            else:
                # Wrap past 'z' back to the start of the alphabet.
                encrypted = encrypted + alphabet[rotated_index - 26]
    return encrypted
# Demonstrate combining two comparisons with the `and` / `or` operators.
name = 'raj'
age = 19

# `and` needs both sides to be true; age is 19, so this branch fails.
both_match = name == 'raj' and age == 29
if both_match:
    print("condition is true ")
else:
    print("condition is false")

name = "rahul"
age = 23

# `or` needs only one side to be true; the age comparison succeeds.
either_match = name == "ram" or age == 23
if either_match:
    print("Conditiopn is true .....")
else:
    print("Condition is false ...")
|
# Generated by Django 2.2.5 on 2019-10-14 14:59
import STTEAPI.models
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial auto-generated schema migration for the STTEAPI app.

    Creates the Administrador, Alumno, Carta, Paso, Proceso, Usuario,
    Tramitealumno, Documento and CartaAlumno tables plus their
    foreign-key links.  Generated by Django — do not edit by hand.
    """

    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Administrador',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(blank=True, max_length=100, null=True)),
            ],
            options={
                'db_table': 'Administrador',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='Alumno',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('matricula', models.CharField(blank=True, max_length=100)),
                ('nombre', models.CharField(blank=True, max_length=100, null=True)),
                ('siglas_carrera', models.CharField(blank=True, max_length=100, null=True)),
                ('carrera', models.CharField(blank=True, max_length=100, null=True)),
                ('semestre', models.IntegerField(blank=True, null=True)),
                ('periodo_de_aceptacion', models.CharField(blank=True, max_length=100, null=True)),
                ('posible_graduacion', models.CharField(blank=True, max_length=100, null=True)),
                ('fecha_de_nacimiento', models.CharField(blank=True, max_length=100, null=True)),
                ('nacionalidad', models.CharField(blank=True, max_length=100, null=True)),
            ],
            options={
                'db_table': 'Alumno',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='Carta',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(blank=True, max_length=255, null=True)),
                ('descripcion', models.TextField(blank=True, null=True)),
                ('fecha_creacion', models.DateTimeField(auto_now_add=True, null=True)),
                ('fecha_modificacion', models.DateTimeField(auto_now_add=True, null=True)),
                ('administrador', models.ForeignKey(db_column='administrador', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Administrador')),
            ],
            options={
                'db_table': 'Carta',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='Paso',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(blank=True, max_length=100, null=True)),
                ('numero', models.IntegerField(blank=True, null=True)),
                ('mostrar', models.BooleanField(blank=True, null=True)),
            ],
            options={
                'db_table': 'Paso',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='Proceso',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(blank=True, max_length=100, null=True)),
                ('num_pasos', models.IntegerField(blank=True, null=True)),
                ('fecha_creacion', models.DateTimeField(auto_now_add=True, null=True)),
                ('fecha_modificacion', models.DateTimeField(auto_now_add=True, null=True)),
                ('administrador', models.ForeignKey(db_column='administrador', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Administrador')),
            ],
            options={
                'db_table': 'Proceso',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='Usuario',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.CharField(max_length=100, unique=True)),
                ('password', models.CharField(max_length=100)),
                ('last_login', models.DateTimeField(blank=True, null=True)),
                ('is_superuser', models.BooleanField(default=False)),
                ('is_staff', models.BooleanField(default=False)),
                ('is_active', models.BooleanField(default=True)),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now)),
                ('es_admin', models.BooleanField(default=False)),
                ('es_alumno', models.BooleanField(default=False)),
            ],
            options={
                'db_table': 'Usuario',
                'managed': True,
            },
            managers=[
                ('objects', STTEAPI.models.MyUserManager()),
            ],
        ),
        migrations.CreateModel(
            name='Tramitealumno',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fecha_creacion', models.DateTimeField(auto_now_add=True, null=True)),
                ('fecha_modificacion', models.DateTimeField(auto_now_add=True, null=True)),
                ('numero_ticket', models.IntegerField(default=0, unique=True)),
                ('encuesta', models.IntegerField(default=0)),
                ('administrador', models.ForeignKey(db_column='administrador', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Administrador')),
                ('alumno', models.ForeignKey(db_column='alumno', on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Alumno')),
                ('paso', models.ForeignKey(db_column='paso', on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Paso')),
                ('proceso', models.ForeignKey(db_column='proceso', on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Proceso')),
            ],
            options={
                'db_table': 'TramiteAlumno',
                'managed': True,
            },
        ),
        migrations.AddField(
            model_name='paso',
            name='proceso',
            field=models.ForeignKey(db_column='proceso', on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Proceso'),
        ),
        migrations.CreateModel(
            name='Documento',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(blank=True, max_length=100, null=True)),
                ('fecha', models.DateTimeField(blank=True, default=django.utils.timezone.now, null=True)),
                ('contenido', models.TextField(blank=True, null=True)),
                ('administrador', models.ForeignKey(db_column='administrador', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Administrador')),
            ],
            options={
                'db_table': 'Documento',
                'managed': True,
            },
        ),
        migrations.CreateModel(
            name='CartaAlumno',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('fecha_creacion', models.DateTimeField(auto_now_add=True, null=True)),
                ('fecha_terminacion', models.DateTimeField(auto_now_add=True, null=True)),
                ('administrador', models.ForeignKey(db_column='administrador', null=True, on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Administrador')),
                ('alumno', models.ForeignKey(db_column='alumno', on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Alumno')),
                ('carta', models.ForeignKey(db_column='carta', on_delete=django.db.models.deletion.DO_NOTHING, to='STTEAPI.Carta')),
            ],
            options={
                'db_table': 'CartaAlumno',
                'managed': True,
            },
        ),
        migrations.AddField(
            model_name='alumno',
            name='usuario',
            field=models.ForeignKey(db_column='usuario', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='administrador',
            name='usuario',
            field=models.ForeignKey(db_column='usuario', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
#Leetcode 739. Daily Temperatures
#Monotonic Stack - build a decreasing stack while finding next greater/larger element
class Solution:
    def dailyTemperatures(self, T: List[int]) -> List[int]:
        """For each day, return how many days until a warmer temperature.

        Maintains a monotonically decreasing stack of day indices; when a
        warmer temperature arrives, every colder day still on the stack is
        resolved with its waiting distance.  Days with no warmer day ahead
        keep the default 0.
        """
        if not T:
            return [0]
        answer = [0] * len(T)
        pending = []  # indices of days still waiting for a warmer day
        for day, temp in enumerate(T):
            while pending and T[pending[-1]] < temp:
                prev = pending.pop()
                answer[prev] = day - prev
            pending.append(day)
        return answer
from flask import request, jsonify, abort
from flask_restful import Resource, reqparse
from carstore3000.models.cars import CarModel, CarSchema
# Query/body argument parser shared by all CarsRoutes verbs.  Each `type`
# must be a callable used to convert the raw value.
parser = reqparse.RequestParser()
parser.add_argument("car_id", type=int)
parser.add_argument("color_slug", type=str)
parser.add_argument("door_count", type=int)
parser.add_argument("engine_displacement", type=int)
parser.add_argument("engine_power", type=int)
parser.add_argument("fuel_type", type=str)
parser.add_argument("maker", type=str)
parser.add_argument("manufacture_year", type=int)
parser.add_argument("mileage", type=int)
# Bug fix: `type` was the string "str", not the callable `str`; reqparse
# calls type(value), and a plain string is not callable, so any request
# supplying `model` would fail to parse.
parser.add_argument("model", type=str)
parser.add_argument("price_eur", type=float)
parser.add_argument("seat_count", type=int)
parser.add_argument("transmission", type=str)
class CarsRoutes(Resource):
    """REST resource exposing CRUD endpoints for cars."""
    # TODO authentication
    def post(self):
        """Accept a JSON car payload.  (Persistence not implemented yet.)"""
        request.get_json(force=True)
        args = parser.parse_args()
        return jsonify({"status": "success"})
    def get(self):
        """Return all cars, optionally filtered by any parsed argument."""
        # TODO: force json even for empty query
        # request.get_json(force=True)
        args = parser.parse_args()
        # Drop arguments the client did not supply so filter_by() only
        # receives actual filters.
        args = {k: v for k, v in args.items() if v is not None}
        schema = CarSchema()
        if args:
            cars = CarModel.query.filter_by(**args).all()
        else:
            cars = CarModel.query.all()
        if not cars:
            res = {}
        else:
            res = [schema.dump(i) for i in cars]
        return jsonify(res)
    # TODO authentication
    def put(self):
        """Update a car.  (Not implemented yet.)"""
        return jsonify({"status": "success"})
    # TODO authentication
    def delete(self):
        """Delete a car.  (Not implemented yet.)"""
        return jsonify({"status": "success"})
|
import os, random, discord, sys, getopt, time
from dotenv import load_dotenv
from urllib.request import urlopen, Request
from discord.ext.commands import Bot
from discord.ext import commands
load_dotenv()
def main(argv):
client = discord.Client()
# intents = discord.Intents().all()
bot = commands.Bot(command_prefix="t!")
TOKEN = sys.argv[1]
print(TOKEN)
# print('please error') (debug)
@client.event
async def on_ready():
# print('made it to client.event') (debug)
await client.change_presence(activity=discord.Game('with kick commands lmao | t!help'))
print("We have logged in as {}".format(client))
@client.event
async def on_message(message):
# print('made it to on_message') (for debug)
## if message.content.startswith('t!numberguess'):
## player = message.author
## def check(m):
## print('checking new message')
## return message.content.startswith('yes') and m .author == player
## print(player)
## await message.channel.send('Discord, yes or no?')
## msg = await client.wait_for('message', check = check, timeout=60)
## await message.channel.send('poopybutt -nieko')
#@bot.command()
#async def join(ctx):
#channel = ctx.author.voice.channel
#await channel.connect()
#@bot.command()
#async def leave(ctx):
#await ctx.voice_client.disconnect()
if message.content == 't!check':
for member in ctx.guild.members:
id = member.id
await message.channel.send(id)
if message.content == 't!kick':
@client.command()
@client.command.has_permissions(administrator=True)
async def kick(ctx, member: discord.Member):
await member.kick()
await ctx.send(f"{member.name} has been kicked by {ctx.author.name}!")
await log_channel.send(f"{ctx.author.name} has kicked {member.display_name}")
#doing respond thing when you dont have admin
##@client.command()
##elif @client.command.has_permissions(administrator=False):
##await message.channel.send("You need to be an admin to use this command")
if message.content == 't!help':
info = discord.Embed(title="Commands list:", description="All existing commands", color=0x28e038)
info.add_field(name=":star_struck:`t!inspiration`", value="makes you go :O", inline=False)
info.add_field(name=":1234:`t!randommath`", value="random math. adds numbers -45000-45000", inline=True)
info.add_field(name=":thinking:`t!randomquotes`", value="random quotes, made by yours truly", inline=True)
info.add_field(name=":ab:`t!cvf y=mx+b`", value="replace y=mx+b with numbers (Don't make y or x a number, and don't add spaces)", inline=False)
info.add_field(name=":ping_pong:`t!ping`", value="would send the time it takes to run the code but it doeSNT WORK", inline=False)
info.set_footer(text='[sumbit ideas and issues](https://github.com/zTheroy/theroy-discord-bot/issues)')
await message.channel.send(embed=info)
if message.content == 't!randomquotes':
response = random.choice(random_quotes)
await message.channel.send(response)
if message.content == 't!randommath':
lst = ['+', '-', '*', '/']
math_embed = discord.Embed(title="math", description="math answer", color=0x11d43b)
x = random.randint(-45000,45000)
y = random.randint(-45000,45000)
random_operator=random.randint(0,len(lst)-1)
if lst[random_operator] == '+':
answer = x + y
math_output = '{} + {} = {}'.format(x,y,answer)
elif lst[random_operator] == '-':
answer = x - y
math_output = '{} - {} = {}'.format(x,y,answer)
elif lst[random_operator] == '*':
answer = x * y
math_output = '{} * {} = {}'.format(x,y,answer)
elif lst[random_operator] == '/':
answer = x / y
math_output = '{} / {} = {}'.format(x,y,answer)
elif x < 0 and y < 0 and lst[random_operator] == '-':
y = abs(y)
answer = x + y
math_output = '{} + {} = {}'.format(x,y,answer)
print(y)
math_embed.add_field(name="oh my god its your math answer", value=math_output, inline=False)
await message.channel.send(embed=math_embed)
if message.content == 't!inspiration':
random_quote = Request('https://api.forismatic.com/api/1.0/?method=getQuote&lang=en&format=jsonp&jsonp=?', headers={'User-Agent': 'Mozilla/5.0'})
page = urlopen(random_quote)
html_bytes = page.read()
html = html_bytes.decode("utf-8")
#print(html)
html = html.split(":")
inspiration = html[1].split("quoteAuthor")
inspiration = inspiration[0]
inspiration = inspiration[:-3]
await message.channel.send(inspiration)
if message.content.startswith('t!cvf'):
message_content = message.content.split()
arg = message_content[1]
lst = arg.split("=")
for x in lst:
if "+" in x:
y = lst[0]
temp=lst[1].split("+")
mx = temp[0]
answer = temp[1]
elif "-" in x:
y = lst[0]
temp=lst[1].split("-")
mx = temp[0]
answer = temp[1]
if "+" in arg:
cvf_answer="Youre ansswer do be: {} - {} = {} :flushed:".format(mx, y, answer)
else:
cvf_answer="Youre ansssswer do be: {} + {} = {} :flushed:".format(mx, y, answer)
await message.channel.send(cvf_answer)
if message.content.startswith('t!ping'):
start = time.time()
latency = message.channel.created_at
end = time.time()
execution_time = end - start
#embedding execution time and latency under
ping_title = discord.Embed(title="pong :ping_pong:",color=0x005ef5)
ping_title.add_field(name="Execution Time", value=(f"{execution_time} seconds"), inline=False)
ping_title.add_field(name="Latency", value=str(round(client.latency*1000))+" ms", inline=False)
await message.channel.send(embed=ping_title)
# Start the Discord client's event loop (blocks until the client shuts down).
client.run(TOKEN)

# NOTE(review): this guard sits after client.run(), which does not return
# until the bot stops, so main() would only run after shutdown — confirm
# whether it was meant to run first.
if __name__ == "__main__":
    main(sys.argv[1:])
|
# ___________________________________________________________________________
#
# Parapint
# Copyright (c) 2020
# National Technology and Engineering Solutions of Sandia, LLC
# Under the terms of Contract DE-NA0003525 with National Technology and
# Engineering Solutions of Sandia, LLC, the U.S. Government retains certain
# rights in this software.
# This software is distributed under the 3-clause BSD License.
# ___________________________________________________________________________
import pyomo.environ as pe
from pyomo import dae
import parapint
import numpy as np
from mpi4py import MPI
import math
import logging
import argparse
from pyomo.common.timing import HierarchicalTimer
import csv
import os
"""
Run this example with, e.g.,
mpirun -np 4 python -m mpi4py burgers.py --nfe_x 30 --end_t 4 --nfe_t_per_t 1600 --nblocks 4 --method psc
"""
# Module-level MPI state shared by all functions in this example.
comm: MPI.Comm = MPI.COMM_WORLD
rank = comm.Get_rank()  # index of this process within the communicator
size = comm.Get_size()  # total number of MPI processes

# Configure logging on rank 0 only so parallel runs do not interleave output.
if rank == 0:
    logging.basicConfig(level=logging.INFO)
class Args(object):
    """Command-line configuration for the Burgers example.

    Instances start with hard-coded defaults; calling
    :meth:`parse_arguments` overwrites them from ``sys.argv``.
    """

    def __init__(self):
        # Defaults used when parse_arguments() is never invoked.
        self.nfe_x = 50
        self.nfe_t = 200
        self.end_t = 1
        self.nblocks = 4
        self.method = 'psc'

    def parse_arguments(self):
        """Populate the attributes from command-line flags.

        Raises
        ------
        ValueError
            If ``--method`` is neither ``psc`` nor ``fs``.
        """
        p = argparse.ArgumentParser()
        p.add_argument('--nfe_x', type=int, required=True, help='number of finite elements for x')
        p.add_argument('--end_t', type=int, required=True, help='end time')
        p.add_argument('--nfe_t_per_t', type=int, required=False, default=1600, help='number of finite elements for t per unit time')
        p.add_argument('--method', type=str, required=True, help='either "psc" for parallel schur-complement or "fs" for full-space serial')
        p.add_argument('--nblocks', type=int, required=True, help='number of time blocks for schur complement')
        parsed = p.parse_args()

        self.method = parsed.method
        self.nblocks = parsed.nblocks
        self.nfe_x = parsed.nfe_x
        self.end_t = parsed.end_t
        # Total time elements scale with the horizon length.
        self.nfe_t = parsed.nfe_t_per_t * parsed.end_t
        if self.method not in {'psc', 'fs'}:
            raise ValueError('method should be either "psc" for parallel schur-complement or "fs" for full-space serial')
def build_burgers_model(nfe_x=50, nfe_t=50, start_t=0, end_t=1, add_init_conditions=True):
    """Build a discretized Pyomo.DAE optimal-control model of the Burgers PDE.

    The state ``y(x, t)`` follows the (viscous) Burgers equation on
    ``x in [0, 1]`` and the control ``u(x, t)`` is chosen to track the
    target profile ``y0`` while penalizing control effort.

    Parameters
    ----------
    nfe_x: int
        Number of spatial finite elements.
    nfe_t: int
        Number of temporal finite elements for this (sub)model.
    start_t: float
        Start of the time horizon covered by this (sub)model.
    end_t: float
        End of the time horizon covered by this (sub)model.
    add_init_conditions: bool
        If True, constrain y and u at ``start_t``. Time blocks other than
        the first pass False — presumably so coupling constraints link the
        blocks instead (see BurgersInterface) — TODO confirm.

    Returns
    -------
    m: pe.Block
        Concrete Pyomo block with objective ``m.obj``.
    """
    dt = (end_t - start_t) / float(nfe_t)
    start_x = 0
    end_x = 1
    dx = (end_x - start_x) / float(nfe_x)

    m = pe.Block(concrete=True)
    m.omega = pe.Param(initialize=0.02)  # control-effort weight in the objective
    m.v = pe.Param(initialize=0.01)      # coefficient on the second spatial derivative (viscosity)
    m.r = pe.Param(initialize=0)         # constant source term in the PDE
    m.x = dae.ContinuousSet(bounds=(start_x, end_x))
    m.t = dae.ContinuousSet(bounds=(start_t, end_t))

    m.y = pe.Var(m.x, m.t)                           # state
    m.dydt = dae.DerivativeVar(m.y, wrt=m.t)
    m.dydx = dae.DerivativeVar(m.y, wrt=m.x)
    m.dydx2 = dae.DerivativeVar(m.y, wrt=(m.x, m.x))
    m.u = pe.Var(m.x, m.t)                           # control

    # Target profile: 1 * round(cos(2*pi*t)) on the left half of the domain
    # (a sign that flips over time), zero on the right half.
    def _y_init_rule(m, x, t):
        if x <= 0.5 * end_x:
            return 1 * round(math.cos(2*math.pi*t))
        return 0

    m.y0 = pe.Param(m.x, m.t, default=_y_init_rule)

    # Spatial boundary conditions: y and u vanish at both ends of the domain.
    def _upper_x_bound(m, t):
        return m.y[end_x, t] == 0
    m.upper_x_bound = pe.Constraint(m.t, rule=_upper_x_bound)

    def _lower_x_bound(m, t):
        return m.y[start_x, t] == 0
    m.lower_x_bound = pe.Constraint(m.t, rule=_lower_x_bound)

    def _upper_x_ubound(m, t):
        return m.u[end_x, t] == 0
    m.upper_x_ubound = pe.Constraint(m.t, rule=_upper_x_ubound)

    def _lower_x_ubound(m, t):
        return m.u[start_x, t] == 0
    m.lower_x_ubound = pe.Constraint(m.t, rule=_lower_x_ubound)

    # Initial conditions at start_t; interior x points only (the spatial
    # boundary points are already fixed above).
    def _lower_t_bound(m, x):
        if x == start_x or x == end_x:
            return pe.Constraint.Skip
        return m.y[x, start_t] == m.y0[x, start_t]

    def _lower_t_ubound(m, x):
        if x == start_x or x == end_x:
            return pe.Constraint.Skip
        return m.u[x, start_t] == 0

    if add_init_conditions:
        m.lower_t_bound = pe.Constraint(m.x, rule=_lower_t_bound)
        m.lower_t_ubound = pe.Constraint(m.x, rule=_lower_t_ubound)

    # PDE
    # Burgers dynamics with the control applied from the previous time
    # point (m.u[x, last_t]); skipped at the initial time and on the
    # spatial boundary where other constraints apply.
    def _pde(m, x, t):
        if t == start_t or x == end_x or x == start_x:
            e = pe.Constraint.Skip
        else:
            last_t = m.t.prev(t)
            e = m.dydt[x, t] - m.v * m.dydx2[x, t] + m.dydx[x, t] * m.y[x, t] == m.r + m.u[x, last_t]
        return e
    m.pde = pe.Constraint(m.x, m.t, rule=_pde)

    # Discretize Model
    disc = pe.TransformationFactory('dae.finite_difference')
    disc.apply_to(m, nfe=nfe_t, wrt=m.t, scheme='BACKWARD')
    disc.apply_to(m, nfe=nfe_x, wrt=m.x, scheme='CENTRAL')

    # Solve control problem using Pyomo.DAE Integrals
    # Tracking error plus control penalty, integrated first over x then t.
    def _intX(m, x, t):
        return (m.y[x, t] - m.y0[x, t]) ** 2 + m.omega * m.u[x, t] ** 2
    m.intX = dae.Integral(m.x, m.t, wrt=m.x, rule=_intX)

    def _intT(m, t):
        return m.intX[t]
    m.intT = dae.Integral(m.t, wrt=m.t, rule=_intT)

    # Objective: half the integral plus an extra penalty on the control at
    # start_t for interior x points (the Integral rules above do not see a
    # control before the first PDE time point).
    def _obj(m):
        e = 0.5 * m.intT
        for x in sorted(m.x):
            if x == start_x or x == end_x:
                pass
            else:
                e += 0.5 * 0.5 * dx * dt * m.omega * m.u[x, start_t] ** 2
        return e
    m.obj = pe.Objective(rule=_obj)

    return m
class BurgersInterface(parapint.interfaces.MPIDynamicSchurComplementInteriorPointInterface):
    """Time-block interface for the Burgers problem.

    Supplies one Burgers sub-model per time block and declares which
    variables couple consecutive blocks (the state y at the block
    boundaries, interior x points only).
    """

    def __init__(self, start_t, end_t, num_time_blocks, nfe_t, nfe_x):
        # nfe_x: spatial discretization shared by every time block.
        self.nfe_x = nfe_x
        # dt: global time step; each block derives its own nfe_t from it.
        self.dt = (end_t - start_t) / float(nfe_t)
        # NOTE(review): these attributes are set before calling super()
        # because the base constructor presumably invokes
        # build_model_for_time_block, which reads them — confirm against
        # the parapint base class.
        super(BurgersInterface, self).__init__(start_t=start_t,
                                               end_t=end_t,
                                               num_time_blocks=num_time_blocks,
                                               comm=comm)

    def build_model_for_time_block(self, ndx, start_t, end_t, add_init_conditions):
        """Build the sub-model for block ``ndx`` over ``[start_t, end_t]``.

        Returns a 3-tuple: the model, the list of coupling variables at the
        block's start time, and the list at its end time. The spatial
        boundary points x in {0, 1} are excluded because they are fixed to
        zero by the model's boundary constraints.
        """
        dt = self.dt
        # ceil keeps at least one finite element even for short blocks.
        nfe_t = math.ceil((end_t - start_t) / dt)
        m = build_burgers_model(
            nfe_x=self.nfe_x, nfe_t=nfe_t, start_t=start_t, end_t=end_t,
            add_init_conditions=add_init_conditions
        )
        return (m,
                ([m.y[x, start_t] for x in sorted(m.x) if x not in {0, 1}]),
                ([m.y[x, end_t] for x in sorted(m.x) if x not in {0, 1}]))
def write_csv(fname, args, timer):
    """Append one timing row for this run to ``fname`` (rank 0 only).

    A header row is written first when the file does not exist yet, so
    repeated runs accumulate rows under a single header.

    Parameters
    ----------
    fname: str
        Path of the csv file to append to.
    args: Args
        Parsed command-line options for this run.
    timer: HierarchicalTimer
        Timer populated by the interior-point solve; one column is emitted
        per timer identifier.
    """
    if rank != 0:
        # Only the root MPI process records results.
        return
    needs_header = not os.path.exists(fname)
    timer_identifiers = timer.get_timers()
    fieldnames = ['end_t', 'nfe_x', 'nfe_t', 'size', 'n_blocks']
    fieldnames.extend(timer_identifiers)
    # newline='' is required by the csv module; the context manager closes
    # the file even if a write raises (the original leaked the handle on
    # error).
    with open(fname, "a", newline="") as f:
        writer = csv.writer(f)
        if needs_header:
            writer.writerow(fieldnames)
        row = [args.end_t, args.nfe_x, args.nfe_t, size, args.nblocks]
        row.extend(timer.get_total_time(name) for name in timer_identifiers)
        writer.writerow(row)
def run_parallel(args, subproblem_solver_class, subproblem_solver_options):
    """Solve the problem with the parallel Schur-complement decomposition.

    Builds the MPI time-block interface, one subproblem linear solver per
    time block plus one for the Schur complement, runs parapint's
    interior-point algorithm, and appends timing results to
    ``parallel_results.csv``.

    Parameters
    ----------
    args: Args
        Parsed command-line options.
    subproblem_solver_class: type
        Linear-solver class instantiated for each subproblem.
    subproblem_solver_options: dict
        Keyword arguments forwarded to ``subproblem_solver_class``.
    """
    interface = BurgersInterface(start_t=0,
                                 end_t=args.end_t,
                                 num_time_blocks=args.nblocks,
                                 nfe_t=args.nfe_t,
                                 nfe_x=args.nfe_x)
    # One solver instance per time block, plus one for the (dense) Schur
    # complement system.
    linear_solver = parapint.linalg.MPISchurComplementLinearSolver(
        subproblem_solvers={ndx: subproblem_solver_class(**subproblem_solver_options) for ndx in range(args.nblocks)},
        schur_complement_solver=subproblem_solver_class(**subproblem_solver_options))
    options = parapint.algorithms.IPOptions()
    options.linalg.solver = linear_solver
    timer = HierarchicalTimer()
    # Synchronize all ranks so the timings measure the solve itself.
    comm.Barrier()
    status = parapint.algorithms.ip_solve(interface=interface, options=options, timer=timer)
    assert status == parapint.algorithms.InteriorPointStatus.optimal
    interface.load_primals_into_pyomo_model()
    fname = "parallel_results.csv"
    write_csv(fname, args, timer)
def run_full_space(args, subproblem_solver_class, subproblem_solver_options):
    """Solve the whole horizon as one serial interior-point problem.

    This is the full-space baseline against the Schur-complement run;
    timing results are appended to ``serial_results.csv``.

    Parameters
    ----------
    args: Args
        Parsed command-line options.
    subproblem_solver_class: type
        Linear-solver class used for the full-space KKT system.
    subproblem_solver_options: dict
        Keyword arguments forwarded to ``subproblem_solver_class``.
    """
    m = build_burgers_model(nfe_x=args.nfe_x, nfe_t=args.nfe_t, start_t=0,
                            end_t=args.end_t, add_init_conditions=True)
    interface = parapint.interfaces.InteriorPointInterface(m)
    linear_solver = subproblem_solver_class(**subproblem_solver_options)
    options = parapint.algorithms.IPOptions()
    options.linalg.solver = linear_solver
    timer = HierarchicalTimer()
    status = parapint.algorithms.ip_solve(interface=interface, options=options, timer=timer)
    assert status == parapint.algorithms.InteriorPointStatus.optimal
    interface.load_primals_into_pyomo_model()
    fname = "serial_results.csv"
    write_csv(fname, args, timer)
if __name__ == '__main__':
    args = Args()
    args.parse_arguments()
    # cntl[1] is the MA27 pivot tolerance
    if args.method == 'psc':
        # Parallel Schur-complement decomposition across time blocks.
        run_parallel(
            args=args,
            subproblem_solver_class=parapint.linalg.InteriorPointMA27Interface,
            subproblem_solver_options={'cntl_options': {1: 1e-6}}
        )
    else:
        # 'fs': full-space serial solve used as the baseline.
        run_full_space(
            args=args,
            subproblem_solver_class=parapint.linalg.InteriorPointMA27Interface,
            subproblem_solver_options={'cntl_options': {1: 1e-6}}
        )
|
# Generated by Django 2.1.5 on 2019-01-28 16:09
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: alter ``Posts.views`` to a nullable,
    whole-number DecimalField (decimal_places=0, at most 5 digits)."""

    dependencies = [
        ('homepage', '0004_posts_views'),
    ]

    operations = [
        migrations.AlterField(
            model_name='posts',
            name='views',
            # decimal_places=0 stores whole numbers only; max_digits=5
            # caps the value at 99999.
            field=models.DecimalField(blank=True, decimal_places=0, default=0, max_digits=5, null=True),
        ),
    ]
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from json import dumps
from json import loads
from django.test.client import Client
from rest_framework.exceptions import AuthenticationFailed
from networkapi.test.test_case import NetworkApiTestCase
from networkapi.api_channel.views import DeployChannelConfV3View
from nose.tools import nottest
class TestChannelRoutes(NetworkApiTestCase):
    """Exercise the HTTP routes of the Channel v3 API."""

    # Fixture data loaded before each test: users, groups, permissions,
    # interfaces and the channel record the assertions below rely on.
    fixtures = [
        'networkapi/system/fixtures/initial_variables.json',
        'networkapi/usuario/fixtures/initial_usuario.json',
        'networkapi/grupo/fixtures/initial_ugrupo.json',
        'networkapi/usuario/fixtures/initial_usuariogrupo.json',
        'networkapi/grupo/fixtures/initial_permissions.json',
        'networkapi/grupo/fixtures/initial_permissoes_administrativas.json',
        "networkapi/api_interface/fixtures/initial_interface.json",
        "networkapi/api_channel/fixtures/initial_channel.json"
    ]

    def setUp(self):
        self.client = Client()
        self.auth = self.get_http_authorization('test')

    def _get_json(self, path):
        # Helper: authenticated GET with a JSON content type.
        return self.client.get(
            path,
            content_type='application/json',
            HTTP_AUTHORIZATION=self.auth)

    def test_should_receive_method_not_allowed(self):
        """ Should receive method not allowed """
        resp = self._get_json('/api/v3/channel/1/deploy/')
        self.assertEqual(405, resp.status_code)

    def test_should_not_allow_unauthenticated_user(self):
        """ Should not allow requests from an unauthenticated user """
        with self.assertRaises(AuthenticationFailed):
            self.client.put(
                '/api/v3/channel/1/deploy/',
                data=dumps({}),
                content_type='application/json',
                HTTP_AUTHORIZATION=self.get_http_authorization('fake'))

    def test_should_get_a_channel_by_id(self):
        """ Should get a Channel by id """
        resp = self._get_json('/api/v3/channel/1/')
        self.assertEqual(200, resp.status_code)

    def test_should_not_get_a_channel_by_id(self):
        """ Should not get a Channel with an unexisting id """
        resp = self._get_json('/api/v3/channel/0/')
        self.assertEqual(500, resp.status_code)

    def test_should_get_a_channel_by_name(self):
        """ Should get a channel by Name """
        resp = self._get_json('/api/v3/channel/tor42/')
        self.assertEqual(200, resp.status_code)
        body = loads(resp.content)
        self.assertEqual(body["channel"]["id"], 1)

    def test_should_not_get_a_channel_by_name(self):
        """ Should not get a channel with an unexisting Name """
        resp = self._get_json('/api/v3/channel/fakechannel/')
        self.assertEqual(500, resp.status_code)

    @nottest
    def test_should_post_a_channel(self):
        """ Should post a Channel """
        resp = self.client.post(
            '/api/v3/channel/1/',
            content_type='application/json',
            data=dumps({}),
            HTTP_AUTHORIZATION=self.auth)
        self.assertEqual(201, resp.status_code)

    @nottest
    def test_should_update_a_channel(self):
        """ Should update a Channel """
        resp = self.client.put(
            '/api/v3/channel/1/',
            content_type='application/json',
            HTTP_AUTHORIZATION=self.auth)
        self.assertEqual(200, resp.status_code)

    def test_should_not_delete_an_unexistent_channel(self):
        """ Should not delete an unexistent channel """
        resp = self.client.delete(
            '/api/v3/channel/0/',
            HTTP_AUTHORIZATION=self.auth)
        self.assertEqual(400, resp.status_code)

    def test_should_delete_a_channel(self):
        """ Should delete a channel """
        resp = self.client.delete(
            '/api/v3/channel/1/',
            HTTP_AUTHORIZATION=self.auth)
        self.assertEqual(200, resp.status_code)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.