text stringlengths 38 1.54M |
|---|
# -*- coding: UTF-8 -*-
#!/usr/bin/env python
#--------------------------------
# Name: dicoshapes.py
# Purpose: make an Excel file about the shapefiles present in a structured
# file database, gathering basic information. The output file can
# be used as a data dictionary.
# Author: Julien Moura (https://github.com/Guts)
# Created: 06/10/2011
# Last update: 09/02/2013
# Python version: 2.7.3
# Language: French (fr-fr)
#--------------------------------
###################################
########### Libraries #############
###################################
from Tkinter import Tk # GUI
from tkFileDialog import askdirectory as doss_cible
from tkFileDialog import asksaveasfilename as savefic
from tkMessageBox import showinfo as info
from os import walk, path # files and folder managing
from os import startfile # open a file in Windows system
from time import localtime
from osgeo import ogr # spatial files
from xlwt import Workbook, Font, XFStyle, easyxf, Formula # excel library
from pickle import dump
from sys import exit
###################################
############ Functions ############
###################################
def listing_shapes(folderpath):
    u""" List shapefiles contained in the folder and its subfolders.

    A .shp file is kept only when its three mandatory companion files
    (.dbf, .shx, .prj) sit next to it. Matching paths are appended to the
    module-level `liste_shapes` list, which is also returned.
    """
    global liste_shapes
    for root, dirs, files in walk(folderpath):
        for name in files:
            # Build each candidate path once instead of re-joining root and
            # name five times per file as the original did.
            full_path = path.join(root, name)
            base, ext = path.splitext(full_path)
            if ext == u'.shp' and all(path.isfile(base + companion)
                                      for companion in (u'.dbf', u'.shx', u'.prj')):
                liste_shapes.append(full_path)
    # end of function
    return liste_shapes
def infos_ogr(shapepath):
    u""" Uses gdal/ogr functions to extract basic informations about shapefile
    given as parameter and store into the corresponding dictionary.

    Fills the module-level dictionaries: dico_infos_couche (layer metadata),
    dico_champs (field name -> (type, width, precision)) and liste_chps
    (field names in layer order). Returns all three. """
    global dico_infos_couche, dico_champs, liste_chps
    source = ogr.Open(shapepath, 0)     # open read-only (flag 0) via the OGR driver
    couche = source.GetLayer()          # get the layer
    objet = couche.GetFeature(0)        # get the first object (index 0)
    geom = objet.GetGeometryRef()       # get the geometry
    def_couche = couche.GetLayerDefn()  # get the layer definitions
    srs = couche.GetSpatialRef()        # get spatial system reference
    srs.AutoIdentifyEPSG()              # try to determine the EPSG code
    # Storing into the dictionary
    dico_infos_couche[u'nom'] = path.basename(shapepath)
    # Title: file name without the '.shp' extension, underscores to spaces
    dico_infos_couche[u'titre'] = dico_infos_couche[u'nom'][:-4].replace('_', ' ').capitalize()
    dico_infos_couche[u'nbr_objets'] = couche.GetFeatureCount()
    dico_infos_couche[u'nbr_attributs'] = def_couche.GetFieldCount()
    dico_infos_couche[u'proj'] = unicode(srs.GetAttrValue("PROJCS")).replace('_', ' ')
    dico_infos_couche[u'EPSG'] = unicode(srs.GetAttrValue("AUTHORITY", 1))
    # NOTE(review): the triple-quoted block below is dead code (a bare string
    # expression), kept as-is.
    '''dico_infos_couche[u'EPSG'] = u"Projection : " + \
    unicode(srs.GetAttrValue("PROJCS")).replace('_', ' ') + \
    u" - Code EPSG : " + \
    unicode(srs.GetAttrValue("AUTHORITY", 1))'''
    # Geometry type (French labels are written into the output workbook)
    if geom.GetGeometryName() == u'POINT':
        dico_infos_couche[u'type_geom'] = u'Point'
    elif u'LINESTRING' in geom.GetGeometryName():
        dico_infos_couche[u'type_geom'] = u'Ligne'
    elif u'POLYGON' in geom.GetGeometryName():
        dico_infos_couche[u'type_geom'] = u'Polygone'
    else:
        # Unknown type: store the raw OGR geometry name
        dico_infos_couche[u'type_geom'] = geom.GetGeometryName()
    # Spatial extent (bounding box); GetExtent() returns (xmin, xmax, ymin, ymax)
    dico_infos_couche[u'Xmin'] = round(couche.GetExtent()[0],2)
    dico_infos_couche[u'Xmax'] = round(couche.GetExtent()[1],2)
    dico_infos_couche[u'Ymin'] = round(couche.GetExtent()[2],2)
    dico_infos_couche[u'Ymax'] = round(couche.GetExtent()[3],2)
    # Fields: record name, type name, width and precision of each attribute
    i = 0
    while i < def_couche.GetFieldCount():
        liste_chps.append(def_couche.GetFieldDefn(i).GetName())
        dico_champs[def_couche.GetFieldDefn(i).GetName()] = def_couche.GetFieldDefn(i).GetTypeName(),\
                                                           def_couche.GetFieldDefn(i).GetWidth(),\
                                                           def_couche.GetFieldDefn(i).GetPrecision()
        i = i+1
    # Last-modification and creation dates formatted as dd/mm/yyyy
    dico_infos_couche[u'date_actu'] = unicode(localtime(path.getmtime(shapepath))[2]) +\
                                      u'/'+ unicode(localtime(path.getmtime(shapepath))[1]) +\
                                      u'/'+ unicode(localtime(path.getmtime(shapepath))[0])
    dico_infos_couche[u'date_creation'] = unicode(localtime(path.getctime(shapepath))[2]) +\
                                          u'/'+ unicode(localtime(path.getctime(shapepath))[1]) +\
                                          u'/'+ unicode(localtime(path.getctime(shapepath))[0])
    # end of function
    return dico_infos_couche, dico_champs, liste_chps
###################################
########### Variables #############
###################################
liste_shapes = []       # list for shapefiles path
dico_infos_couche = {}  # dictionary where will be stored informations
dico_champs = {}        # dictionary for fields information
dico_err = {}           # errors list
# Today's date as yyyy-mm-dd, used in the output file names
today = unicode(localtime()[0]) + u"-" +\
        unicode(localtime()[1]) + u"-" +\
        unicode(localtime()[2])  # date of the day
###################################
####### Output Excel file #########
###################################
# Basic configuration
book = Workbook(encoding = 'utf8')
feuy1 = book.add_sheet(u'Shapes', cell_overwrite_ok=True)
# Some customization: fonts and styles
# first line (header row) style
entete = XFStyle()
font1 = Font()
font1.name = 'Times New Roman'
font1.bold = True
entete.font = font1
# hyperlinks style
url = easyxf(u'font: underline single')
# errors style (declared but not used in the visible code)
erreur = easyxf(u'font: name Arial, bold 1, colour red')
# column headers (French labels written to row 0)
feuy1.write(0, 0, u'Nom fichier', entete)
feuy1.write(0, 1, u'Chemin', entete)
feuy1.write(0, 2, u'Thème', entete)
feuy1.write(0, 3, u'Type géométrie', entete)
feuy1.write(0, 4, u'Emprise', entete)
feuy1.write(0, 5, u'Projection', entete)
feuy1.write(0, 6, u'EPSG', entete)
feuy1.write(0, 7, u'Nombre de champs', entete)
feuy1.write(0, 8, u'Nombre d\'objets', entete)
feuy1.write(0, 9, u'Date de l\'information', entete)
feuy1.write(0, 10, u'Date dernière actualisation', entete)
feuy1.write(0, 11, u'Liste des champs', entete)
###################################################
################## Main program ###################
###################################################
# Folder "target": ask the user for the folder to scan (hidden Tk root window)
root = Tk()
root.withdraw()
cible = doss_cible()
if cible == "":  # if no folder was chosen: stop the program
    root.destroy()
    exit()
# Listing of shapefiles into the folder
listing_shapes(cible)
if len(liste_shapes) == 0:  # if no shapefile has been found: stop the program
    root.destroy()
    exit()
# Reading the shapefiles found
lig = 1  # current worksheet row (row 0 holds the headers)
for couche in liste_shapes:
    # reset per-layer variables
    dico_infos_couche.clear()
    dico_champs.clear()
    liste_chps = []
    champs = ""
    theme = ""
    try:
        infos_ogr(couche)
    except:
        # NOTE(review): bare except -- any failure (not only a malformed
        # shapefile) is logged with this generic message.
        dico_err[couche] = u"Probleme dans la structure du shape." + \
                           "\n \n"
        continue
    # Add the shape informations to the Excel file
    # Name
    feuy1.write(lig, 0, dico_infos_couche.get('nom'))
    # Path of containing folder formatted to be a hyperlink
    # NOTE(review): str.strip() removes *characters* from both ends, not a
    # suffix -- this can over-trim the path when the folder name shares
    # letters with the file name; path.dirname(couche) was likely intended.
    lien = 'HYPERLINK("' + \
           couche.strip(dico_infos_couche.get('nom')) + \
           '"; "Atteindre le dossier")'  # path formatted as an Excel hyperlink
    feuy1.write(lig, 1, Formula(lien), url)
    # Name of containing folder (skip a generic 'shp' folder level)
    if path.basename(path.dirname(couche)) != 'shp':
        feuy1.write(lig, 2, path.basename(path.dirname(couche)))
    else:
        feuy1.write(lig, 2, path.basename(path.dirname(path.dirname(couche))))
    # Geometry type
    feuy1.write(lig, 3, dico_infos_couche.get(u'type_geom'))
    # Spatial extent
    emprise = u"Xmin : " + unicode(dico_infos_couche.get(u'Xmin')) +\
              u", Xmax : " + unicode(dico_infos_couche.get(u'Xmax')) +\
              u", Ymin : " + unicode(dico_infos_couche.get(u'Ymin')) +\
              u", Ymax : " + unicode(dico_infos_couche.get(u'Ymax'))
    feuy1.write(lig, 4, emprise)
    # Name of srs
    feuy1.write(lig, 5, dico_infos_couche.get(u'proj'))
    # EPSG code
    feuy1.write(lig, 6, dico_infos_couche.get(u'EPSG'))
    # Number of fields
    feuy1.write(lig, 7, dico_infos_couche.get(u'nbr_attributs'))
    # Number of objects
    feuy1.write(lig, 8, dico_infos_couche.get(u'nbr_objets'))
    # Creation date
    feuy1.write(lig, 9, dico_infos_couche.get(u'date_creation'))
    # Last update date
    feuy1.write(lig, 10, dico_infos_couche.get(u'date_actu'))
    # Field informations
    for chp in liste_chps:
        # field type mapped to a French label
        if dico_champs[chp][0] == 'Integer' or dico_champs[chp][0] == 'Real':
            tipo = u'Numérique'
        elif dico_champs[chp][0] == 'String':
            tipo = u'Texte'
        elif dico_champs[chp][0] == 'Date':
            tipo = u'Date'
        # NOTE(review): no else branch -- an unknown field type silently
        # reuses the previous iteration's `tipo` (NameError on the first).
        try:
            # concatenation of field informations
            champs = champs +\
                     chp +\
                     " (" + tipo +\
                     ", Lg. = " + unicode(dico_champs[chp][1]) +\
                     ", Pr. = " + unicode(dico_champs[chp][2]) + ") ; "
        except UnicodeDecodeError:
            # write a notification into the log file
            dico_err[couche] = u"Problème d'encodage sur le champ : " + \
                               chp.decode('latin1') + \
                               "\n\n"
            # decode the problematic field name
            champs = champs +\
                     chp.decode('utf8') +\
                     " (" + tipo +\
                     ", Lg. = " + unicode(dico_champs[chp][1]) +\
                     ", Pr. = " + unicode(dico_champs[chp][2]) + ") ; "
            continue
    # Once all fields explored, write into the output file
    feuy1.write(lig, 11, champs)
    # And go to the next line
    lig = lig +1
## Save the Excel file
# Prompt for the folder where the file will be saved
saved = savefic(initialdir= cible,
                defaultextension = '.xls',
                initialfile = "DicoShapes_" + today + "_",
                filetypes = [("Classeurs Excel","*.xls")])
if path.splitext(saved)[1] != ".xls":
    saved = saved + ".xls"
book.save(saved)
## Log information to the user
# NOTE(review): Python 2 idiom -- dict.keys() is a list there. On Python 3
# this comparison is always False; `if not dico_err:` is the portable form.
if dico_err.keys() == []:  # if no error was met
    info(title=u"Fin de programme", message=u"Programme terminé.\
\nAucune erreur rencontrée.")
else:  # errors were met: create a log file
    fic = open(cible + "\\" + today + "_dico-shapes_log.txt", 'w')
    # NOTE(review): pickle.dump writes a binary pickle stream, not readable
    # text -- fic.write() was probably intended for a human-readable log.
    dump("Erreurs rencontrées sur les tables suivantes : \n\n", fic)
    fic.write('/n/n')
    dump(dico_err, fic)
    fic.close()
    info(title=u"Fin de programme", message=u"Programme terminé.\
\nConsultez le fichier log créé pour les détails : \
\n" + cible + u"\\" + unicode(today) + u"_dico-shapes_log.txt")
# End of program
# NOTE(review): `fic` only exists when errors occurred -- this line raises
# NameError on an error-free run.
startfile(fic.name)
startfile(cible)
del book
# Task 3
def my_func(number1, number2, number3):
    """Return the sum of the two largest of the three arguments."""
    # Sorting ascending and dropping the smallest leaves the top two.
    return sum(sorted((number1, number2, number3))[1:])
# Demo: print the sum of the two largest values of `numbers` (message is Russian).
numbers = [1, 2, 10]
print(f"Из списка {numbers} сумма наибольших двух аргументов = " +
      f"{my_func(numbers[0], numbers[1], numbers[2])}")
|
# User-role name constants (Portuguese labels, used as both identifier and
# display value).
Administrador = u'Administrador'
Atendente = u'Atendente'
Gerente = u'Gerente'
Vendedor = u'Vendedor'
|
"""
SocksiPy + urllib.request handler

This module provides a Handler which you can use with urllib.request
to allow it to tunnel your connection through a socks.sockssocket socket,
without monkey-patching the original socket...
"""
import base64
import urllib.request
from . import socks
try:
    from requests.packages.urllib3.connection import HTTPConnection, HTTPSConnection
except ImportError:
    # requests/urllib3 unavailable: provide minimal stand-ins built on
    # http.client so the proxy connection classes below still work.
    import ssl
    import socket
    import http.client

    class HTTPConnection(http.client.HTTPConnection):
        def __init__(self, *args, **kw):
            # 'strict' works only in Python2, removed from Py3.4.
            # Bug fix: the original kw.pop('strict') raised KeyError whenever
            # the caller did NOT pass 'strict'; a default makes it optional.
            kw.pop('strict', None)
            super().__init__(*args, **kw)

        def _new_conn(self):
            # Subclasses must supply the actual socket factory.
            raise NotImplementedError

        def connect(self):
            self.sock = self._new_conn()

    class HTTPSConnection(HTTPConnection, http.client.HTTPSConnection):
        def connect(self):
            """Connect to a host on a given (SSL) port.

            Note: Whole copy of original method, except initial socket creation
            """
            sock = self._new_conn()
            if self._tunnel_host:
                self.sock = sock
                self._tunnel()
            server_hostname = self.host if ssl.HAS_SNI else None
            self.sock = self._context.wrap_socket(sock, server_hostname=server_hostname)
            try:
                if self._check_hostname:
                    ssl.match_hostname(self.sock.getpeercert(), self.host)
            except Exception:
                # Close the socket on verification failure before re-raising.
                self.sock.shutdown(socket.SHUT_RDWR)
                self.sock.close()
                raise
class ProxyHTTPConnection(HTTPConnection):
    """HTTPConnection that opens its socket through a SOCKS routing chain.

    `chain` is a sequence of proxy URLs handed to socks.RoutingTable, with
    the real destination host as the final stop.
    """

    def __init__(self, *args, chain=(), **kw):
        super().__init__(*args, **kw)
        self.routes = socks.RoutingTable.from_addresses(chain, dst=self.host)

    def _new_conn(self):
        """Create and connect a socks-routed socket to (host, port)."""
        sock = socks.socksocket(routes=self.routes)
        # isinstance is the idiomatic numeric check (the original used
        # `type(...) in (int, float)`); the global-default-timeout sentinel
        # object is still correctly excluded either way.
        if isinstance(self.timeout, (int, float)):
            sock.settimeout(self.timeout)
        sock.connect((self.host, self.port))
        return sock
class ProxyHTTPSConnection(ProxyHTTPConnection, HTTPSConnection):
    # The MRO does all the work: _new_conn() comes from ProxyHTTPConnection
    # (SOCKS-routed socket) and connect() from HTTPSConnection (TLS wrap of
    # that socket), so no body is needed.
    pass
class ChainProxyHandler(urllib.request.HTTPHandler, urllib.request.HTTPSHandler, urllib.request.ProxyHandler):
    """urllib.request handler tunnelling HTTP(S) requests through a proxy chain.

    When the final hop of the chain is itself a plain HTTP proxy, the request
    is additionally rewritten for it (absolute URI via set_proxy, plus an
    optional Proxy-Authorization header).
    """

    def __init__(self, chain=()):
        super().__init__()
        self.chain = chain
        # Fixed fragile idiom: `chain and parse(...) or None` would collapse
        # to None if parse_proxy() ever returned a falsy value; a conditional
        # expression has no such trap.
        self._last_hop = socks.parse_proxy(chain[-1]) if chain else None  # cached last-hop info

    def is_chain_http_end(self):
        """Return True when the chain's last hop is an HTTP proxy."""
        if self._last_hop is None:
            return False
        return self._last_hop.type == socks.PROXY_TYPE_HTTP

    def _create_http_conn(self, *args, **kw):
        return ProxyHTTPConnection(*args, chain=self.chain, **kw)

    def _create_https_conn(self, *args, **kw):
        return ProxyHTTPSConnection(*args, chain=self.chain, **kw)

    @staticmethod
    def install_http_proxy(req, proxy):
        """Add Proxy-Authorization (if credentialed) and proxy routing to req."""
        if proxy.username and proxy.password:
            user_pass = '%s:%s' % (proxy.username, proxy.password)
            creds = base64.b64encode(user_pass.encode()).decode("ascii")
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        host_port = '%s:%s' % (proxy.addr, proxy.port)
        proxy_type = socks.PRINTABLE_PROXY_TYPES[proxy.type]
        req.set_proxy(host_port, proxy_type)

    def http_open(self, req):
        if self.is_chain_http_end():
            self.install_http_proxy(req, self._last_hop)
        return self.do_open(self._create_http_conn, req)

    def https_open(self, req):
        if self.is_chain_http_end():
            self.install_http_proxy(req, self._last_hop)
        return self.do_open(self._create_https_conn, req)
if __name__ == "__main__":
    # Smoke test: route one HTTP and one HTTPS request through a local Tor
    # SOCKS proxy and print the origin IP as seen by httpbin.
    chain = [
        'tor://localhost/',
    ]
    opener = urllib.request.build_opener(ChainProxyHandler(chain=chain))
    print("HTTP: " + opener.open("http://httpbin.org/ip").read().decode())
    print("HTTPS: " + opener.open("https://httpbin.org/ip").read().decode())
|
#!/usr/bin/env python
import operator
# Nagios-style status labels keyed by exit code.
exit_code_output = {0: 'OK',
                    1: 'WARNING',
                    2: 'CRITICAL',
                    3: 'UNKNOWN',
                    }
exit_code = 0
# Get threshold
# NOTE(review): `self`, `allperfs` and `set_value` are not defined in this
# file -- this snippet runs inside a monitoring framework (trigger context)
# that injects them.
data = {}
outputs = []
output = ""
perf_data = ""
operator_name = 'ge'  # comparison applied to thresholds (>=)
thyallperfs = allperfs(self)
for metric_name, metric_value in thyallperfs.items():
    warn = None
    crit = None
    #print "Hello %s, %s" % (metric_name , metric_value.value)
    # Metric names appear to follow '<prefix>-<pkgmgr>-<pkgtype>' -- TODO confirm
    pkgtype = metric_name.split('-')[2]
    pkgmgr = metric_name.split('-')[1]
    if not pkgmgr in data:
        data[pkgmgr] = {}
    data[pkgmgr][pkgtype] = int(metric_value.value)
    if pkgtype == 'security':
        # Any pending security update is treated as critical.
        crit = 1
        warn = None
    else:
        # Thresholds come from host custom macros, e.g. _PKGS_FOO_MAX_WARN.
        warn = self.host.customs.get('_PKGS_%s_MAX_WARN' % pkgtype.upper() , None)
        crit = self.host.customs.get('_PKGS_%s_MAX_CRIT' % pkgtype.upper() , None)
    this_exit_code = 0
    if not warn is None:
        warn = int(warn)
        if getattr(operator, operator_name)(data[pkgmgr][pkgtype], warn):
            this_exit_code = 1
    if not crit is None:
        crit = int(crit)
        if getattr(operator, operator_name)(data[pkgmgr][pkgtype], crit):
            this_exit_code = 2
    if this_exit_code > 0:
        outputs.append('%s %s update(s)' % (data[pkgmgr][pkgtype], pkgtype))
    # Keep the worst (highest) status seen across all metrics.
    if exit_code < this_exit_code:
        exit_code = this_exit_code
    perf_data += " %s=%d;%s;%s;0;" % (pkgtype, data[pkgmgr][pkgtype], str(warn or ''), str(crit or ''))
# Finish output
# NOTE(review): dict.keys()[0] only works on Python 2; on Python 3 use
# next(iter(data)) instead.
output = " - ".join((data.keys()[0].upper(), exit_code_output[exit_code]))
if len(outputs) > 0:
    output += ': ' + ', '.join(outputs)
# Set ouput
set_value(self, output, perf_data, exit_code)
|
import nltk
nltk.download('stopwords')
from sklearn.feature_extraction.text import CountVectorizer
from nltk import tokenize
from nltk.stem.snowball import SnowballStemmer
from collections import Counter
import numpy as np
class Featurizer(object):
    """Builds bag-of-words one-hot count features for introduction records.

    One CountVectorizer is shared between 'text' and 'title' (fitted once on
    the concatenation of both corpora), while 'source' and 'person' each get
    their own whitespace-tokenized vectorizer.
    """

    def __init__(self):
        # English Snowball stemmer; stop words are excluded from stemming.
        self.stemmer = SnowballStemmer('english', ignore_stopwords=True)
        self.tkn = tokenize.TreebankWordTokenizer()
        # Flips to True once the shared text/title vectorizer has been fitted.
        self.fit_text_title = False
        # min_df=50: drop terms appearing in fewer than 50 documents.
        self.text_title_count_v = CountVectorizer(stop_words='english', min_df=50, tokenizer=self.my_tokenizer)
        self.white_space_tkn = tokenize.WhitespaceTokenizer().tokenize
        self.source_count_v = CountVectorizer(lowercase=False, tokenizer=self.white_space_tkn)
        self.person_count_v = CountVectorizer(lowercase=False, tokenizer=self.white_space_tkn)

    def fit_text_and_title(self, total_introductions):
        """Fit the shared vectorizer on all texts plus all titles (idempotent)."""
        if self.fit_text_title:
            return
        else:
            texts = Featurizer.get_attr_val_from_introductions('text', total_introductions)
            titles = Featurizer.get_attr_val_from_introductions('title', total_introductions)
            self.text_title_count_v.fit(texts + titles)
            self.fit_text_title = True
            return

    def fit_text_one_hot(self, total_introductions):
        """Return (dense count matrix, feature names) for the 'text' field."""
        self.fit_text_and_title(total_introductions)
        texts = Featurizer.get_attr_val_from_introductions('text', total_introductions)
        transformed = self.text_title_count_v.transform(texts)
        return transformed.toarray(), self.text_title_count_v.get_feature_names()

    def fit_title_one_hot(self, total_introductions):
        """Return (dense count matrix, feature names) for the 'title' field."""
        self.fit_text_and_title(total_introductions)
        titles = Featurizer.get_attr_val_from_introductions('title', total_introductions)
        transformed = self.text_title_count_v.transform(titles)
        return transformed.toarray(), self.text_title_count_v.get_feature_names()

    def fit_source_one_hot(self, total_introductions):
        """Fit-and-transform one-hot counts for the 'source' field."""
        sources = Featurizer.get_attr_val_from_introductions('source', total_introductions)
        transformed = self.source_count_v.fit_transform(sources)
        return transformed.toarray(), self.source_count_v.get_feature_names()

    def fit_person_one_hot(self, total_introductions):
        """Fit-and-transform one-hot counts for the 'person' field."""
        persons = Featurizer.get_attr_val_from_introductions('person', total_introductions)
        transformed = self.person_count_v.fit_transform(persons)
        return transformed.toarray(), self.person_count_v.get_feature_names()

    @staticmethod
    def get_attr_val_from_introductions(attr, total_introductions):
        # Collect intro[attr] for every introduction dict, in order.
        return [intro[attr] for intro in total_introductions]

    def my_tokenizer(self, sen):
        """Treebank-tokenize `sen`, keep purely alphabetic tokens, stem them."""
        ts = self.tkn.tokenize(sen)
        res = []
        for t in ts:
            if t.isalpha():
                res.append(t)
        return [self.stemmer.stem(r) for r in res]

    @staticmethod
    def transfer_val(from_intros, to_intros, from_keys, to_keys, assertive_key, backup_key):
        """Copy from_keys values of each from_intro onto the aligned to_intro
        (stored under to_keys), verifying pairwise alignment via assertive_key
        or, when absent, backup_key."""
        assert len(from_intros) == len(to_intros), "From and to intros should be the same size"
        assert len(from_keys) == len(to_keys), "From and to keys should be the same size"
        for i in range(len(from_intros)):
            f_intro = from_intros[i]
            to_intro = to_intros[i]
            if assertive_key in f_intro:
                assert f_intro[assertive_key] == to_intro[assertive_key]
            else:
                assert f_intro[backup_key] == to_intro[backup_key]
            for j in range(len(from_keys)):
                f_key = from_keys[j]
                t_key = to_keys[j]
                to_intro[t_key] = f_intro[f_key]
def count_by_key(introductions, key):
    """Tally how many introductions share each distinct value of `key`."""
    # Counter's constructor consumes the generator and does the tallying.
    return Counter(intro[key] for intro in introductions)
def adjust_imbalance_pca(data, introductions, source_weights):
    """
    Scale every row of `data` by the weight of its introduction's source.

    :param data: matrix to scale row wise (data.shape[0] == len(introductions))
    :param introductions: intro dicts, each carrying a 'source' key
    :param source_weights: map source to real number weights
    :return: float32 matrix of the same shape as `data`
    """
    assert data.shape[0] == len(introductions)
    # Gather one weight per row, then broadcast-multiply in a single
    # vectorized pass instead of the original per-row Python loop.
    weights = np.array([source_weights[intro['source']] for intro in introductions],
                       dtype=np.float32)
    return (data * weights[:, np.newaxis]).astype(np.float32)
def random_indx_same_proportions(introductions, n):
    """Sample ~n indices with an equal count per source (with replacement).

    Each of the K distinct sources contributes n // K indices drawn (with
    replacement) from the positions of that source's introductions, so the
    sample keeps sources in equal proportion. Returns a flat int32 array of
    K * (n // K) indices (slightly fewer than n when K does not divide n).
    """
    idx_and_sources = [(idx, intro['source']) for idx, intro in enumerate(introductions)]
    # Group row indices by source, preserving first-seen source order.
    source_idx_map = {}
    for idx, source in idx_and_sources:
        source_idx_map.setdefault(source, []).append(idx)
    K = len(source_idx_map)
    # Bug fix: was int(n/K) -- true division goes through a float, which can
    # round incorrectly for large n. Floor division stays exact.
    n_k = n // K
    ran_idx = np.zeros((K, n_k), dtype=np.int32)
    for i, s in enumerate(source_idx_map):
        ran_idx[i, :] = np.random.choice(source_idx_map[s], n_k)
    return ran_idx.reshape(-1)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Note: To use the 'upload' functionality of this file, you must:
# $ pip install twine
import io
import os
from setuptools import setup
# Package meta-data.
NAME = 'get-pybrowser'
DESCRIPTION = 'Selenium based, user friendly Browser Automation API'
URL = 'https://github.com/abranjith/pybrowser'
EMAIL = 'abranjith@gmail.com'
AUTHOR = 'ranjith'
VERSION = '0.2.0'
README_CONTENT_TYPE = 'text/markdown'
# What packages are required for this module to be executed?
REQUIRED = [
    'requests', 'selenium==3.141.0', 'pyquery==1.4.0', 'pyppeteer==0.0.25'
]
#here = os.path.abspath(os.path.dirname(__file__))
# Import the README and use it as the long-description.
# NOTE(review): README.md is opened with the default locale encoding;
# encoding="utf-8" would be safer for markdown with non-ASCII characters.
with open("README.md", "r") as fh:
    long_description = fh.read()
# Where the magic happens:
setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    long_description=long_description,
    long_description_content_type=README_CONTENT_TYPE,
    author=AUTHOR,
    author_email=EMAIL,
    url=URL,
    python_requires='>=3.7.0',
    # If your package is a single module, use this instead of 'packages':
    packages=['pybrowser', 'pybrowser.elements', 'pybrowser.external'],
    install_requires=REQUIRED,
    include_package_data=True,
    license='MIT',
    classifiers=[
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy'
    ]
)
class Node(object):
    """One element of a singly linked list."""

    def __init__(self, data):
        self.data = data  # payload carried by this node
        self.next = None  # following node; None marks the tail
class LinkedList(object):
    """Singly linked list keeping a cached element count in `size`."""

    def __init__(self):
        self.size = 0     # cached number of elements
        self.head = None  # first node, or None when empty

    def insertstart(self, data):
        """Insert `data` at the head. O(1)."""
        newNode = Node(data)
        self.size += 1
        # Works for the empty list too: newNode.next simply stays None.
        newNode.next = self.head
        self.head = newNode

    def remove(self, data):
        """Remove the first node holding `data`; no-op when absent.

        Bug fixes vs. the original: an absent value (or an empty list) no
        longer crashes with AttributeError, and `size` is only decremented
        when a node is actually unlinked.
        """
        node = self.head
        previous_node = None
        while node is not None and node.data != data:
            previous_node = node
            node = node.next
        if node is None:
            return  # not found: leave list and size untouched
        self.size -= 1
        if previous_node is None:
            self.head = node.next
        else:
            previous_node.next = node.next

    def size1(self):
        """Return the cached element count. O(1)."""
        return self.size

    def size2(self):
        """Count elements by traversal. O(n).

        Bug fixes vs. the original: counts every node (the original missed
        the last one) and tolerates an empty list.
        """
        count = 0
        temp = self.head
        while temp is not None:
            count += 1
            temp = temp.next
        return count

    def insertend(self, data):
        """Append `data` at the tail. O(n).

        Bug fix vs. the original: appending to an empty list no longer
        crashes with AttributeError.
        """
        new_node = Node(data)
        self.size += 1
        if self.head is None:
            self.head = new_node
            return
        temp = self.head
        while temp.next is not None:
            temp = temp.next
        temp.next = new_node

    def traverse(self):
        """Print every element, one per line, head to tail."""
        temp = self.head
        while temp is not None:
            print(f"{temp.data}")
            temp = temp.next
# Demo: exercise insertion, removal, traversal and counting.
l = LinkedList()
l.insertstart(12)
l.insertstart(13)
l.insertstart(1)
l.insertend(19)
l.traverse()   # prints 1, 13, 12, 19 (one per line)
l.remove(13)
print('\n')
l.traverse()   # prints 1, 12, 19
print('\n')
l.size2()      # NOTE(review): return value is discarded
l.remove(1)
l.traverse()   # prints 12, 19
print('\n')
l.insertend(12)
l.insertend(13)
l.remove(12)   # removes only the first 12
l.traverse()
|
# Generated by Django 3.0 on 2019-12-10 02:56
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a processing-status field to plotapp.InputFile (auto-generated)."""

    dependencies = [
        ('plotapp', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='inputfile',
            name='status',
            # NOTE(review): verbose_name 'Staus' is a typo for 'Status'.
            # Fixing it after this migration has been applied requires a
            # follow-up migration, not an edit here.
            field=models.CharField(choices=[('queued', 'queued'), ('processing', 'processing'), ('done', 'done'), ('error', 'error')], default='queued', max_length=50, verbose_name='Staus'),
        ),
    ]
|
from blcscan import BLCScan1
from device.nct import NCT
from epics import caput, caget
import time
import threading
import math
import pandas as pd
class ScanRC(BLCScan1):
    '''
    Scan for rocking curve
    DATE: 2018-5
    m: motor, Motor object
    d: detectors, BL09BNCT object
    step > 0
    '''
    def __init__(self, start, step, end, m, d):
        super().__init__(start, step, end, m, d)
        self.change_dn()
        #self.dn = 2

    def change_dn(self):
        # Two detector channels (I0, I1); (re)allocate the readback arrays
        # sized to the number of scan points.
        self.dn = 2
        self.mRA = [0 for i in range(self.npts)]  # motor readbacks per point
        self.dRA = [[0 for i in range(self.npts)] for j in range(self.dn)]  # detector readbacks

    def scan_logic(self):
        """Step the motor across all scan points, recording motor and detector
        readbacks; honors the `stopped` flag between points."""
        pos = self.get_pos()
        #caput("X09B1:RC:D0CV", 0, wait=True)
        #caput("X09B1:RC:D1CV", 0, wait=True)
        #caput("X09B1:RC:MCV", 0, wait=True)
        for i in range(self.npts):
            if self.stopped == 1:
                break  # user requested stop
            # Publish the upcoming setpoint, then move and read back.
            caput("X09B1:RC:Next", pos[i], wait=True)
            self.move_motor(pos[i])
            self.mRA[i] = self.read_motor()
            # detector value list @ current scan position
            dvs = self.read_detectors()
            for j in range(len(dvs)):
                self.dRA[j][i] = dvs[j]
            #print(self.mRA[i], dvs[0], dvs[1])
            #caput("X09B1:RC:D0CV", dvs[0], wait=True)
            #caput("X09B1:RC:D1CV", dvs[1], wait=True)
            #caput("X09B1:RC:MCV", self.mRA[i], wait=True)

    def save_data(self, filename):
        """Dump motor and detector arrays to CSV via pandas."""
        table = {"motor readback":self.mRA, "I0":self.dRA[0], "I1":self.dRA[1]}
        tb = pd.DataFrame(table)
        tb.to_csv(filename)

    def read_detectors(self):
        # NOTE(review): reads the first detector in `d`; other classes here
        # treat `d` as a single detector object -- confirm which is intended.
        return(self.d[0].read_once1())
class ScanXAFS(BLCScan1):
    '''
    Scan for XAFS
    DATE: 2018-5
    m: motor, Motor object
    d: detectors, BL09BNCT object
    step > 0
    '''
    def __init__(self, start, step, end, m, d):
        super().__init__(start, step, end, m, d)

    def scan_logic(self):
        """Step through every scan point, publishing motor/detector readbacks
        and the absorbance ln(I0/I1) to the XAFS EPICS PVs; honors `stopped`."""
        pos = self.get_pos()
        for i in range(self.npts):
            if self.stopped == 1:
                break  # user requested stop
            caput("X09B1:XAFS:Next", pos[i], wait=True)
            self.move_motor(pos[i])
            self.mRA[i] = self.read_motor()
            # detector value list @ current scan position
            dvs = self.read_detectors()
            for j in range(len(dvs)):
                self.dRA[j][i] = dvs[j]
            # Bug fix: absorbance is ln(I0/I1). The original called
            # math.log(math.e, dvs[0]/dvs[1]), i.e. log base (I0/I1) of e,
            # which is 1/ln(I0/I1) -- the wrong quantity. One-argument
            # math.log(x) is the natural log.
            lncv = math.log(dvs[0]/dvs[1])
            caput("X09B1:XAFS:D0CV", dvs[0], wait=True)
            caput("X09B1:XAFS:D1CV", dvs[1], wait=True)
            caput("X09B1:XAFS:LNCV", lncv, wait=True)
            caput("X09B1:XAFS:MCV", self.mRA[i], wait=True)

    #def read_detectors(self):
    #return(self.d.read_once1())
class BL09BNCT(NCT):
    """
    NCT08-01 @ BL09B1
    DATE: 2018-5
    """
    def __init__(self, prefix):
        super().__init__(prefix)
        # PV holding the counter/timer integration time (seconds).
        self.itime = self.p+":SetTimer.A"
        #self.t = 1

    def set_itime(self, t=1):
        """Write integration time `t` (seconds) to the hardware and cache it."""
        caput(self.itime, t, wait=True)
        self.t = t

    # for monitor
    def read_once(self):
        # Trigger an acquisition, wait the cached integration time, then read.
        # NOTE(review): relies on set_itime() having been called first to
        # define self.t; the read result is discarded.
        self.trigger()
        time.sleep(self.t)
        self.read()

    # for scan
    def read_once1(self):
        """Trigger, wait the hardware-side integration time, return [f0, f1]."""
        self.trigger()
        time.sleep(caget(self.itime))
        f0, f1, v0, v1 = self.read()  # v0/v1 are read but not returned
        return [f0, f1]
class BL09BXAFS(object):
    """
    xafs piecewise scan
    DATE: 2018-5
    energy: energy list (segment boundaries; must hold len(step) + 1 values)
    step: step list
    itime: itime list
    m: motor, Motor object
    d: detectors, BL09BNCT object
    step > 0
    """
    def __init__(self, energy, step, itime, m, d):
        self.energy = energy
        self.step = step
        self.itime = itime  # NOTE(review): stored but never used below
        self.m = m
        self.d = d
        self.stopped = 0    # set to 1 by stop() to abort the scan
        self.lock = threading.Lock()  # guards writes to `stopped`

    def scan(self):
        """Run the scan asynchronously in a worker thread."""
        t = threading.Thread(target=self.scan_process, args=())
        t.start()

    def scan_process(self):
        # Reset the stop flag under the lock, then run the scan and flag completion.
        self.lock.acquire()
        try:
            self.stopped = 0
        finally:
            self.lock.release()
        self.finished = 0
        self.scan_logic()
        self.finished = 1

    def scan_logic(self):
        """Execute one ScanXAFS sub-scan per (energy[i], energy[i+1]) segment,
        polling for completion or an external stop request."""
        for i in range(len(self.step)):
            print("Seg", i)
            start = self.energy[i]
            step = self.step[i]
            end = self.energy[i+1]
            print(start, step, end)
            if self.stopped == 1:
                break
            subscan = ScanXAFS(start, step, end, self.m, self.d)
            subscan.set_settlingTime(caget("X09B1:XAFS:SettlingTime"))
            subscan.scan()
            time.sleep(1)
            # wait until subscan finished or button Stop pressed
            while(subscan.finished == 0):
                if self.stopped == 1:
                    subscan.stop()
                    break
                time.sleep(1)
            print(subscan.mRA, subscan.dRA)

    def stop(self):
        """Request scan abort (thread-safe)."""
        self.lock.acquire()
        try:
            self.stopped = 1
        finally:
            self.lock.release()
    #def read_detectors(self):
    #return(self.d.read_once1())
#!/usr/bin/env python3
######################################################################
## Author: Carl Schaefer, Smithsonian Institution Archives
######################################################################
message_groups = {}  # account_id -> set of messages for that account

######################################################################
def message_group_for_account(account_id):
    """Return the message set for account_id, creating an empty one on first use."""
    # setdefault replaces the original `if key not in d.keys()` check-then-insert
    # with a single lookup-or-insert.
    return message_groups.setdefault(account_id, set())
|
# -*- coding: utf-8 -*-
from sys import argv

# Usage: python script.py <filename>
script, filename = argv

# Bug fix: the original closed the file *before* reading it, which raises
# "ValueError: I/O operation on closed file". Context managers keep each
# file open exactly as long as it is being read and close it automatically.
print("Here is your file %r:" % filename)
with open(filename) as txt:
    print(txt.read())

print("Type the filename again:")
file_again = input(">")
with open(file_again) as txt_again:
    print(txt_again.read())
|
import csv
import os
import random
import sys
import time
import pygame
# Window and pygame bootstrap
screen_width, screen_height = 960, 640
pygame.init()
pygame.display.set_caption('Level Editor')
myfont = pygame.font.SysFont('Comic Sans MS', 30)  # NOTE(review): unused in visible code
screen = pygame.display.set_mode((screen_width, screen_height))
clock = pygame.time.Clock()
class Boundary:
    """One-pixel-thick rects hugging the four edges of a surface."""

    def __init__(self, bound_screen):
        self.screen = bound_screen.get_rect()
        width, height = self.screen.width, self.screen.height
        self.left_height = pygame.Rect(0, 0, 1, height)
        self.top_width = pygame.Rect(0, 0, width, 1)
        self.right_height = pygame.Rect(width - 1, 0, 1, height)
        self.bottom_width = pygame.Rect(0, height - 1, width, 1)
        # All four edge rects, for convenient iteration.
        self.bound_list = [self.left_height, self.top_width,
                           self.right_height, self.bottom_width]
screen_border = Boundary(screen)  # window-edge rects; appears unused in the visible code
class LevelEditor:
    """Sprite-palette level editor: pick a tile from the menu column and click
    on the canvas to place it; placements are appended to level.csv."""

    def __init__(self):
        # Palette of background tiles.
        # NOTE(review): hard-coded absolute user paths break on any other
        # machine; paths relative to the project root would be portable.
        self.sprite_list = [
            pygame.image.load(r'C:\Users\ryous\OneDrive\Documents\GitHub\replKAJAM\replKajam\img\level\Background'
                              r'\Blue.png'),
            pygame.image.load(r'C:\Users\ryous\OneDrive\Documents\GitHub\replKAJAM\replKajam\img\level\Background'
                              r'\Brown.png'),
            pygame.image.load(r'C:\Users\ryous\OneDrive\Documents\GitHub\replKAJAM\replKajam\img\level\Background'
                              r'\Gray.png'),
            pygame.image.load(r'C:\Users\ryous\OneDrive\Documents\GitHub\replKAJAM\replKajam\img\level\Background'
                              r'\Green.png'),
            pygame.image.load(r'C:\Users\ryous\OneDrive\Documents\GitHub\replKAJAM\replKajam\img\level\Background'
                              r'\Pink.png'),
            pygame.image.load(r'C:\Users\ryous\OneDrive\Documents\GitHub\replKAJAM\replKajam\img\level\Background'
                              r'\Purple.png'),
            pygame.image.load(r'C:\Users\ryous\OneDrive\Documents\GitHub\replKAJAM\replKajam\img\level\Background'
                              r'\Yellow.png'),
        ]
        self.sprite_list_rect = []  # menu rects, parallel to sprite_list
        self.sprite_menu_height = screen_height // self.sprite_list[
            0].get_height()  # remainder 8 # check amount of sprites that can fit in column
        self.sprite_list_index = 0  # 1-based index of the currently selected sprite
        self.mouse_x, self.mouse_y = pygame.mouse.get_pos()
        self.mouse_rect = pygame.Rect(self.mouse_x, self.mouse_y, 5, 5)
        self.mouse_click = False     # latched by game_event() on left click
        self.clicked_sprite = False  # True while a palette sprite is selected

    def get_mouse_pos(self):  # get mouse position and refresh the hit rect
        self.mouse_x, self.mouse_y = pygame.mouse.get_pos()
        self.mouse_rect = pygame.Rect(self.mouse_x, self.mouse_y, 5, 5)

    def get_sprite_rect(self):  # creates the rects for sprite_list
        y_column = 0
        for sprite_block in range(self.sprite_menu_height):
            if sprite_block < len(self.sprite_list):
                block_x, block_y, block_width, block_height = self.sprite_list[sprite_block].get_rect()
                # NOTE(review): get_rect() returns a *new* Rect each call, so
                # this increment is discarded immediately.
                self.sprite_list[sprite_block].get_rect().y += block_height  # add new height
                # Stack menu entries vertically at x=0.
                self.sprite_list_rect.append([block_x, y_column, block_width, block_height])
                y_column += block_height
        self.y_grid = 0

    def sprite_menu(self):  # displays sprites
        for (image, image_rect) in zip(self.sprite_list, self.sprite_list_rect):
            if self.y_grid <= screen_width:
                self.y_grid = self.y_grid + 9
            screen.blit(image, image_rect)
            pygame.draw.rect(screen, (250, 250, 250), (image_rect[0], image_rect[1], screen_width, image_rect[3]), 1)
            pygame.draw.rect(screen, (0, 0, 0), image_rect, 2)

    def menu_clicked(self):  # checks if mouse interacts with sprite_menu
        if self.mouse_click:
            self.sprite_list_index = 0
            for img_rect in self.sprite_list_rect:
                # Index runs 1-based; callers subtract 1 when blitting.
                self.sprite_list_index += 1
                if self.mouse_rect.colliderect(img_rect):
                    self.mouse_sprite_collide = img_rect
                    self.clicked_sprite = True
                    break
                else:
                    self.clicked_sprite = False

    def sprite_place(self):  # allows sprite to be placed with rect argument
        if self.clicked_sprite:  # highlights clicked sprite
            pygame.draw.rect(screen, (255, 200, 0), self.mouse_sprite_collide, 3)
            pygame.display.update()
        if self.clicked_sprite:
            # Place only when clicking outside the selected menu entry.
            if self.mouse_click and not self.mouse_rect.colliderect(self.mouse_sprite_collide):
                screen.blit(self.sprite_list[self.sprite_list_index - 1], (self.mouse_x, self.mouse_y))
                self.level_save()
                self.mouse_click = False

    def sprite_edit(self):  # allows for sprites to be edited (not implemented)
        pass

    def level_save(self):  # saves level built
        # Append (sprite index, x, y) so the level can be reloaded later.
        with open('level.csv', 'a', newline='') as csv_file:
            writer = csv.writer(csv_file)
            writer.writerow((self.sprite_list_index - 1, self.mouse_x, self.mouse_y))
def game_event():
    """Poll pygame events: left click selects/places, right click deselects,
    ESC or window close saves the level and exits."""
    for event in pygame.event.get():  # loop to quit game
        # Each new event resets the click latch; re-set below on a left click.
        level.mouse_click = False
        if event.type == pygame.MOUSEBUTTONDOWN:
            if pygame.mouse.get_pressed() == (1, 0, 0):  # left button only
                level.mouse_click = True
                level.get_mouse_pos()
            if event.button == 3:  # right click clears the current selection
                level.clicked_sprite = False
        if event.type == pygame.QUIT:  # fix this
            level.level_save()
            pygame.quit()
            sys.exit()
        if event.type == pygame.KEYDOWN:
            if event.key == pygame.K_ESCAPE:
                level.level_save()
                pygame.quit()
                sys.exit()
def redraw():
    """Repaint the sprite menu and flip the display."""
    level.sprite_menu()
    pygame.display.update()
mx, my = pygame.mouse.get_pos()  # NOTE(review): unused in the visible code
level = LevelEditor()
screen.fill((220, 220, 220))
level.get_sprite_rect()
run = True
# Main loop: poll events, redraw the menu, then handle selection/placement.
while run:
    clock.tick(20)  # cap at 20 FPS
    game_event()
    redraw()
    if not level.clicked_sprite:
        level.menu_clicked()
    level.sprite_place()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-27 00:33
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration for the baseball_api_app.

    Creates the Batting, Fielding, Master and Pitching tables, then wires
    Fielding and Batting back to Master via ``player_code`` foreign keys.

    NOTE(review): every statistic is stored as a nullable CharField —
    presumably because the data is imported verbatim from CSV; confirm
    before adding numeric aggregation on these columns.
    """

    # First migration of the app: nothing to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        # Per-season batting statistics for a player stint.
        migrations.CreateModel(
            name='Batting',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year_code', models.CharField(blank=True, max_length=50, null=True)),
                ('stint', models.CharField(blank=True, max_length=50, null=True)),
                ('team_code', models.CharField(blank=True, max_length=50, null=True)),
                ('league_code', models.CharField(blank=True, max_length=50, null=True)),
                ('games', models.CharField(blank=True, max_length=50, null=True)),
                ('at_bats', models.CharField(blank=True, max_length=50, null=True)),
                ('runs', models.CharField(blank=True, max_length=50, null=True)),
                ('hits', models.CharField(blank=True, max_length=50, null=True)),
                ('doubles', models.CharField(blank=True, max_length=50, null=True)),
                ('triples', models.CharField(blank=True, max_length=50, null=True)),
                ('homeruns', models.CharField(blank=True, max_length=50, null=True)),
                ('runs_batted_in', models.CharField(blank=True, max_length=50, null=True)),
                ('stolen_bases', models.CharField(blank=True, max_length=50, null=True)),
                ('caught_stealing', models.CharField(blank=True, max_length=50, null=True)),
                ('walks', models.CharField(blank=True, max_length=50, null=True)),
                ('strikeouts', models.CharField(blank=True, max_length=50, null=True)),
                ('intentional_walks', models.CharField(blank=True, max_length=50, null=True)),
                ('hit_by_pitch', models.CharField(blank=True, max_length=50, null=True)),
                ('sacrifice_hits', models.CharField(blank=True, max_length=50, null=True)),
                ('sacrifice_flies', models.CharField(blank=True, max_length=50, null=True)),
                ('grounded_into_double_plays', models.CharField(blank=True, max_length=50, null=True)),
            ],
        ),
        # Per-season fielding statistics for a player stint at a position.
        migrations.CreateModel(
            name='Fielding',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year_code', models.CharField(blank=True, max_length=50, null=True)),
                ('stint', models.CharField(blank=True, max_length=50, null=True)),
                ('team_code', models.CharField(blank=True, max_length=50, null=True)),
                ('league_code', models.CharField(blank=True, max_length=50, null=True)),
                ('position', models.CharField(blank=True, max_length=50, null=True)),
                ('games', models.CharField(blank=True, max_length=50, null=True)),
                ('games_started', models.CharField(blank=True, max_length=50, null=True)),
                ('inn_outs', models.CharField(blank=True, max_length=50, null=True)),
                ('putouts', models.CharField(blank=True, max_length=50, null=True)),
                ('assists', models.CharField(blank=True, max_length=50, null=True)),
                ('errors', models.CharField(blank=True, max_length=50, null=True)),
                ('double_plays', models.CharField(blank=True, max_length=50, null=True)),
                ('passed_balls', models.CharField(blank=True, max_length=50, null=True)),
                ('wild_pitches', models.CharField(blank=True, max_length=50, null=True)),
                ('opp_stolen_bases', models.CharField(blank=True, max_length=50, null=True)),
                ('opp_caught_stealing', models.CharField(blank=True, max_length=50, null=True)),
                ('zone_rating', models.CharField(blank=True, max_length=50, null=True)),
            ],
        ),
        # Biographical master record; parent of the three stat tables.
        migrations.CreateModel(
            name='Master',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('player_code', models.CharField(blank=True, max_length=50, null=True)),
                ('birth_year', models.CharField(blank=True, max_length=50, null=True)),
                ('birth_month', models.CharField(blank=True, max_length=50, null=True)),
                ('birth_day', models.CharField(blank=True, max_length=50, null=True)),
                ('birth_country', models.CharField(blank=True, max_length=50, null=True)),
                ('birth_state', models.CharField(blank=True, max_length=50, null=True)),
                ('birth_city', models.CharField(blank=True, max_length=50, null=True)),
                ('death_year', models.CharField(blank=True, max_length=50, null=True)),
                ('death_month', models.CharField(blank=True, max_length=50, null=True)),
                ('death_day', models.CharField(blank=True, max_length=50, null=True)),
                ('death_country', models.CharField(blank=True, max_length=50, null=True)),
                ('death_state', models.CharField(blank=True, max_length=50, null=True)),
                ('death_city', models.CharField(blank=True, max_length=50, null=True)),
                ('name_first', models.CharField(blank=True, max_length=50, null=True)),
                ('name_last', models.CharField(blank=True, max_length=50, null=True)),
                ('name_given', models.CharField(blank=True, max_length=50, null=True)),
                ('weight', models.CharField(blank=True, max_length=50, null=True)),
                ('height', models.CharField(blank=True, max_length=50, null=True)),
                ('bats', models.CharField(blank=True, max_length=50, null=True)),
                ('throws', models.CharField(blank=True, max_length=50, null=True)),
                ('debut', models.CharField(blank=True, max_length=50, null=True)),
                ('final_game', models.CharField(blank=True, max_length=50, null=True)),
                ('retro_code', models.CharField(blank=True, max_length=50, null=True)),
                ('bbref_code', models.CharField(blank=True, max_length=50, null=True)),
            ],
        ),
        # Per-season pitching statistics; created after Master so its FK can
        # be declared inline.
        migrations.CreateModel(
            name='Pitching',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('year_code', models.CharField(blank=True, max_length=50, null=True)),
                ('stint', models.CharField(blank=True, max_length=50, null=True)),
                ('team_code', models.CharField(blank=True, max_length=50, null=True)),
                ('league_code', models.CharField(blank=True, max_length=50, null=True)),
                ('wins', models.CharField(blank=True, max_length=50, null=True)),
                ('losses', models.CharField(blank=True, max_length=50, null=True)),
                ('games', models.CharField(blank=True, max_length=50, null=True)),
                ('games_started', models.CharField(blank=True, max_length=50, null=True)),
                ('completed_games', models.CharField(blank=True, max_length=50, null=True)),
                ('shutouts', models.CharField(blank=True, max_length=50, null=True)),
                ('saves', models.CharField(blank=True, max_length=50, null=True)),
                ('outs_pitched', models.CharField(blank=True, max_length=50, null=True)),
                ('hits', models.CharField(blank=True, max_length=50, null=True)),
                ('earned_runs', models.CharField(blank=True, max_length=50, null=True)),
                ('homeruns', models.CharField(blank=True, max_length=50, null=True)),
                ('walks', models.CharField(blank=True, max_length=50, null=True)),
                ('strikeouts', models.CharField(blank=True, max_length=50, null=True)),
                ('opponent_batting_average', models.CharField(blank=True, max_length=50, null=True)),
                ('earned_run_average', models.CharField(blank=True, max_length=50, null=True)),
                ('intentional_walks', models.CharField(blank=True, max_length=50, null=True)),
                ('wild_pitches', models.CharField(blank=True, max_length=50, null=True)),
                ('batters_hit_by_pitch', models.CharField(blank=True, max_length=50, null=True)),
                ('balks', models.CharField(blank=True, max_length=50, null=True)),
                ('batters_faced_by_pitcher', models.CharField(blank=True, max_length=50, null=True)),
                ('games_finished', models.CharField(blank=True, max_length=50, null=True)),
                ('runs_allowed', models.CharField(blank=True, max_length=50, null=True)),
                ('sacrifices_by_opp_batters', models.CharField(blank=True, max_length=50, null=True)),
                ('sacrifice_flies_by_opp_batters', models.CharField(blank=True, max_length=50, null=True)),
                ('grounded_into_double_plays_by_opp_batters', models.CharField(blank=True, max_length=50, null=True)),
                ('player_code', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='baseball_api_app.Master')),
            ],
        ),
        # FKs added after creation because Fielding/Batting precede Master.
        migrations.AddField(
            model_name='fielding',
            name='player_code',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='baseball_api_app.Master'),
        ),
        migrations.AddField(
            model_name='batting',
            name='player_code',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='baseball_api_app.Master'),
        ),
    ]
|
import csv
import time
from kafka import KafkaProducer
# 实例化一个KafkaProducer示例,用于向Kafka投递消息
# Replay the gender column of a CSV access log into the Kafka topic 'sex',
# one record every 0.1 s, to simulate a live stream.
producer = KafkaProducer(bootstrap_servers='localhost:9092')
# Fix: use a context manager so the file handle is closed (the original
# left it open for the life of the process).
with open("../data/log_result.csv", "r", encoding='utf-8') as csvfile:
    reader = csv.reader(csvfile)
    for line in reader:
        sex = line[10]  # gender lives in column index 10 of each log row
        if sex == 'gender':
            continue  # skip the header row
        time.sleep(0.1)  # throttle: one message every 0.1 seconds
        # send() is asynchronous: messages are batched in the background.
        producer.send('sex', sex.encode('utf8'))
# Fix: flush buffered messages before the script exits, otherwise the last
# batch of asynchronous sends can be silently dropped.
producer.flush()
import gui as pychgui
import game as gm
import neuralNetwork as nn
import pickle as pck
def main():
    """Load a previously trained checkers network (or build a fresh one on
    failure) and start a human-vs-computer game through the GUI."""
    fname = "trainedNetwork.pkl"
    try:
        # Fix: context manager closes the file handle (the original opened
        # the file and never closed it).
        with open(fname, 'rb') as networkfile:
            network = pck.load(networkfile)
    except Exception as exc:
        # Any load failure (missing file, corrupt pickle, ...) falls back to
        # an untrained network, exactly as before.
        print(str(exc))
        # Topology: 5*64 inputs, ~2/3-sized hidden layer, one output neuron.
        neurons = [5 * 64, int(0.66 * 5 * 64), 1]
        network = nn.Network(neurons)
    app = pychgui.PyCheckerGUI()
    app.game.p1.setHumanUser()
    app.game.p2.setNeuralNetwork(network, app.game)
    app.game.p1.name = "David Kleiven"
    app.game.p2.name = "Computer"
    app.saveLastState = True
    app.play()


if __name__ == "__main__":
    main()
|
from flask import request, jsonify
from flask.views import MethodView
from src.DataTables.Tables import RegionalCapacities, GlobalCapacities, Offers
import json
import decimal
from itertools import groupby
from operator import itemgetter
from collections import defaultdict
# helper method to encode decimals as strings when jsonifying
def _json_encode_decimal(obj):
if isinstance(obj, decimal.Decimal):
return str(obj)
raise TypeError(repr(obj) + " is not JSON serializable")
# since python does not support tail recursion, this probably breaks with big result sets...
def _unpacker(dict, grouped_values, keys):
if len(keys) == 1:
dict[keys[0]].extend(grouped_values)
else:
a, *b = keys
if a in dict:
_unpacker(dict[a], grouped_values, b)
else:
dict[a] = defaultdict(list)
_unpacker(dict[a], grouped_values, b)
# helper method to create a nested dict from a result set
# nesting happens in order of the keys
def _to_nested_dict(rows, *keys):
    """Group ``rows`` (dicts) into a nested defaultdict keyed by ``keys``.

    Fixes: the parameter was named ``dict`` (shadowing the builtin) and the
    loop rebound the ``keys`` argument on every iteration.

    NOTE(review): itertools.groupby only groups *consecutive* equal keys —
    this assumes the result set arrives ordered by ``keys``; confirm the
    queries guarantee that ordering.
    """
    nested = defaultdict(list)
    for group_key, group_rows in groupby(rows, key=itemgetter(*keys)):
        _unpacker(nested, group_rows, group_key)
    return nested
class GetProjectRegionalAndGlobalCaps(MethodView):
    """GET endpoint: offers joined with global and regional capacities for
    one project, nested by supplier then region."""

    def __init__(self, dal):
        # dal: data-access layer exposing a SQLAlchemy session
        self.dal = dal

    def get(self):
        # project id arrives on the query string: ?project_id=...
        project_id = request.args.get('project_id')
        result = self.dal.session.query(Offers, GlobalCapacities, RegionalCapacities)\
            .join(GlobalCapacities, GlobalCapacities.supplier == Offers.supplier) \
            .join(RegionalCapacities, RegionalCapacities.supplier == Offers.supplier) \
            .filter(Offers.project_id.like(project_id)) \
            .with_entities(
                Offers.supplier,
                RegionalCapacities.region,
                RegionalCapacities.country_supplier,
                RegionalCapacities.country_destination,
                GlobalCapacities.capacity.label("global_cap"),
                RegionalCapacities.capacity.label("regional_cap")) \
            .all()
        grouped = _to_nested_dict([dict(row) for row in result], "supplier", "region")
        # NOTE(review): jsonify(json.dumps(...)) double-encodes — the client
        # receives a JSON *string*, not a JSON object. Confirm consumers
        # really expect this before changing it.
        return jsonify(json.dumps({project_id: grouped}, default=_json_encode_decimal)), 200
class GetRegionalAndGlobalCapacitiesWithOffers(MethodView):
    """GET endpoint: every offer with its global/regional capacities, nested
    by supplier, region and item (no project filter)."""

    def __init__(self, dal):
        # dal: data-access layer exposing a SQLAlchemy session
        self.dal = dal

    def get(self):
        result = self.dal.session.query(Offers, GlobalCapacities, RegionalCapacities) \
            .join(GlobalCapacities, GlobalCapacities.supplier == Offers.supplier) \
            .join(RegionalCapacities, RegionalCapacities.supplier == Offers.supplier) \
            .with_entities(
                Offers.supplier,
                RegionalCapacities.region,
                RegionalCapacities.country_supplier,
                RegionalCapacities.country_destination,
                GlobalCapacities.capacity.label("global_cap"),
                RegionalCapacities.capacity.label("regional_cap"),
                Offers.item)\
            .all()
        grouped = _to_nested_dict([dict(row) for row in result], "supplier", "region", "item")
        # NOTE(review): jsonify(json.dumps(...)) double-encodes the payload
        # (JSON string inside a JSON response) — confirm this is intended.
        return jsonify(json.dumps(grouped, default=_json_encode_decimal)), 200
class GetOfferCapacities(MethodView):
    """GET endpoint: capacities and unit price for a single item, nested by
    item, supplier and region."""

    def __init__(self, dal):
        # dal: data-access layer exposing a SQLAlchemy session
        self.dal = dal

    def get(self):
        # item id arrives on the query string: ?item_id=...
        item_id = request.args.get('item_id')
        result = self.dal.session.query(Offers, GlobalCapacities, RegionalCapacities) \
            .join(GlobalCapacities, GlobalCapacities.supplier == Offers.supplier) \
            .join(RegionalCapacities, RegionalCapacities.supplier == Offers.supplier) \
            .filter(Offers.item.like(item_id)) \
            .with_entities(
                Offers.item,
                Offers.supplier,
                Offers.capacity.label("item_cap"),
                RegionalCapacities.region,
                RegionalCapacities.country_supplier,
                RegionalCapacities.country_destination,
                GlobalCapacities.capacity.label("global_cap"),
                RegionalCapacities.capacity.label("regional_cap"),
                Offers.unit_price) \
            .all()
        grouped = _to_nested_dict([dict(row) for row in result], "item", "supplier", "region")
        # NOTE(review): jsonify(json.dumps(...)) double-encodes the payload
        # (JSON string inside a JSON response) — confirm this is intended.
        return jsonify(json.dumps(grouped, default=_json_encode_decimal)), 200
class AddRegionalCapacity(MethodView):
    """POST endpoint: create a RegionalCapacities row from a positional
    JSON array payload."""

    def __init__(self, dal):
        # dal: data-access layer exposing an insert() method
        self.dal = dal

    def post(self):
        # NOTE(review): unlike the sibling Add* endpoints (keyed objects),
        # this expects a positional JSON array — confirm the client contract.
        json_data = request.get_json(force=True)
        new_datum = {
            'country_supplier': json_data[0].lower(),
            'region': json_data[1].lower(),
            'country_destination': json_data[2].lower(),
            'type': json_data[3].lower(),
            'supplier': json_data[4].lower(),
            'capacity': json_data[5]
        }
        new_model = RegionalCapacities(**new_datum)
        # NOTE(review): sibling endpoints wrap the model in a list before
        # dal.insert(); confirm dal.insert accepts a bare model here.
        self.dal.insert(new_model)
        return jsonify(new_datum), 201
class AddGlobalCapacity(MethodView):
    """POST endpoint: create a GlobalCapacities row from a keyed JSON body
    (``supplier`` is lower-cased before persisting)."""

    def __init__(self, dal):
        # dal: data-access layer exposing an insert() method
        self.dal = dal

    def post(self):
        payload = request.get_json()
        record = {
            'supplier': payload['supplier'].lower(),
            'capacity': payload['capacity'],
        }
        self.dal.insert([GlobalCapacities(**record)])
        return jsonify(record), 201
class AddItemCapacity(MethodView):
    """POST endpoint: create an Offers row (an item-level capacity quote)
    from a keyed JSON body."""

    def __init__(self, dal):
        # dal: data-access layer exposing an insert() method
        self.dal = dal

    def post(self):
        payload = request.get_json(force=True)
        record = {
            'item': payload['item'],
            'supplier': payload['supplier'].lower(),
            'unit_price': payload['unit_price'],
            'project_id': payload['project_id'],
            'capacity': payload['capacity'],
        }
        self.dal.insert([Offers(**record)])
        return jsonify(record), 201
|
# Interactive trip-cost calculator: asks for a route, distance, fuel price
# and average consumption, then prints the rounded cost in złoty.
miastoA = input("Miasto startowe: ")
miastoB = input("Miasto końcowe: ")
dystans = int(input(f"Podaj odległość między {miastoA}-{miastoB}: "))
cena = float(input("Podaj cenę paliwa: "))
spalanie = float(input("Podaj średnie spalanie na 100km: "))
# litres used = dystans/100 * spalanie; rounded to grosze (2 decimals).
# (The redundant float() wrapper was dropped — the operands are already floats.)
koszt = round(dystans / 100 * spalanie * cena, 2)
print()
# Fix: typo in the output message ("porzejazdu" -> "przejazdu").
print(f"Koszt przejazdu {miastoA}-{miastoB} to {koszt}zł.")
from selenium import webdriver
from time import sleep
# Open Baidu, dump the href of every top-nav ('mnav') link, and click the
# Tieba link if present.
driver = None  # fix: defined before the try so finally can test it
try:
    driver = webdriver.Chrome()
    driver.maximize_window()
    driver.get('https://www.baidu.com')
    # Locate elements by class name; returns a list.
    elements = driver.find_elements_by_class_name('mnav')
    for e in elements:
        print(e.get_attribute('href'))  # each nav link's target URL
        if e.get_attribute('href') == "http://tieba.baidu.com/":
            e.click()
except Exception as e:
    print(e)
finally:
    sleep(3)
    # Fix: if webdriver.Chrome() itself raised, `driver` was previously
    # undefined and this block raised NameError, masking the real error.
    if driver is not None:
        driver.quit()
#
# [224] Basic Calculator
#
# https://leetcode.com/problems/basic-calculator/description/
#
# algorithms
# Hard (28.53%)
# Total Accepted: 64K
# Total Submissions: 224.4K
# Testcase Example: '"1 + 1"'
#
# Implement a basic calculator to evaluate a simple expression string.
#
# The expression string may contain open ( and closing parentheses ), the plus
# + or minus sign -, non-negative integers and empty spaces .
#
# You may assume that the given expression is always valid.
#
# Some examples:
#
# "1 + 1" = 2
# " 2-1 + 2 " = 3
# "(1+(4+5+2)-3)+(6+8)" = 23
#
#
#
#
# Note: Do not use the eval built-in library function.
#
#
class Solution(object):
    """Evaluate '+'/'-' expressions with parentheses (LeetCode 224)."""

    def calculate(self, s):
        """Return the integer value of the valid expression ``s``.

        Single left-to-right scan: digits accumulate into ``number``; '('
        pushes the running total and pending sign; ')' folds the inner
        result back into the saved context. Spaces are ignored.
        """
        total = 0       # value of the expression parsed so far
        number = 0      # digits of the operand currently being read
        sign = 1        # sign to apply to the next operand
        saved = []      # (total, sign) pairs stacked at each '('
        for ch in s:
            if ch.isdigit():
                number = number * 10 + int(ch)
            elif ch in "+-":
                total += sign * number
                number = 0
                sign = 1 if ch == "+" else -1
            elif ch == "(":
                saved.append(total)
                saved.append(sign)
                total, sign = 0, 1
            elif ch == ")":
                total += sign * number
                number = 0
                # first pop is the sign before '(', second the outer total
                total = total * saved.pop() + saved.pop()
        # fold in a trailing operand, if any (adding 0 is harmless)
        return total + sign * number
|
import os
import time
import atexit
import socket
import pathlib
import logging
import tempfile
import unittest
import subprocess
from contextlib import contextmanager
from typing import Optional
import docker
# Module-level logger; DEBUG so container startup can be traced in tests.
LOGGER = logging.getLogger(__package__)
logging.basicConfig(level=logging.DEBUG)

# Docker image used to host the test database (MariaDB is MySQL-compatible).
DOCKER_IMAGE = "mariadb:10"
# MySQL daemon's default listening port
DEFAULT_PORT = 3306
# Environment variables passed to MySQL container (credentials for tests;
# the root password is randomized so it can never be used accidentally).
DOCKER_ENV = {
    "MYSQL_DATABASE": "db",
    "MYSQL_USER": "user",
    "MYSQL_PASSWORD": "pass",
    "MYSQL_RANDOM_ROOT_PASSWORD": "yes",
}
# MySQL server config file: force TLS using the certs generated at runtime
# under /conf/certs (see the mysql() context manager below).
MY_CONF = """[mysqld]
ssl-ca=/conf/certs/ca.pem
ssl-cert=/conf/certs/server-cert.pem
ssl-key=/conf/certs/server-key.pem
require_secure_transport=ON
"""
DOCKER_NA: str = "Failed to connect to docker daemon"
DOCKER_AVAILABLE: bool = False
try:
    # Probe the local docker daemon once at import time; tests that need
    # MySQL are skipped when it is unreachable.
    DOCKER_CLIENT: docker.DockerClient = docker.from_env()
    DOCKER_AVAILABLE = DOCKER_CLIENT.ping()
    DOCKER_CLIENT.close()
except Exception:
    # Fix: was a bare `except:` which also swallowed SystemExit and
    # KeyboardInterrupt during import.
    pass
class MySQLFailedToStart(Exception):  # pragma: no cov
    """Raised when the MySQL container never becomes ready for connections."""
def check_connection(addr: str, port: int, *, timeout: float = 0.1) -> bool:
    """
    Probe ``(addr, port)`` with a TCP connect.

    Returns True when the connection is established within ``timeout``
    seconds, False on any failure (refused, unreachable, timed out).
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.settimeout(float(timeout))
        try:
            sock.connect((addr, port))
        except Exception:
            return False
        return True
def mkcleanup(docker_client, container):
    """
    Create a function which stops ``container`` and closes ``docker_client``.

    The function registers itself with the :py:mod:`atexit` module so the
    container is stopped before Python exits, and unregisters itself whenever
    it is called (unregister is a no-op if it already ran).

    Fixes: the ``func = None`` forward-reference dance was unnecessary — a
    nested function can reference its own name directly — and the bare
    ``except:`` now catches only ``Exception``.
    """
    def cleanup():
        atexit.unregister(cleanup)
        try:
            container.stop()
            container.wait()
        except Exception:
            # Best effort: the container may already be gone
            # (it is started with auto_remove=True).
            pass
        docker_client.close()

    atexit.register(cleanup)
    return cleanup
@contextmanager
def mysql(*, sql_setup: Optional[str] = None):
    """
    Start a MySQL container and yield ``(container_ip, root_cert_path)`` once
    it is ready for connections. ``sql_setup`` should be the .sql text used
    to initialize the database.

    TLS certificates are generated on the host with openssl and shared with
    the container through a bind-mounted temp directory; the container's
    wrapper command blocks until the ``ready`` marker file appears.

    Raises ``unittest.SkipTest`` when docker is unavailable and
    ``MySQLFailedToStart`` when the server never becomes ready.
    """
    if not DOCKER_AVAILABLE:
        raise unittest.SkipTest("Need docker to run MySQL")
    docker_client: docker.DockerClient = docker.from_env()
    with tempfile.TemporaryDirectory() as tempdir:
        # Create server config (forces TLS, see MY_CONF)
        sql_conf_path = pathlib.Path(tempdir, "my.conf")
        sql_conf_path.write_text(MY_CONF)
        sql_conf_path.chmod(0o660)
        # Create cert folder
        cert_dir_path = pathlib.Path(tempdir, "certs")
        cert_dir_path.mkdir(mode=0o755)
        # Volumes to mount
        volumes = {
            sql_conf_path.resolve(): {"bind": "/etc/mysql/conf.d/ssl.cnf"},
            cert_dir_path.resolve(): {"bind": "/conf/certs/"},
        }
        # Dump out SQL query to file; the image's entrypoint runs anything
        # found under /docker-entrypoint-initdb.d on first start.
        if sql_setup is not None:
            sql_setup_path = pathlib.Path(tempdir, "dump.sql")
            sql_setup_path.write_text(sql_setup)
            sql_setup_path.chmod(0o555)
            volumes[sql_setup_path.resolve()] = {
                "bind": "/docker-entrypoint-initdb.d/dump.sql"
            }
        # Tell the docker daemon to start MySQL. The wrapper command polls
        # for the /conf/certs/ready marker (written below once the certs
        # exist), fixes ownership, then hands off to the stock entrypoint.
        LOGGER.debug("Starting MySQL...")
        container = docker_client.containers.run(
            DOCKER_IMAGE,
            command="bash -c 'set -x; while ! test -f /conf/certs/ready; do sleep 0.1; done; chown mysql:mysql /etc/mysql/conf.d/ssl.cnf && chown mysql:mysql /conf/certs/* && ls -lAF /conf/certs/ && cat /conf/certs/server-*.pem && bash -xe /usr/local/bin/docker-entrypoint.sh mysqld'",
            environment=DOCKER_ENV,
            detach=True,
            auto_remove=True,
            volumes=volumes,
        )
        # Sometimes very bad things happen, this ensures that the container will
        # be cleaned up on process exit no matter what
        cleanup = mkcleanup(docker_client, container)
        try:
            # Get the IP from the docker daemon
            inspect = docker_client.api.inspect_container(container.id)
            container_ip = inspect["NetworkSettings"]["IPAddress"]
            # Create certificate chain: CA key + self-signed CA cert, then a
            # server key/cert signed by that CA.
            # From: https://mariadb.com/kb/en/library/certificate-creation-with-openssl/
            cmds = [
                ["openssl", "genrsa", "-out", "ca-key.pem", "2048"],
                [
                    "openssl",
                    "req",
                    "-new",
                    "-x509",
                    "-nodes",
                    "-key",
                    "ca-key.pem",
                    "-out",
                    "ca.pem",
                    "-subj",
                    f"/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Root Cert/CN=mysql.unittest",
                ],
                [
                    "openssl",
                    "req",
                    "-newkey",
                    "rsa:2048",
                    "-nodes",
                    "-keyout",
                    "server-key.pem",
                    "-out",
                    "server-req.pem",
                    "-subj",
                    f"/C=US/ST=Oregon/L=Portland/O=Company Name/OU=Server Cert/CN=mysql.unittest",
                ],
                [
                    "openssl",
                    "rsa",
                    "-in",
                    "server-key.pem",
                    "-out",
                    "server-key.pem",
                ],
                [
                    "openssl",
                    "x509",
                    "-req",
                    "-in",
                    "server-req.pem",
                    "-days",
                    "1",
                    "-CA",
                    "ca.pem",
                    "-CAkey",
                    "ca-key.pem",
                    "-set_serial",
                    "01",
                    "-out",
                    "server-cert.pem",
                ],
            ]
            for cmd in cmds:
                subprocess.call(cmd, cwd=cert_dir_path.resolve())
            root_cert_path = pathlib.Path(cert_dir_path, "ca.pem")
            server_cert_path = pathlib.Path(cert_dir_path, "server-cert.pem")
            server_key_path = pathlib.Path(cert_dir_path, "server-key.pem")
            server_cert_path.chmod(0o660)
            server_key_path.chmod(0o660)
            # Writing the marker unblocks the container's wrapper loop above.
            pathlib.Path(cert_dir_path, "ready").write_text("ready")
            # Wait until MySQL reports it's ready for connections.
            # NOTE(review): two "ready for connections" lines are awaited —
            # presumably the image's entrypoint starts mysqld twice during
            # initialization; confirm this holds for the pinned image tag.
            container_start_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
            ready = 0
            for line in container.logs(stream=True, follow=True):
                now_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
                LOGGER.debug(
                    "MySQL log (%0.02f seconds): %s",
                    (now_time - container_start_time),
                    line.decode(errors="ignore").strip(),
                )
                if b"ready for connections" in line:
                    ready += 1
                    if ready == 2:
                        break
            if ready != 2:
                raise MySQLFailedToStart('Never saw "ready for connections"')
            # Ensure that we can make a connection
            start_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
            max_timeout = float(os.getenv("MYSQL_START_TIMEOUT", "600"))
            LOGGER.debug(
                "Attempting to connect to MySQL: Timeout of %d seconds",
                max_timeout,
            )
            while not check_connection(container_ip, DEFAULT_PORT):
                end_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
                if (end_time - start_time) >= max_timeout:
                    raise MySQLFailedToStart("Timed out waiting for MySQL")
            end_time = time.clock_gettime(time.CLOCK_MONOTONIC_RAW)
            LOGGER.debug(
                "MySQL running: Took %0.02f seconds",
                end_time - container_start_time,
            )
            # Yield IP of container to caller
            yield container_ip, root_cert_path.resolve()
        finally:
            cleanup()
|
import datetime
from collections import defaultdict, namedtuple
from django.core.exceptions import ValidationError
from django.db.models import Q
from django.db import transaction
from api.models.CompliancePeriod import CompliancePeriod
from api.models.CreditTradeHistory import CreditTradeHistory
from api.models.CreditTradeStatus import CreditTradeStatus
from api.models.Organization import Organization
from api.models.OrganizationBalance import OrganizationBalance
from api.models.CreditTrade import CreditTrade
from api.exceptions import PositiveIntegerException
from api.notifications.notification_types import NotificationType
from api.async_tasks import async_send_notification, async_send_notifications
from django.db.transaction import on_commit
class CreditTradeService(object):
"""
Helper functions for Credit Trades
"""
@staticmethod
def get_organization_credit_trades(organization):
    """
    Fetch the credit transactions visible to ``organization``.

    Government users see everything except Cancelled transactions and
    other parties' Drafts/Submitted/Refused; fuel suppliers see only the
    trades they initiated or respond to, with gov-only types hidden until
    Approved/Declined.
    """
    # Government Organization -- assume OrganizationType id 1 is gov
    gov_org = Organization.objects.get(type=1)
    if organization == gov_org:
        # If organization == Government
        # don't show "Cancelled" transactions
        # don't show "Refused" transactions
        # don't show "Draft", "Submitted" transactions unless the
        # initiator was government
        # (Please note that government creating drafts and submitted is
        # for testing only, in reality government will not do this)
        credit_trades = CreditTrade.objects.filter(
            ~Q(status__status__in=["Cancelled"]) &
            (~Q(status__status__in=["Draft", "Submitted", "Refused"]) |
             Q(initiator=organization))
        )
    else:
        # If organization == Fuel Supplier
        # don't show "Approved" transactions (only show Completed)
        # don't show "Cancelled" transactions
        # don't show "Draft" transactions unless the initiator was
        # the fuel supplier
        # show "Submitted" and other transactions where the fuel
        # supplier is the respondent
        credit_trades = CreditTrade.objects.filter((
            ((~Q(status__status__in=[
                "Recorded", "Cancelled"
            ]) &
              Q(type__is_gov_only_type=False)) |
             (Q(status__status__in=[
                 "Approved", "Declined"
             ]) &
              Q(type__is_gov_only_type=True))) &
            ((~Q(status__status__in=["Draft"]) &
              Q(respondent=organization)) | Q(initiator=organization))
        ))
    return credit_trades
@staticmethod
def create_history(credit_trade, is_new=False):
    """
    Create and persist a CreditTradeHistory snapshot of ``credit_trade``.

    The acting user is the creator for new trades, otherwise the updater;
    the recorded role prefers director roles over the user's first role.
    Raises ValidationError when the snapshot fails model validation.
    """
    user = (
        credit_trade.create_user
        if is_new
        else credit_trade.update_user)
    # Record the most privileged relevant role: directors first, then
    # whatever role the user happens to have first.
    role_id = None
    if user.roles.filter(name="GovDirector").exists():
        role_id = user.roles.get(name="GovDirector").id
    elif user.roles.filter(name="GovDeputyDirector").exists():
        role_id = user.roles.get(name="GovDeputyDirector").id
    else:
        role_id = user.roles.first().id
    # zero_reason is optional on the trade.
    zero_reason = None
    if credit_trade.zero_reason is not None:
        zero_reason = credit_trade.zero_reason.id
    history = CreditTradeHistory(
        credit_trade_id=credit_trade.id,
        respondent_id=credit_trade.respondent.id,
        status_id=credit_trade.status.id,
        type_id=credit_trade.type.id,
        number_of_credits=credit_trade.number_of_credits,
        fair_market_value_per_credit=credit_trade.
        fair_market_value_per_credit,
        zero_reason_id=zero_reason,
        trade_effective_date=credit_trade.trade_effective_date,
        compliance_period_id=credit_trade.compliance_period_id,
        is_rescinded=credit_trade.is_rescinded,
        create_user=user,
        user_role_id=role_id
    )
    # Validate
    try:
        history.full_clean()
    except ValidationError as error:
        # TODO: Do something based on the errors contained in
        # e.message_dict
        # Display them to a user, or handle them programmatically.
        raise ValidationError(error)
    history.save()
@staticmethod
def approve(credit_trade, update_user=None):
    """
    Transfers the credits between the organizations and sets the Credit
    Transfer to Approved (effective today). A history snapshot is written
    before the trade itself is saved. Returns the updated trade.
    """
    status_approved = CreditTradeStatus.objects.get(status="Approved")
    effective_date = datetime.date.today()
    # Move the balances first; this raises on insufficient credits,
    # leaving the trade untouched.
    CreditTradeService.transfer_credits(
        credit_trade.credits_from,
        credit_trade.credits_to,
        credit_trade.id,
        credit_trade.number_of_credits,
        effective_date
    )
    if update_user:
        credit_trade.update_user = update_user
    credit_trade.status = status_approved
    CreditTradeService.create_history(credit_trade)
    credit_trade.save()
    return credit_trade
@staticmethod
@transaction.non_atomic_requests()
def transfer_credits(_from, _to, credit_trade_id, num_of_credits,
                     effective_date):
    """
    Make the appropriate addition and reduction to the credits for the
    organizations involved: expire each side's current balance row and
    create a new one linked to ``credit_trade_id``.

    Raises PositiveIntegerException when ``_from`` would go negative.

    NOTE(review): ``transaction.non_atomic_requests`` is documented for
    view functions; its effect on a service method like this is unclear —
    confirm the intended transactional behaviour, since the four save()
    calls below are not wrapped in an atomic block.
    """
    # Current (non-expired) balances; create zero-credit rows on first use.
    from_starting_bal, _ = OrganizationBalance.objects.get_or_create(
        organization_id=_from.id,
        expiration_date=None,
        defaults={'validated_credits': 0})
    to_starting_bal, _ = OrganizationBalance.objects.get_or_create(
        organization_id=_to.id,
        expiration_date=None,
        defaults={'validated_credits': 0})
    # Compute for end balance
    from_credits = from_starting_bal.validated_credits - num_of_credits
    to_credits = to_starting_bal.validated_credits + num_of_credits
    if from_credits < 0:
        raise PositiveIntegerException("Can't complete transaction,"
                                       "`{}` has insufficient credits"
                                       .format(_from.name))
    # Update old balance effective date
    from_starting_bal.expiration_date = effective_date
    to_starting_bal.expiration_date = effective_date
    # Create new fuel supplier balance
    from_new_bal = OrganizationBalance(
        organization=_from,
        validated_credits=from_credits,
        effective_date=effective_date,
        credit_trade_id=credit_trade_id
    )
    to_new_bal = OrganizationBalance(
        organization=_to,
        validated_credits=to_credits,
        effective_date=effective_date,
        credit_trade_id=credit_trade_id
    )
    # Save everything
    from_starting_bal.save()
    to_starting_bal.save()
    from_new_bal.save()
    to_new_bal.save()
@staticmethod
def validate_credits(credit_trades):
    """
    Checks and makes sure that the organizations have enough credit
    balance to satisfy ``credit_trades`` applied in order.

    Transfers are simulated against an in-memory balance list (see
    get_temp_balance/update_temp_balance) so nothing is written to the
    database. Raises PositiveIntegerException listing every trade that
    would drive a sender's balance negative.
    """
    errors = []
    temp_storage = []  # [{"id": org_id, "credits": n}, ...]
    for credit_trade in credit_trades:
        from_starting_index, from_starting_balance = CreditTradeService. \
            get_temp_balance(temp_storage, credit_trade.credits_from.id)
        to_starting_index, to_starting_balance = CreditTradeService. \
            get_temp_balance(temp_storage, credit_trade.credits_to.id)
        from_credits_remaining = from_starting_balance - \
            credit_trade.number_of_credits
        to_credits_remaining = to_starting_balance + \
            credit_trade.number_of_credits
        # Record both sides so later trades in the batch see the
        # simulated balances.
        CreditTradeService.update_temp_balance(
            temp_storage,
            from_starting_index,
            from_credits_remaining,
            credit_trade.credits_from.id)
        CreditTradeService.update_temp_balance(
            temp_storage,
            to_starting_index,
            to_credits_remaining,
            credit_trade.credits_to.id)
        if from_credits_remaining < 0:
            errors.append(
                "[ID: {}] "
                "Can't complete transaction,"
                "`{}` has insufficient credits.".format(
                    credit_trade.id, credit_trade.credits_from.name
                )
            )
    if errors:
        raise PositiveIntegerException(errors)
@staticmethod
def get_temp_balance(storage, organization_id):
    """
    Gets the credits of an organization stored in a temporary list.
    This allows us to simulate credit transfers without actually
    needing to write to the database. (e.g. Lets find out if
    the organization has enough credits to do the transfer)

    Returns ``(index, balance)`` where ``index`` is the entry's position
    in ``storage`` (None when the balance had to be fetched from the
    database or defaulted to 0).
    """
    starting_balance = None
    index = None
    if storage:
        # Linear scan; if duplicates existed the LAST match would win,
        # though update_temp_balance never creates duplicates.
        for balance_index, balance in enumerate(storage):
            if balance["id"] == organization_id:
                starting_balance = balance["credits"]
                index = balance_index
    if starting_balance is None:
        try:  # if balance hasn't been populated, get from the database
            organization_balance = OrganizationBalance.objects.get(
                organization_id=organization_id,
                expiration_date=None)
            starting_balance = organization_balance.validated_credits
        except OrganizationBalance.DoesNotExist:
            starting_balance = 0
    return index, starting_balance
@staticmethod
def get_compliance_period_id(credit_trade):
    """
    Gets the compliance period the trade's effective date falls under,
    returning its id, or None when no period covers that date.
    """
    # A period covers the date when effective_date <= date <= expiration.
    compliance_period = CompliancePeriod.objects.filter(
        effective_date__lte=credit_trade.trade_effective_date,
        expiration_date__gte=credit_trade.trade_effective_date
    ).first()
    if compliance_period is None:
        return None
    return compliance_period.id
@staticmethod
def update_temp_balance(storage, index, num_of_credits, organization_id):
"""
Update the temporary list that contains the credits for the
organizations
"""
if index is None:
storage.append({
"id": organization_id,
"credits": num_of_credits
})
else:
storage[index]["credits"] = num_of_credits
@staticmethod
def get_allowed_statuses(credit_trade, request):
    """
    This is used for validation.
    This will return a list of statuses that the credit trade can be
    updated to, given its current status, the requesting user's
    permissions, and the user's relationship to the trade.
    e.g. Draft -> Proposed
         Proposed -> Accepted, Rejected
    IDIR users uses the PROPOSE_CREDIT_TRANSFER permission to create PVRs
    """
    allowed_statuses = []
    # Non-government users can never make changes to government only
    # transactions
    if credit_trade.type.is_gov_only_type and \
            not request.user.is_government_user:
        return allowed_statuses
    if credit_trade.status.status == "Draft":
        if request.user.has_perm('PROPOSE_CREDIT_TRANSFER'):
            allowed_statuses.append("Cancelled")
            allowed_statuses.append("Draft")
            # Only the initiating organization may submit its own draft.
            if (request.user.has_perm('SIGN_CREDIT_TRANSFER') and
                    credit_trade.initiator == request.user.organization):
                allowed_statuses.append("Submitted")
            if (credit_trade.type.is_gov_only_type and
                    request.user.has_perm('RECOMMEND_CREDIT_TRANSFER')):
                allowed_statuses.append("Recommended")
    elif (credit_trade.status.status == "Submitted" and
          credit_trade.respondent == request.user.organization):
        # Only the respondent can act on a submitted proposal.
        if request.user.has_perm('REFUSE_CREDIT_TRANSFER'):
            allowed_statuses.append("Refused")
        if request.user.has_perm('SIGN_CREDIT_TRANSFER'):
            allowed_statuses.append("Accepted")
    elif credit_trade.status.status == "Accepted":
        if request.user.has_perm('RECOMMEND_CREDIT_TRANSFER'):
            allowed_statuses.append("Recommended")
            allowed_statuses.append("Not Recommended")
    elif credit_trade.status.status in [
            "Not Recommended", "Recommended"
    ]:
        if request.user.has_perm('APPROVE_CREDIT_TRANSFER'):
            allowed_statuses.append("Approved")
        if request.user.has_perm('DECLINE_CREDIT_TRANSFER'):
            allowed_statuses.append("Declined")
        # Gov-only types can be pulled back to Draft by decline/rescind.
        if (credit_trade.type.is_gov_only_type and (
                request.user.has_perm('DECLINE_CREDIT_TRANSFER') or
                request.user.has_perm('RESCIND_CREDIT_TRANSFER')
        )):
            allowed_statuses.append("Draft")
    elif credit_trade.status.status == "Recorded":
        if request.user.has_perm('APPROVE_CREDIT_TRANSFER') or \
                request.user.has_perm('USE_HISTORICAL_DATA_ENTRY'):
            allowed_statuses.append("Recorded")
    return allowed_statuses
@staticmethod
def dispatch_notifications(previous_state: CreditTrade,
                           credit_trade: CreditTrade):
    """Route status-change notifications: PVR (gov-only) trades and regular
    credit trades use different recipient maps."""
    if credit_trade.type.is_gov_only_type:
        return CreditTradeService.pvr_notification(
            previous_state, credit_trade)
    return CreditTradeService.credit_trade_notification(
        previous_state, credit_trade)
@staticmethod
def credit_trade_notification(_previous_state, credit_trade):
    """Queue notifications for a regular (two-party) credit trade.

    Maps the trade's new status to the parties that must be informed,
    then schedules the notifications to go out after the surrounding
    database transaction commits.

    :param _previous_state: prior CreditTrade snapshot (unused here;
        kept for signature parity with pvr_notification)
    :param credit_trade: the CreditTrade in its new state
    """
    government = Organization.objects.filter(
        type__type='Government').first()

    StatusChange = namedtuple('StatusChange', ['new_status'])
    ResultingNotification = namedtuple('ResultingNotification', [
        'recipient', 'notification_type'])

    # defaultdict(list): a status with no mapping simply yields no
    # notifications instead of raising KeyError.
    notification_map = defaultdict(list)

    notification_map[StatusChange('Draft')] = [
        ResultingNotification(
            credit_trade.initiator,
            NotificationType.CREDIT_TRANSFER_CREATED)
    ]
    notification_map[StatusChange('Submitted')] = [
        ResultingNotification(
            credit_trade.initiator,
            NotificationType.CREDIT_TRANSFER_SIGNED_1OF2),
        ResultingNotification(
            credit_trade.respondent,
            NotificationType.CREDIT_TRANSFER_SIGNED_1OF2)
    ]
    notification_map[StatusChange('Accepted')] = [
        ResultingNotification(
            credit_trade.initiator,
            NotificationType.CREDIT_TRANSFER_SIGNED_2OF2),
        ResultingNotification(
            credit_trade.respondent,
            NotificationType.CREDIT_TRANSFER_SIGNED_2OF2),
        ResultingNotification(
            government, NotificationType.CREDIT_TRANSFER_SIGNED_2OF2),
    ]
    notification_map[StatusChange('Refused')] = [
        ResultingNotification(
            credit_trade.initiator,
            NotificationType.CREDIT_TRANSFER_PROPOSAL_REFUSED),
        ResultingNotification(
            credit_trade.respondent,
            NotificationType.CREDIT_TRANSFER_PROPOSAL_REFUSED)
    ]
    notification_map[StatusChange('Recommended')] = [
        ResultingNotification(
            government,
            NotificationType.CREDIT_TRANSFER_RECOMMENDED_FOR_APPROVAL),
    ]
    notification_map[StatusChange('Not Recommended')] = [
        ResultingNotification(
            government,
            NotificationType.CREDIT_TRANSFER_RECOMMENDED_FOR_DECLINATION),
    ]
    notification_map[StatusChange('Approved')] = [
        ResultingNotification(
            credit_trade.initiator,
            NotificationType.CREDIT_TRANSFER_APPROVED),
        ResultingNotification(
            credit_trade.respondent,
            NotificationType.CREDIT_TRANSFER_APPROVED),
        ResultingNotification(
            government,
            NotificationType.CREDIT_TRANSFER_APPROVED),
    ]
    notification_map[StatusChange('Declined')] = [
        ResultingNotification(
            credit_trade.initiator,
            NotificationType.CREDIT_TRANSFER_DECLINED),
        ResultingNotification(
            credit_trade.respondent,
            NotificationType.CREDIT_TRANSFER_DECLINED),
        ResultingNotification(
            government,
            NotificationType.CREDIT_TRANSFER_DECLINED),
    ]

    if credit_trade.is_rescinded:
        # A rescind overrides the status-based mapping: only the two
        # trading parties are told, never the government.
        notifications_to_send = [
            ResultingNotification(
                credit_trade.initiator,
                NotificationType.CREDIT_TRANSFER_RESCINDED),
            ResultingNotification(
                credit_trade.respondent,
                NotificationType.CREDIT_TRANSFER_RESCINDED)
        ]
    else:
        notifications_to_send = notification_map[
            StatusChange(credit_trade.status.status)
        ]

    payloads = [{
        'interested_organization_id': notification.recipient.id,
        'message': notification.notification_type.name,
        'notification_type': notification.notification_type.value,
        'related_credit_trade_id': credit_trade.id,
        'related_organization_id': credit_trade.respondent.id,
        'originating_user_id': credit_trade.update_user.id
    } for notification in notifications_to_send]

    # Defer sending until the transaction commits so rolled-back state
    # changes never generate notifications.
    on_commit(lambda: async_send_notifications(payloads))
@staticmethod
def pvr_notification(previous_state, credit_trade):
    """Queue notifications for a government-only transaction (PVR).

    The notification chosen for a 'Draft' transition depends on how it
    came about: the same user pulling back their own recommended PVR, a
    different user returning it to the analyst, or a brand-new PVR.

    :param previous_state: prior CreditTrade snapshot (may be None)
    :param credit_trade: the CreditTrade in its new state
    """
    government = Organization.objects.filter(
        type__type='Government').first()

    StatusChange = namedtuple('StatusChange', ['new_status'])
    ResultingNotification = namedtuple('ResultingNotification', [
        'recipient', 'notification_type'])

    # defaultdict(list): a status with no mapping yields no notifications.
    notification_map = defaultdict(list)

    was_recommended = bool(
        previous_state) and previous_state.status.status == 'Recommended'
    if was_recommended and \
            previous_state.update_user == credit_trade.update_user:
        # Same user who recommended it moved it back: a pull-back.
        draft_notice = NotificationType.PVR_PULLED_BACK
    elif was_recommended:
        # A different user moved it back: returned to the analyst.
        draft_notice = NotificationType.PVR_RETURNED_TO_ANALYST
    else:
        draft_notice = NotificationType.PVR_CREATED

    notification_map[StatusChange('Draft')] = [
        ResultingNotification(government, draft_notice)
    ]
    notification_map[StatusChange('Recommended')] = [
        ResultingNotification(
            government,
            NotificationType.PVR_RECOMMENDED_FOR_APPROVAL),
    ]
    notification_map[StatusChange('Approved')] = [
        ResultingNotification(
            credit_trade.respondent,
            NotificationType.PVR_APPROVED),
        ResultingNotification(
            government,
            NotificationType.PVR_APPROVED),
    ]
    notification_map[StatusChange('Declined')] = [
        ResultingNotification(
            credit_trade.respondent,
            NotificationType.PVR_DECLINED),
        ResultingNotification(
            government,
            NotificationType.PVR_DECLINED),
    ]

    notifications_to_send = notification_map[
        StatusChange(credit_trade.status.status)
    ]

    payloads = [{
        'interested_organization_id': notification.recipient.id,
        'message': notification.notification_type.name,
        'notification_type': notification.notification_type.value,
        'related_credit_trade_id': credit_trade.id,
        'related_organization_id': credit_trade.respondent.id,
        'originating_user_id': credit_trade.update_user.id
    } for notification in notifications_to_send]

    # Defer sending until the transaction commits so rolled-back state
    # changes never generate notifications.
    on_commit(lambda: async_send_notifications(payloads))
|
"""
*****************************************************************************
FILE: Game.py
AUTHOR: Cal Reynolds
PARTNER: n/a
ASSIGNMENT: Project 6
DATE: 4/7/2017
DESCRIPTION: Scrabble!
*****************************************************************************
"""
import random
from cs110graphics import *
class Pieces:
    """ Bag of scrabble letter pieces for one game. """

    def __init__(self, board):
        """ Create the standard distribution of lettered pieces.

        Each entry in `letters` is (letter, count, point value).
        """
        self._pieceList = []
        self._cx = 500
        self._cy = 100
        letters = [['E', 12, 1], ['A', 9, 1], ['I', 9, 1], ['O', 8, 1],
                   ['N', 6, 1], ['R', 6, 1], ['L', 4, 1], ['U', 4, 1],
                   ['D', 4, 2], ['G', 3, 2], ['B', 2, 3], ['C', 2, 3],
                   ['M', 2, 3], ['P', 2, 3], ['F', 2, 4], ['H', 2, 4],
                   ['V', 2, 4], ['W', 2, 4], ['Y', 2, 4], ['K', 1, 5],
                   ['J', 1, 8], ['X', 1, 8], ['Q', 1, 10], ['Z', 1, 10]]
        # tuple unpacking instead of indexing by position
        for letter, count, value in letters:
            for _ in range(count):
                piece = SinglePiece(letter, value,
                                    (self._cx, self._cy), board)
                piece.setDepth(15)
                self._pieceList.append(piece)
        self._board = board
        self._dealtPiece = None

    def shuffle(self):
        """ Shuffle the bag in place. """
        random.shuffle(self._pieceList)

    def dealPiece(self):
        """ Remove and return the next piece from the bag.

        When the bag empties, the game-over banner is shown. Calling
        this again on an empty bag raises IndexError (as before).
        """
        # pop(0) removes the front piece directly; the old
        # remove(self._pieceList[0]) re-scanned the list for it.
        self._dealtPiece = self._pieceList.pop(0)
        if not self._pieceList:
            Endbutton(self._board, self._board.getWin())
        return self._dealtPiece
class SinglePiece(EventHandler):
    """ A single draggable Scrabble letter piece.

    Composed of three graphics (square, letter text, value text) that
    all forward mouse events to this object. A piece toggles between
    "picked up" (follows the mouse) and "put down" on successive clicks.
    """
    def __init__(self, letter, value, center, board):
        """ Establishes letter, value, position, and properties of piece """
        EventHandler.__init__(self)
        self._cx, self._cy = center
        # letter is drawn slightly below center; value in the lower-right corner
        self._letter = Text(letter, (self._cx, self._cy+7), 15)
        self._value = Text(value, (self._cx + 10, self._cy + 10), 8)
        self._numberValue = value
        self._center = center
        self._piece = Square(29, (self._cx, self._cy))
        self._piece.setFillColor("beige")
        self._piece.setBorderColor("grey")
        # all three graphics report clicks/moves back to this piece
        self._letter.addHandler(self)
        self._piece.addHandler(self)
        self._value.addHandler(self)
        self._moving = False
        self._location = (0, 0) # window location of the piece
        self._startPos = None # mouse position where movement started
        self._active = True
        self._player = board.getPlayer()
        self._board = board
    def activate(self):
        """ Activates piece, makes border yellow """
        self._active = True
        self._piece.setBorderColor('#FFFF00')
        self._piece.setFillColor('beige')
    def deactivate(self):
        """ Deactivates piece, makes boarder and fill color black """
        self._active = False
        self._piece.setBorderColor('black')
        self._piece.setFillColor('black')
    def placedOnBoard(self):
        """ If piece is on board, deactivates piece and sets border to black """
        # unlike deactivate(), keeps the beige fill so the letter stays readable
        self._active = False
        self._piece.setBorderColor('black')
    def addTo(self, win):
        """ Adds piece (square, letter, value) to window """
        win.add(self._piece)
        win.add(self._letter)
        win.add(self._value)
    def moveTo(self, position):
        """ Moves piece to certain location, keeping letter/value offsets """
        cx, cy = position
        self._piece.moveTo(position)
        letterPosition = (cx, cy+10)
        self._letter.moveTo(letterPosition)
        valuePosition = (cx+10, cy+10)
        self._value.moveTo(valuePosition)
        self._location = position
    def move(self, dx, dy):
        """ Moves piece by a certain amount in a given direction """
        oldx, oldy = self._location
        newx = oldx + dx
        newy = oldy + dy
        self.moveTo((newx, newy))
    def highlight(self):
        """ Sets border color to green to signify piece is moving """
        self._piece.setBorderColor("#7FFFD4")
    def setDepth(self, depth):
        """ Sets the piece to a certain depth (square behind its texts) """
        self._piece.setDepth(depth+4)
        self._value.setDepth(depth)
        self._letter.setDepth(depth)
    def handleMouseRelease(self, event):
        """ if piece != active, cannot be taken. If active, can be placed.

        First click picks the piece up (starts following the mouse);
        second click drops it and reports the landing to the player.
        """
        if not self._active:
            return
        if self._moving:
            # drop: push back behind other pieces and report the landing
            self.setDepth(25)
            self._piece.setBorderColor("yellow")
            self._moving = False
            self._player.report(self, event)
        else:
            # pick up: bring to front and remember where the drag started
            self.highlight()
            self.setDepth(10)
            self._moving = True
            self._startPos = event.getMouseLocation()
    def handleMouseMove(self, event):
        """ moves the piece by mouse dragging if active, nothing if not """
        if not self._active:
            return
        if self._moving:
            oldx, oldy = self._startPos
            newx, newy = event.getMouseLocation()
            self.move(newx - oldx, newy - oldy)
            # next delta is measured from the piece's new location
            self._startPos = self._location
    def getLocation(self):
        """ returns location of piece """
        return self._location
    def getpiece(self):
        """ returns the underlying Square graphic """
        return self._piece
    def getValue(self):
        """ returns number value of piece """
        return self._numberValue
class Player:
    """ One Scrabble player: their rack of 7 pieces, score display,
    and per-turn scoring state. """
    def __init__(self, playerNumb, win, board):
        """ Takes player number (0-based) to instantiate unique player """
        self._textCenterX = 540
        self._textCenterY = 70
        self._pointList = []
        self._board = board
        # each player's label/rack is stacked 110px below the previous one
        playerTxt = Text(('Player ' + str(playerNumb + 1)), (self._textCenterX,\
                         self._textCenterY + 110 * playerNumb), 30)
        win.add(playerTxt)
        self._win = win
        self._playerNumb = playerNumb
        # rack of 7 slots; None marks an empty slot to be refilled
        self._playerPieces = [None, None, None, None, None, None, None]
        pointTxt = Text(('Points: '), (640, self._textCenterY + 110 * \
                        self._playerNumb), 10)
        win.add(pointTxt)
        self._playerPointsWin = None  # Text widget showing the score
        self._score = 0
        self._wordMultiplier = 1  # product of DW/TW tiles hit this turn
        self._numberOfTurns = 0   # placements made this turn
        self._totalPlayerPoints = []
        self._points = 0          # running total score
        self._singleWordScore = 0 # letter points accumulated this turn
        self._pieces = None
    def report(self, piece, event):
        """ calls the board to report whether/where a player's piece landed """
        self._board.report(piece, event)
    def replenish(self, pieces):
        """ refill pieces that have been placed on board by player """
        self._pieces = pieces
        for i in range(7):
            if self._playerPieces[i] is None:
                dealtPiece = self._pieces.dealPiece()
                self._playerPieces[i] = dealtPiece
                dealtPiece.addTo(self._win)
                # place the new piece in this rack slot, next to the label
                dealtPiece.moveTo((self._textCenterX + i * 33, \
                                   self._textCenterY + 110 * self._playerNumb + 30))
                dealtPiece.deactivate()
    def activateAll(self):
        """ activates pieces (start of this player's turn) """
        for piece in self._playerPieces:
            piece.activate()
    def deactivateAll(self):
        """ deactivates pieces (end of this player's turn) """
        for piece in self._playerPieces:
            piece.deactivate()
    def removePiece(self, piece):
        """ removes piece from player's possession (slot becomes None) """
        for i in range(len(self._playerPieces)):
            if self._playerPieces[i] == piece:
                self._playerPieces[i] = None
    def getPlayerPieces(self):
        """ returns a player's pieces """
        return self._playerPieces
    def getPlayer(self):
        """ returns the player """
        return self
    def addValueList(self, tileInfo):
        """ Adds the value of the placed tile to a total sum of points.

        tileInfo is (piece value, letter multiplier, word multiplier).
        Also counts as one placement this turn.
        """
        tileValue = tileInfo[0]
        tileValue *= tileInfo[1]
        self._wordMultiplier *= tileInfo[2]
        self._singleWordScore += tileValue
        self._numberOfTurns += 1
    def putOutPoints(self):
        """ determines points of player, puts then on window.

        Applies the accumulated word multiplier to this turn's letter
        points, folds them into the total, and redraws the score text.
        """
        if self._points != 0:
            # remove the previous score text before drawing the new one
            self._win.remove(self._playerPointsWin)
        self._singleWordScore *= self._wordMultiplier
        self._wordMultiplier = 1
        self._points += self._singleWordScore
        self._singleWordScore = 0
        self._playerPointsWin = Text(str(self._points), (672, 70 + 110 * \
                                     (self._playerNumb)), 15)
        self._win.add(self._playerPointsWin)
    def zeroTurns(self):
        """ zeroes the amount of turns taken by player """
        self._numberOfTurns = 0
    def returnTurn(self):
        """ returns how many turns the player has taken """
        return self._numberOfTurns
    def getPoints(self):
        """ returns a player's points """
        return self._points
    def getPlayerNumb(self):
        """ returns the number of the player """
        return self._playerNumb
class Changebutton(EventHandler):
    """ Clickable button that ends the current player's turn """
    def __init__(self, board, win):
        """ Build the button graphics and register click handlers """
        EventHandler.__init__(self)
        self._board = board
        self._center = 300, 520
        button = Rectangle(150, 60, (225, 540))
        button.setFillColor('#FF8C00')
        label = Text('End Turn', (225, 547), 25)
        # add both graphics to the window, then wire both to this handler
        win.add(button)
        win.add(label)
        button.addHandler(self)
        label.addHandler(self)
        self._button = button
        self._text = label
    def handleMouseRelease(self, event):
        """ Score the turn just played, reset its counter, advance play """
        board = self._board
        board.putPointsOut()
        board.getPlayer().zeroTurns()
        board.changeTurn()
class Endbutton(EventHandler):
    """ Full-width banner announcing the winner when the bag is empty """
    def __init__(self, board, win):
        """ Draw the banner and message, and wire them up as a click target """
        EventHandler.__init__(self)
        self._board = board
        banner = Rectangle(800, 300, (450, 300))
        banner.setFillColor("#48D1CC")
        message = ("Congratulations!!!! Player %s won Scrabble with %s points!"
                   % (board.getWinnerPlayer() + 1, board.getWinnerPoints()))
        label = Text(message, (450, 320), 30)
        # bring banner and text in front of all game graphics
        banner.setDepth(5)
        label.setDepth(4)
        banner.addHandler(self)
        label.addHandler(self)
        win.add(banner)
        win.add(label)
        self._rectangle = banner
        self._text = label
        self._win = win
    def handleMouseRelease(self, event):
        """ Any click on the banner closes the window, ending the program """
        self._win.close()
class Tile:
    """ One board square; may hold a letter piece and carry multipliers.

    tileType is (fill color, label text, letter multiplier,
    word multiplier, label text size).
    """
    def __init__(self, center, tileType, board):
        """ establishes graphics, multipliers and state of tile """
        self._center = center
        self._size = 30
        self._square = Square(self._size, self._center)
        self._color = tileType[0]
        self._textLetters = tileType[1]
        self._textSize = tileType[4]
        self._wordMultiplier = tileType[3]
        self._letterMultiplier = tileType[2]
        self._text = Text(self._textLetters, self._center, self._textSize)
        self._square.setFillColor(self._color)
        self._piece = None      # SinglePiece occupying this tile, if any
        self._active = False    # whether a piece may legally land here
        self._board = board
        self._scrabBoard = None # cached grid of tiles, set on first use
    def addTo(self, win):
        """ Adds tile square and label to window """
        win.add(self._square)
        win.add(self._text)
    def getLocation(self):
        """ returns center location of tile """
        return self._center
    def getPiece(self):
        """ returns the piece on this tile (None if empty) """
        return self._piece
    def addPiece(self, piece):
        """ adds piece to tile """
        self._piece = piece
    def getWordMultiplier(self):
        """ returns word multiplier """
        return self._wordMultiplier
    def getLetterMultiplier(self):
        """ returns letter multiplier """
        return self._letterMultiplier
    def activate(self):
        """ activates tile, sets border color green to mark """
        self._active = True
        self._square.setBorderColor('#00FF7F')
    def deactivate(self):
        """ deactivates tile, sets border color black to mark """
        self._active = False
        self._square.setBorderColor('black')
    def getActiveStatus(self):
        """ returns whether tile is active """
        return self._active
    def activateIfNoPiece(self, r, c):
        """ Activate empty neighbours of (r, c).

        While it is still the player's first placement of the turn,
        occupied neighbours are scored instead (face value, no
        multipliers), joining them to the word being formed.
        """
        self._scrabBoard = self._board.getScrabBoard()
        grid = self._scrabBoard
        for dr, dc in ((-1, 0), (0, -1), (0, 1), (1, 0)):
            nr = r + dr
            nc = c + dc
            # skip neighbours that fall off the board
            if nr < 0 or nc < 0 or nr >= len(grid) or nc >= len(grid[r]):
                continue
            neighbour = grid[nr][nc]
            # NOTE: addValueList() below increments the turn counter, so
            # this first-placement test must be re-evaluated on every
            # iteration -- do not hoist it out of the loop.
            if self._board.getPlayer().returnTurn() == 0:
                if neighbour.getPiece() is not None:
                    self._board.getPlayer().addValueList(
                        (neighbour.getPiece().getValue(), 1, 1))
                else:
                    neighbour.activate()
            elif neighbour.getPiece() is None:
                neighbour.activate()
            else:
                # occupied neighbour after the first placement: ensure it
                # stays inactive (old code re-tested for None redundantly)
                neighbour.deactivate()
class Board:
    """ The Scrabble board: players, the piece bag, and the 15x15 tile
    grid. Also routes piece landings and turn changes. """
    def __init__(self, win):
        """ Instantiates properties of board, enables game to be played """
        #finds number of players, only allows 1-4
        playerNumb = int(input("How many players will be participating? (1-4)"))
        while playerNumb > 4 or playerNumb < 1:
            playerNumb = int(input("How many players will be participating?" + \
                                   " (1-4)"))
        self._playerNumb = playerNumb
        #instantiates attributes of each type of tile on board
        # tuple layout: (color, label, letter mult, word mult, label size)
        tw = ("red", "TW", 1, 3, 10)
        n = ("#DEB887", " ", 1, 1, 10)
        dl = ("#40c7ed", "DL", 2, 1, 10)
        dw = ("pink", "DW", 1, 2, 10)
        tl = ("#0066ff", "TL", 3, 1, 10)
        cnt = ("#fdffba", "*", 1, 1, 14)
        self._firstSquarePos = (50, 25)
        fx, fy = self._firstSquarePos
        # start on the "last" player so the first changeTurn() wraps to player 0
        self._current = playerNumb - 1
        self._win = win
        #creates list of players
        self._players = []
        for i in range(playerNumb):
            self._players.append(Player(i, win, self))
        #creates bag of letter pieces
        self._bagofpieces = Pieces(self)
        self._bagofpieces.shuffle()
        #distributes proper amount of pieces to however many players need
        for player in self._players:
            player.replenish(self._bagofpieces)
        #this grid represents board full of tuples with info to make tile objs.
        self._scrabBoard = [[tw, n, n, dl, n, n, n, tw, n, n, n, dl, n, n, tw],
                            [n, dw, n, n, n, tl, n, n, n, tl, n, n, n, dw, n],
                            [n, n, dw, n, n, n, dl, n, dl, n, n, n, dw, n, n],
                            [dl, n, n, dw, n, n, n, dl, n, n, n, dw, n, n, dl],
                            [n, n, n, n, dw, n, n, n, n, n, dw, n, n, n, n],
                            [n, tl, n, n, n, tl, n, n, n, tl, n, n, n, tl, n],
                            [n, n, dl, n, n, n, dl, n, dl, n, n, n, dl, n, n],
                            [tw, n, n, dl, n, n, n, cnt, n, n, n, dl, n, n, tw],
                            [n, n, dl, n, n, n, dl, n, dl, n, n, n, dl, n, n],
                            [n, tl, n, n, n, tl, n, n, n, tl, n, n, n, tl, n],
                            [n, n, n, n, dw, n, n, n, n, n, dw, n, n, n, n],
                            [dl, n, n, dw, n, n, n, dl, n, n, n, dw, n, n, dl],
                            [n, n, dw, n, n, n, dl, n, dl, n, n, n, dw, n, n],
                            [n, dw, n, n, n, tl, n, n, n, tl, n, n, n, dw, n],
                            [tw, n, n, dl, n, n, n, tw, n, n, n, dl, n, n, tw]]
        #using self._tiles is work around by Prof. Campbell during office hours
        self._tiles = []
        for r in range(len(self._scrabBoard)):
            fy += 30.4
            self._tiles.append([])
            # NOTE(review): uses _firstSquarePos[1] (the y value, 25) as the
            # starting x for each row -- presumably intentional so columns
            # begin at x=25; verify against the intended layout.
            fx = self._firstSquarePos[1]
            for c in range(len(self._scrabBoard[r])):
                tile = Tile((fx, fy), self._scrabBoard[r][c], self)
                self._tiles[-1].append(tile) #creates board as grid of tiles
                tile.addTo(win)
                tile.deactivate()
                fx += 30.4
        # from here on, _scrabBoard holds Tile objects, not type tuples
        self._scrabBoard = self._tiles
        a1 = int("7") # Prof. Campbell added this to get around
        b1 = int("7") # A Pylint bug.
        # the center (star) tile is the only legal first landing spot
        self._scrabBoard[a1][b1].activate()
        Changebutton(self, win)
        self.changeTurn()
    def changeTurn(self):
        """ changes the turn, incremented by one (wraps around) """
        self._players[self._current].deactivateAll()
        self._current += 1
        self._current %= self._playerNumb
        self._players[self._current].activateAll()
    def computeLanding(self, piece):
        """ computes the landing (row, col) of the piece on the board.

        Returns None when the piece is off-board, the tile is occupied,
        or the tile is not an active (legal) landing spot.
        """
        a1 = int("0") # Prof. Campbell added this to get around
        b1 = int("0") # A Pylint bug.
        tile = self._scrabBoard[a1][b1]
        # top-left corner of the grid = center of tile (0,0) minus half a tile
        x0, y0 = tile.getLocation()
        x0 -= 15
        y0 -= 15
        x1, y1 = piece.getLocation()
        col = (x1 - x0) // 30
        row = (y1 - y0) // 30
        if row < 0 or col < 0 or row > 14 or col > 14:
            return None
        if self._scrabBoard[row][col].getPiece() != None or \
                self._scrabBoard[row][col].getActiveStatus() is False:
            return None
        return row, col
    def report(self, piece, event):
        """ establishes what to do when piece is placed on board """
        if self.computeLanding(piece) != None:
            r, c = self.computeLanding(piece)
            landing = self._scrabBoard[r][c]
            if landing.getActiveStatus():
                # open up neighbouring tiles for the next placement
                self._scrabBoard[r][c].activateIfNoPiece(r, c)
                piece.placedOnBoard()
                landing.addPiece(piece)
                piece.moveTo(landing.getLocation()) #snaps piece to fit closest tile
                landing.deactivate()
                self._players[self._current].removePiece(piece)
                self._players[self._current].replenish(self._bagofpieces)
                # score = piece value x letter multiplier; word multiplier
                # is accumulated for the whole turn
                self._players[self._current].addValueList((piece.getValue(),\
                    landing.getLetterMultiplier(), landing.getWordMultiplier()))
    def getPlayer(self):
        """ returns the current player """
        return self._players[self._current]
    def getAllPieces(self):
        """ returns the bag of pieces """
        return self._bagofpieces
    def getScrabBoard(self):
        """ returns board of tiles """
        return self._scrabBoard
    def putPointsOut(self):
        """ calls on player to update point score """
        self._players[self._current].putOutPoints()
    def getWin(self):
        """ returns the window """
        return self._win
    def getWinnerPlayer(self):
        """ Returns the player number of the winner """
        largestPlayerNumb = self._players[0].getPoints()
        largestPlayer = 0
        for player in self._players:
            if player.getPoints() > largestPlayerNumb:
                largestPlayerNumb = player.getPoints()
                largestPlayer = player.getPlayerNumb()
        return largestPlayer
    def getWinnerPoints(self):
        """ Returns the points of the winning player """
        largestPlayerNumb = self._players[0].getPoints()
        for player in self._players:
            if player.getPoints() > largestPlayerNumb:
                largestPlayerNumb = player.getPoints()
        return largestPlayerNumb
def main(win):
    """ Size the window and launch a game of Scrabble """
    width, height = 900, 600
    win.setWidth(width)
    win.setHeight(height)
    Board(win)

StartGraphicsSystem(main)
|
# Copyright 2015 Metaswitch Networks
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Usage:
calicoctl pool (add|remove) <CIDRS>... [--ipip] [--nat-outgoing]
calicoctl pool range add <START_IP> <END_IP> [--ipip] [--nat-outgoing]
calicoctl pool show [--ipv4 | --ipv6]
Description:
Configure IP Pools
Options:
--ipv4 Show IPv4 information only
--ipv6 Show IPv6 information only
--nat-outgoing Apply NAT to outgoing traffic
--ipip Use IP-over-IP encapsulation across hosts
"""
import sys
import netaddr
from netaddr import IPNetwork, IPRange, IPAddress
from prettytable import PrettyTable
from pycalico.datastore_datatypes import IPPool
from connectors import client
from utils import (validate_cidr, validate_ip,
get_container_ipv_from_arguments)
def validate_arguments(arguments):
"""
Validate argument values:
<CIDRS>
:param arguments: Docopt processed arguments
"""
# Validate CIDR
cidrs = arguments.get("<CIDRS>")
start_ip = arguments.get("<START_IP>")
end_ip = arguments.get("<END_IP>")
if cidrs:
for cidr in cidrs:
if not validate_cidr(cidr):
print "Invalid CIDR specified %s" % cidr
sys.exit(1)
elif start_ip or end_ip:
if not (validate_ip(start_ip, 4) or validate_ip(start_ip, 6)):
print "Invalid START_IP specified."
sys.exit(1)
elif not (validate_ip(end_ip, 4) or validate_ip(end_ip, 6)):
print "Invalid END_IP specified."
sys.exit(1)
elif IPAddress(start_ip).version != IPAddress(end_ip).version:
print "START_IP and END_IP must be the same ip version"
sys.exit(1)
elif not IPAddress(start_ip) < IPAddress(end_ip):
print "START_IP must be a smaller ip address than END_IP"
sys.exit(1)
def pool(arguments):
    """
    Main dispatcher for pool commands. Calls the corresponding helper function.

    :param arguments: A dictionary of arguments already processed through
    this file's docstring with docopt
    :return: None
    """
    validate_arguments(arguments)
    ip_version = get_container_ipv_from_arguments(arguments)

    if arguments.get("add"):
        if arguments.get("range"):
            ip_pool_range_add(arguments.get("<START_IP>"),
                              arguments.get("<END_IP>"),
                              ip_version,
                              arguments.get("--ipip"),
                              arguments.get("--nat-outgoing"))
        else:
            ip_pool_add(arguments.get("<CIDRS>"),
                        ip_version,
                        arguments.get("--ipip"),
                        arguments.get("--nat-outgoing"))
        return
    if arguments.get("remove"):
        ip_pool_remove(arguments.get("<CIDRS>"), ip_version)
        return
    if arguments.get("show"):
        # no explicit version: show both families
        versions = [ip_version] if ip_version else [4, 6]
        for version in versions:
            ip_pool_show(version)
def ip_pool_add(cidrs, version, ipip, masquerade):
"""
Add the given CIDRS to the IP address allocation pool.
:param cidrs: The pools to set in CIDR format, e.g. 192.168.0.0/16
:param version: 4 or 6
:param ipip: Use IP in IP for this pool.
:return: None
"""
if version == 6 and ipip:
print "IP in IP not supported for IPv6 pools"
sys.exit(1)
# TODO Reject any cidrs that overlap with existing cidrs in the pool
for cidr in cidrs:
pool = IPPool(cidr, ipip=ipip, masquerade=masquerade)
client.add_ip_pool(version, pool)
def ip_pool_range_add(start_ip, end_ip, version, ipip, masquerade):
"""
Add the range of ip addresses as CIDRs to the IP address allocation pool.
:param start_ip: The first ip address the ip range.
:param end_ip: The last ip address in the ip range.
:param version: 4 or 6
:param ipip: Use IP in IP for this pool.
:return: None
"""
if version == 6 and ipip:
print "IP in IP not supported for IPv6 pools"
sys.exit(1)
ip_range = IPRange(start_ip, end_ip)
pools = client.get_ip_pools(version)
for pool in pools:
pool_net = IPNetwork(pool.cidr)
# Reject the new ip range if any of the following are true:
# - The new ip range contains all ips of any existing pool
# - An existing pool overlaps ips with the start of the new ip range
# - An existing pool overlaps ips with the end of the new ip range
if (pool_net in ip_range or
start_ip in pool_net or
end_ip in pool_net):
print "Cannot add range - range conflicts with pool %s" % pool.cidr
sys.exit(1)
cidrs = netaddr.iprange_to_cidrs(start_ip, end_ip)
for ip_net in cidrs:
new_pool = IPPool(ip_net.cidr, ipip=ipip, masquerade=masquerade)
client.add_ip_pool(version, new_pool)
def ip_pool_remove(cidrs, version):
"""
Remove the given CIDRs from the IP address allocation pool.
:param cidrs: The pools to remove in CIDR format, e.g. 192.168.0.0/16
:param version: 4 or 6
:return: None
"""
for cidr in cidrs:
try:
client.remove_ip_pool(version, IPNetwork(cidr))
except KeyError:
print "%s is not a configured pool." % cidr
def ip_pool_show(version):
"""
Print a list of IP allocation pools.
:return: None
"""
assert version in (4, 6)
headings = ["IPv%s CIDR" % version, "Options"]
pools = client.get_ip_pools(version)
x = PrettyTable(headings)
for pool in pools:
enabled_options = []
if version == 4:
if pool.ipip:
enabled_options.append("ipip")
if pool.masquerade:
enabled_options.append("nat-outgoing")
# convert option array to string
row = [str(pool.cidr), ','.join(enabled_options)]
x.add_row(row)
print x.get_string(sortby=headings[0])
|
import dash_core_components as dcc
from dash.dependencies import Input, Output
import dash_bootstrap_components as dbc
from datetime import date, timedelta
from app import app
import controllers
from views import SidebarView
import os
# Debug mode is enabled only when the environment explicitly opts in;
# a single .get() replaces the old membership-test-plus-lookup ternary.
DEBUG = os.environ.get('DEBUG') == 'true'

CONTENT_STYLE = {
    "padding": "2rem 1rem",
}

sidebar = SidebarView()

# Two-column layout: fixed sidebar on the left, routed page content on
# the right. dcc.Location tracks the URL for the routing callback.
app.layout = dbc.Container([
    dbc.Row([
        dcc.Location(id='url', refresh=False),
        dbc.Col(sidebar.layout(), width=2),
        dbc.Col(id="page-content", style=CONTENT_STYLE, width=10)
    ], justify="center")
], fluid=True)
@app.callback(Output('page-content', 'children'),
              [Input('url', 'pathname'), Input('time-range-slider', 'value')])
def display_page(pathname, value):
    """Render the page body for the current URL and slider window."""
    if pathname in ['/', '/margin']:
        control = controllers.MarginController()
        window_end = date(2019, 10, 10)
        window_start = window_end - timedelta(days=value)
        control.getData(window_start, window_end)
    elif pathname == '/inventory':
        control = controllers.InventoryController()
        control.getData()
    else:
        # unknown route
        return '404'
    return control.getLayout()
@app.callback(
    Output("margin-collapse", "is_open"),
    [Input('url', 'pathname')]
)
def toggle_margin_collpase(pathname):
    """Open the margin sidebar section only on margin pages."""
    print("collapse margin")
    return pathname in ['/', '/margin']
if __name__ == '__main__':
    # Bind to all interfaces so the app is reachable from outside the
    # host/container; DEBUG is controlled by the DEBUG env var above.
    app.run_server(host='0.0.0.0', debug=DEBUG)
|
import os
import difflib
import math
from logwriter.TemplateBuilder import TemplateBuilder
class LogContext(object):
    """Manages an output log file and template-driven event rendering.

    Wraps a TemplateBuilder for looking up event templates and validates
    that event key/value dicts match the template's expected keys.
    """

    def __init__(self, outpath, templates_dir=None):
        """
        :param outpath: directory where log files are written
        :param templates_dir: template directory; defaults to the
            "templates" directory next to this module
        """
        self.outpath = outpath
        if not templates_dir:
            self.templates = TemplateBuilder(os.path.join(os.path.dirname(__file__), "templates"))
        else:
            self.templates = TemplateBuilder(templates_dir)
        self.client_public_ipv4 = "93.184.216.34"
        self.fp = None       # open file object for the current log
        self.logfile = None  # full path of the current log file

    def openlog(self, filename):
        """Open (truncating) a log file under outpath and return its file object."""
        self.logfile = os.path.join(self.outpath, filename)
        self.fp = open(self.logfile, "w")
        return self.fp

    def closelog(self):
        """Close the current log; delete it if nothing was written."""
        self.fp.close()
        size = os.stat(self.logfile).st_size
        if size == 0:
            os.unlink(self.logfile)

    def do_validate_keys(self, event_type, event, kvdict):
        """Warn about keys in kvdict that the event template does not define.

        For each unknown key, print the key and a list of likely intended
        template keys.
        """
        keys = self.retrieve_template_keys(event_type, kvdict["event_id"])
        for k in kvdict:
            if k in keys or k == "event_id":
                continue
            print("The key '%s' does not exists in the event_type '%s' template '%s'" % (k, event_type, event))
            # Fuzzy suggestions first (bug fix: these were previously
            # computed and then immediately discarded by a reassignment),
            # then any template key containing the unknown key's middle
            # underscore-separated token.
            could_be = difflib.get_close_matches(k, keys, cutoff=0.6)
            sk = k.split("_")
            median = math.ceil(len(sk) / 2) - 1
            strpart = sk[median].upper()  # loop-invariant, hoisted
            for candidate in keys:
                if strpart in candidate.upper() and candidate not in could_be:
                    could_be.append(candidate)
            print("Could be:%s" % str(could_be))

    def retrieve_template_keys(self, event_type, event):
        """Return the key names the template defines for this event."""
        return self.templates.get_template_keys(event_type, event)

    def retrieve_template(self, template, event, variables_dict):
        """Render the event template with the given variables."""
        return self.templates.get_event(template, event, variables_dict)

    def retrieve_template_header(self, template, event):
        """Return the header for the given template/event."""
        return self.templates.get_header(template, event)
|
#!/usr/bin/python3
# by William Hofferbert
# Midi Shutdown Server
# creates a virtual midi device via amidithru,
# then listens for a control change message
# on that device, running a shutdown command
# via os.system when it gets that command.
import time
import mido
import os
import re
# midi device naming
name = "MidiShutdownServer"

# shutdown command
shutdown_cmd = "sudo init 0 &"

# control-change number/value that triggers the shutdown
shutdown_cc_num = 64
shutdown_cc_val = 127

# prevent shutdown command from running unless uptime is > secs
shutdown_abort_uptime_secs = 120

#
# Logic below
#

# set up backend
mido.set_backend('mido.backends.rtmidi')

# system command to set up the midi thru port
# TODO would be nice to do this in python, but
# rtmidi has issues seeing ports it has created
runCmd = "amidithru '" + name + "' &"
os.system(runCmd)

# regex to match on rtmidi port name convention
# raw string: \s and \d are regex escapes, not string escapes (a plain
# string here triggers an invalid-escape warning on Python 3.6+)
nameRegex = "(" + name + ":" + name + r"\s+\d+:\d+)"
matcher = re.compile(nameRegex)
newList = list(filter(matcher.match, mido.get_input_names()))
input_name = newList[0]
inport = mido.open_input(input_name)
def uptime():
    """Return the system uptime in seconds, read from /proc/uptime."""
    with open('/proc/uptime', 'r') as f:
        # first whitespace-separated field is total uptime in seconds
        first_field = f.read().split()[0]
    return float(first_field)
# keep running and watch for cmd
while True:
    # short sleep keeps CPU usage low between polls
    time.sleep(.1)
    # drain every message queued since the last poll
    while inport.pending():
        msg = inport.receive()
        if msg.type == "control_change":
            # only react to the configured CC number/value pair
            if ( msg.control == shutdown_cc_num and msg.value == shutdown_cc_val ):
                # ignore triggers too soon after boot (e.g. stray messages
                # while the MIDI stack is still coming up)
                if ( uptime() > shutdown_abort_uptime_secs):
                    os.system(shutdown_cmd)
|
import time
import numpy as np
import sys
import random
import os
from os import listdir
from os.path import isfile, join
import re
import json
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.cluster import DBSCAN
from sklearn.cluster import dbscan
from sklearn.feature_extraction.text import CountVectorizer,TfidfTransformer, TfidfVectorizer
from scipy.spatial import distance
from scipy import spatial
import cPickle
import matplotlib.pyplot as plt
from mediameter.cliff import Cliff
from leven import levenshtein
from multiprocessing import Pool,cpu_count
from joblib import Parallel, delayed
from sklearn.preprocessing import StandardScaler
from nltk.corpus import stopwords
from fasttext import FastVector
from nltk import StanfordNERTagger
from google.cloud import translate
from googleapiclient.discovery import build
from fasttext import FastVector
def loadfasttextmodel(filename='/home/ahmad/fastText_multilingual/'):
    """Load aligned fastText word vectors for each configured language.

    filename: directory containing wiki.<lng>.vec files and an
        alignment_matrices/ subdirectory. Fix: the argument used to be
        unconditionally overwritten by the hard-coded path, making it
        useless; it is now the default value so callers can supply
        their own location.
    Returns a dict mapping language code -> FastVector model.
    """
    w2v = dict()
    # other language sets tried: ['en','es','zh','hr','de','fa','ar','fr'], ['es','en','de']
    for lng in ['en']:
        w2v[lng] = FastVector(vector_file=filename + 'wiki.' + lng + '.vec')
        # rotate the vectors into the shared multilingual space
        w2v[lng].apply_transform(filename + 'alignment_matrices/' + lng + '.txt')
    return w2v
def dbclustering_purity(_w2vpairs, dbscan_eps=0.5, dbscan_minPts=2, min_samples_pt=2):
    """Cluster two embedding sets jointly with DBSCAN and score separability.

    _w2vpairs: pair of 2-D arrays of word embeddings (one per document).
    dbscan_eps / dbscan_minPts: DBSCAN parameters (cosine-based distance).
    min_samples_pt: unused; kept only for call-site compatibility.

    Returns [clusters, [n_pure, n_mixed, n_noise], purity_score], where a
    low purity_score means the two documents' words mix (likely duplicates).
    Sentinel -100000 values are returned when either side is empty.
    """
    if _w2vpairs[0].size == 0 or _w2vpairs[1].size == 0:
        return [[], [-100000, -100000, -100000], -100000]
    X = np.vstack((_w2vpairs[0], _w2vpairs[1]))
    X = StandardScaler().fit_transform(X)
    # source label per row: 1 = first document, 2 = second document
    Y = [1] * _w2vpairs[0].shape[0] + [2] * _w2vpairs[1].shape[0]
    # cosine similarity rescaled into a [0, 1] distance matrix; local name
    # chosen so the module-level scipy.spatial 'distance' import is not shadowed
    dist_matrix = cosine_similarity(X) + 1
    dist_matrix = dist_matrix / np.max(dist_matrix)
    dist_matrix = 1 - dist_matrix
    db = DBSCAN(eps=dbscan_eps, min_samples=dbscan_minPts, metric='precomputed',
                n_jobs=1).fit(dist_matrix.astype('float64'))
    # (dead inner helper 'cos_metric' removed — it was never called)
    labels_ = list(db.labels_)
    # number of clusters, ignoring DBSCAN's noise label (-1)
    _n = len(set(labels_))
    if -1 in labels_:
        _n -= 1
    clusters = [[] for _ in range(_n)]
    n_pure_cl = 0
    n_noise_cl = 0
    n_mixed_cl = 0
    n_pure_1 = 0
    n_pure_2 = 0
    # bucket each clustered point's source label; count noise separately
    for _idx, _lbl in enumerate(labels_):
        if _lbl == -1:
            n_noise_cl += 1
        else:
            clusters[_lbl].append(Y[_idx])
    # a cluster is "pure" when all its members come from one document
    for _members in clusters:
        if len(set(_members)) > 1:
            n_mixed_cl += 1
        else:
            n_pure_cl += 1
            if _members[0] == 1:
                n_pure_1 += 1
            elif _members[0] == 2:
                n_pure_2 += 1
    if min(n_pure_1 + n_mixed_cl, n_pure_2 + n_mixed_cl) == 0:
        # one side produced no clusters at all: treat as perfectly separable
        return [clusters, [n_pure_cl, n_mixed_cl, n_noise_cl], 1.0]
    else:
        return [clusters, [n_pure_cl, n_mixed_cl, n_noise_cl],
                1.0 * min(n_pure_1, n_pure_2) / (min(n_pure_1, n_pure_2) + n_mixed_cl + 0.00001)]
# Fix: the three scratch expressions below referenced names ('lng', 'score',
# 'transallleftNE', ...) that only ever existed in an interactive session and
# would raise NameError the moment this file is imported or run. They are
# preserved as comments for reference.
#sum([True for _lng in lng if 'es' == _lng[0] and 'en' == _lng[1]])
#sum([True for _score in score if _score>1])
#print len(transallleftNE),len(transallrightNE),len(lbl),len(lng)
#embeddingsmodel=loadfasttextmodel('Path To Vectors')
unifiedw2vmodel=dict()
# The triple-quoted block below is deliberately disabled code that loaded the
# sentence pairs and translated them via the Google Translate API.
'''
Allsentpairs=[]
Alllangpairs=[]
Allisdup_labels=[]
posfolderpath='/home/ahmad/duplicate-detection/eventregistrydata/positive/'
negfolderpath='/home/ahmad/duplicate-detection/eventregistrydata/negative/'
posfilenames = [join(posfolderpath, f) for f in listdir(posfolderpath) if isfile(join(posfolderpath, f))]
negfilenames = [join(negfolderpath, f) for f in listdir(negfolderpath) if isfile(join(negfolderpath, f))]
to=min(len(posfilenames),len(negfilenames))
print to
frm=0
#cnt=0
for frm in range(to):
    labels,langpairs,sentpairs=create_w2v_pairs(unifiedw2vmodel,[posfilenames[frm]],[negfilenames[frm]])
    if len(labels)==0:
        continue
    #print "processing ",frm,len(w2vpairs),len(w2vpairs[0]), " pairs"
    if frm%50 == 0 and frm>0:
        print frm
    Allisdup_labels.extend(labels)
    Alllangpairs.extend(langpairs)
    Allsentpairs.extend(sentpairs)
len(Alllangpairs),len(Allsentpairs)
transallleft=[]
transallright=[]
label=[]
lng=[]
for _i in range(27296,len(Allsentpairs)):
    if len(transallleft)>10000: #out of 124,948
        print "NEXT", _i
        break
    if _i % 1000==0:
        print _i, len(transallleft)
    _sent=Allsentpairs[_i]
    try:
        if ((Alllangpairs[_i][0]=='es' or Alllangpairs[_i][0]=='de') and Alllangpairs[_i][1]=='en') or (Alllangpairs[_i][0]=='de' and Alllangpairs[_i][1]=='es'):
            translation=service.translations().list(source=Alllangpairs[_i][0],target='en',q=[_sent[0]],format='text').execute()
            #transwords=[transw['translatedText'].encode('utf-8') for transw in translation['translations']]
            transwordsl=translation['translations'][0]['translatedText'].encode('utf-8')
            if Alllangpairs[_i][1]!='en':
                translation=service.translations().list(source=Alllangpairs[_i][1],target='en',q=_sent[1],format='text').execute()
                #transwords=[transw['translatedText'].encode('utf-8') for transw in translation['translations']]
                transwordsr=translation['translations'][0]['translatedText'].encode('utf-8')
            else:
                transwordsr=_sent[1]
            transallleft.append(transwordsl)
            transallright.append(transwordsr)
            label.append(Allisdup_labels[_i])
            lng.append(Alllangpairs[_i])
    except:
        print sys.exc_info()[0]
        pass
'''
# NOTE(review): this is an exact re-definition of dbclustering_purity defined
# earlier in this file; at import time this later definition is the one that
# wins. Likely notebook copy/paste residue — confirm and deduplicate.
def dbclustering_purity(_w2vpairs,dbscan_eps=0.5, dbscan_minPts=2,min_samples_pt=2):
    # Cluster both embedding sets together with DBSCAN over a cosine-derived
    # distance matrix and score how pure the clusters are per source document.
    # min_samples_pt is accepted but never used.
    if _w2vpairs[0].size ==0 or _w2vpairs[1].size ==0:
        # sentinel result when either document contributed no embeddings
        return [[],[-100000,-100000,-100000], -100000]
    X=np.vstack((_w2vpairs[0],_w2vpairs[1]))
    X = StandardScaler().fit_transform(X)
    # source label per row: 1 = first document, 2 = second document
    Y=[1]*_w2vpairs[0].shape[0]+[2]*_w2vpairs[1].shape[0]
    # NOTE(review): this local 'distance' shadows the scipy.spatial import
    distance = cosine_similarity(X)+1
    distance = distance/np.max(distance)
    distance = 1 - distance
    db = DBSCAN(eps=dbscan_eps, min_samples=dbscan_minPts, metric='precomputed', n_jobs=1).fit(distance.astype('float64'))
    # cos_metric is defined but never called (dead code)
    def cos_metric(x, y):
        i, j = int(x[0]), int(y[0])# extract indices
        #print cosine_similarity(X[i,].reshape(1,-1),X[j,].reshape(1,-1))
        return cosine_similarity(X[i,].reshape(1,-1),X[j,].reshape(1,-1))
    labels_=list(db.labels_)
    #labels_=dbscan(X, eps=0.5, min_samples=5)[1]
    # cluster count, excluding DBSCAN's noise label (-1)
    _n=len(set(labels_))
    if -1 in labels_:
        _n -= 1
    clusters= [[] for _ in range(_n)]
    n_pure_cl=0
    n_noise_cl=0
    n_mixed_cl=0
    n_pure_1=0
    n_pure_2=0
    # bucket the source label of every clustered point; count noise separately
    for _idx,_lbl in enumerate(labels_):
        if _lbl==-1:
            n_noise_cl+=1
        else:
            clusters[_lbl].append(Y[_idx])
    # a cluster is "pure" when all members come from a single document
    for _lbl in clusters:
        if len(set(_lbl))>1:
            n_mixed_cl+=1
        else:
            n_pure_cl+=1
            if _lbl[0]==1:
                n_pure_1+=1
            elif _lbl[0]==2:
                n_pure_2+=1
    #print n_pure_1,n_pure_2,n_mixed_cl
    if min(n_pure_1+n_mixed_cl,n_pure_2+n_mixed_cl)==0:
        # one side produced no clusters at all: treat as perfectly separable
        return [clusters, [n_pure_cl,n_mixed_cl,n_noise_cl], 1.0]
    else:
        return [clusters, [n_pure_cl,n_mixed_cl,n_noise_cl], 1.0*min(n_pure_1,n_pure_2)/(min(n_pure_1,n_pure_2)+n_mixed_cl+0.00001)]
# Scratch defaults — run() builds its own locals and these module-level
# copies are re-assigned again just before the evaluation block below.
Allpureclustersratio=[]
dbscanlabels=[]
dbh=0.01
_lng0='de'
_lng1='es'
def run(dbh=0.40,_lng0='es',_lng1='en'):
    """Score every translated sentence pair for one language pair.

    dbh: DBSCAN eps threshold passed to dbclustering_purity.
    _lng0/_lng1: source/target language codes a pair must match to be scored.

    Returns [purity_ratios, labels] for the pairs that were scored.
    NOTE(review): relies on module-level state that is only created in the
    commented-out loading block or an interactive session: 'label', 'lng',
    'transallleft', 'transallright', 'stpwords' and 'embeddingsmodel' are all
    undefined when this file is run top-to-bottom — confirm before reuse.
    """
    Allpureclustersratio=[]
    dbscanlabels=[]
    for idx in range(len(label)):
        # only evaluate pairs for the requested language combination
        if lng[idx][0]!=_lng0 or lng[idx][1]!=_lng1:
            continue
        w2vmatrix1=[]
        wlist=[]
        # crude tokenizer: split on punctuation and whitespace
        wordslist=re.split(r'[`\-=~!@#$%^&*()_+\[\]{};\'\\:"|<,./<>? ]', transallleft[idx])
        for word in wordslist:
            if '' !=word.strip() and word.strip().lower() not in stpwords:
                try:
                    if type(word)!=type(''):
                        word=word.strip().lower().encode('utf-8')
                    else:
                        word=word.strip().lower()
                    # bare except: silently skip out-of-vocabulary words
                    w2vmatrix1.append(list(embeddingsmodel['en'][word]))
                except:
                    #print sys.exc_info()[0]
                    pass
        embeddingpr=[np.array(w2vmatrix1)]
        w2vmatrix2=[]
        # same tokenization/embedding for the right-hand sentence
        wordslist=re.split(r'[`\-=~!@#$%^&*()_+\[\]{};\'\\:"|<,./<>? ]', transallright[idx])
        for word in wordslist:
            if '' !=word.strip() and word.strip().lower() not in stpwords:
                try:
                    if type(word)!=type(''):
                        word=word.strip().lower().encode('utf-8')
                    else:
                        word=word.strip().lower()
                    w2vmatrix2.append(list(embeddingsmodel['en'][word]))
                except:
                    #print sys.exc_info()[0]
                    pass
        embeddingpr.append(np.array(w2vmatrix2))
        # skip pairs where either side produced no embeddings at all
        if len(embeddingpr[0])==0 or len(embeddingpr[1])==0:
            print idx
            continue
        if idx%1000==0:
            print "processing ",idx
        clustersdist,numclusters,pureclustersratio=dbclustering_purity(embeddingpr,dbscan_eps=dbh, dbscan_minPts=2, min_samples_pt =2)
        Allpureclustersratio.append(pureclustersratio)
        dbscanlabels.append(label[idx])
    return [Allpureclustersratio, dbscanlabels]
dbh=0.2
_lng0='de'
_lng1='es'
Allpureclustersratio, dbscanlabels=run(dbh,_lng0,_lng1)
countpos=sum([True for _lbl,_lng in zip(Allisdup_labels,Alllangpairs) if _lbl==1 and _lng[0]==_lng0 and _lng[1]==_lng1])
countneg=sum([True for _lbl,_lng in zip(Allisdup_labels,Alllangpairs) if _lbl==0 and _lng[0]==_lng0 and _lng[1]==_lng1])
countpos=sum([True for _lbl in dbscanlabels if _lbl==1 ])
countneg=sum([True for _lbl in dbscanlabels if _lbl==0 ])
h=0.5
TP=sum([True for pp,_lbl in zip(Allpureclustersratio,dbscanlabels) if pp<=h and pp>=0 and _lbl==1])
FP=sum([True for pp,_lbl in zip(Allpureclustersratio,dbscanlabels) if pp<=h and pp>=0 and _lbl==0])
TN=sum([True for pp,_lbl in zip(Allpureclustersratio,dbscanlabels) if pp>h and pp>=0 and _lbl==0])
FN=sum([True for pp,_lbl in zip(Allpureclustersratio,dbscanlabels) if pp>h and pp>=0 and _lbl==1])
d=countpos-(TP+FN)
#FN+=d
d=countneg-(TN+FP)
#FP+=d
poserror=sum([True for pp,_lbl in zip(Allpureclustersratio,labels) if pp<0 and _lbl==1])
negerror=sum([True for pp,_lbl in zip(Allpureclustersratio,labels) if pp<0 and _lbl==0])
Precision=100.0*TP/(TP+FP+0.000001)
Recall=100.0*TP/(TP+FN+0.000001)
F1=100.0*(2.0*TP)/((2.0*TP+1.0*FN+FP)+0.000001)
F2=100.0*(5.0*TP)/((5.0*TP+4.0*FN+FP)+0.000001)
print dbh,_lng0,_lng1,TP,TN,FP,FN,str(100.0*(TP+TN)/(TP+TN+FP+FN+0.0001)) + "," + str(F1)+", "+ str(F2)+", "+ str(Precision)+", "+ str(Recall)+ str(", ")+ str(100.0*TP/(TP+FN+0.0001))+ str(", ")+ str(100.0*TN/(TN+FP+0.0001))+ str(", ")+ str( 100.0*FP/(TN+FP+0.0001))+ str(", ")+ str( 100.0*FN/(TP+FN+0.0001))+", "+str((TP+FN))+", "+str((TN+FP))+", "+str((1.0*TP+FN)/(TN+FP+TP+FN+0.0001)),h,poserror,negerror
print Precision,Recall
print 100.0*FP/(FP+TN),100.0*FN/(FN+TP)
print TP+TN+FP+FN,countpos,countneg
|
import pandas as pd
import numpy as np
import sys
# Require the test-file path as the first CLI argument (used near the bottom).
if len(sys.argv) < 2:
    print("Run as: python/python3 <file_name.py> <absolute_path_of_test_file>")
    sys.exit(0)
# Training data path is hard-coded; the CLI argument is only the test file.
csv_path = 'AdmissionDataset/data.csv'#raw_input("Enter path to input CSV file: ")
dataset = pd.read_csv(csv_path)
# drop the serial-number column
dataset.drop(dataset.columns[[0]], axis=1, inplace=True)
#split data into train data and validation data
# 80/20 split by row order (no shuffling)
splitted = np.split(dataset, [int(.8 * len(dataset.index))])
train_data = splitted[0]
validation_data = splitted[1]
# first seven columns are features; the eighth is the regression target
Attributes = dataset.keys()[[0,1,2,3,4,5,6]]
Label = dataset.keys()[7]
# z-score normalize the training features with their own mean/std
for att in Attributes:
    mean = np.mean(train_data[att].values)
    std = np.std(train_data[att].values)
    train_data[att] = (train_data[att]-mean)/(std)
# NOTE(review): the validation set is normalized with its OWN mean/std
# rather than the training statistics — this is a form of leakage and makes
# train/validation feature scales inconsistent; confirm whether intended.
for att in Attributes:
    mean = np.mean(validation_data[att].values)
    std = np.std(validation_data[att].values)
    validation_data[att] = (validation_data[att]-mean)/(std)
att_data = train_data[Attributes]
label_data = train_data[Label]
X = att_data.values
Y = label_data.values
X = np.array(X)
# prepend a column of ones for the intercept term
extra_col = np.ones([X.shape[0],1])
X = np.concatenate((extra_col,X),axis=1)
# closed-form least squares: theta = (X^T X)^-1 X^T Y
theta = np.matmul( np.matmul( np.linalg.inv(np.matmul(X.T, X)), X.T ) , Y )
print ('theta: '+str(theta))
def predict(row):
    """Predict the target value for a single-row feature DataFrame.

    Prepends a bias term of 1 to the row's 7 feature values and takes the
    dot product with the module-level fitted coefficient vector ``theta``.
    """
    features = row.values.reshape([1, 7])
    bias = np.ones([1, 1])
    augmented = np.concatenate((bias, features), axis=1)
    return float(np.dot(augmented, theta))
def calculate_performance(validation_data):
    """Print MSE, MAE and mean percentage error over the given DataFrame.

    Uses the module-level ``predict``, ``Attributes`` and ``Label``.
    Predictions are made row by row; errors are accumulated and averaged.
    """
    print ('\n\n\nPerformance Measures\n\n')
    n_rows = len(validation_data)
    sq_err_sum = 0.0
    abs_err_sum = 0.0
    pct_err_sum = 0.0
    for idx in range(n_rows):
        current = validation_data.iloc[[idx]]
        y_hat = predict(validation_data.iloc[[idx]][Attributes])
        y_true = float(current[Label])
        residual = y_hat - y_true
        sq_err_sum += residual ** 2
        abs_err_sum += abs(residual)
        # percentage error keeps its sign: over-prediction counts negative
        pct_err_sum += (y_true - y_hat) / y_true
    MSE = sq_err_sum / n_rows
    MAE = abs_err_sum / n_rows
    MPE = (pct_err_sum / n_rows) * 100
    print ('Mean Squared Error:')
    print (MSE)
    print ('\nMean Absolute Error:')
    print (MAE)
    print ('\nMean Percentage Error:')
    print (MPE)
# report metrics on the held-out validation split
calculate_performance(validation_data)
# the test file comes from the command line (checked at the top of the file)
csv_path = str(sys.argv[1])#raw_input("Enter path to input CSV file: ")
test_set = pd.read_csv(csv_path)
# drop the serial-number column, mirroring the training preprocessing
test_set.drop(test_set.columns[[0]], axis=1, inplace=True)
def predict_test(test_set):
    """Normalize the test set in place and print its performance metrics."""
    # NOTE(review): normalizes with the TEST set's own mean/std instead of the
    # training statistics — inconsistent with how the model was fitted;
    # confirm whether this matches the assignment's intent.
    for att in Attributes:
        mean = np.mean(test_set[att].values)
        std = np.std(test_set[att].values)
        test_set[att] = (test_set[att]-mean)/(std)
    print ("\nPredictions:")
    calculate_performance(test_set)
predict_test(test_set)
|
#!/usr/bin/env python
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-
from os import path
import multiprocessing
from bes.testing.unit_test import unit_test
from bes.git.git_repo_operation_options import git_repo_operation_options
from bes.git.git_temp_repo import git_temp_repo
from bes.git.git_unit_test import git_temp_home_func
from bes.git.git_util import git_util
class test_git_util_repo_update_submodule(unit_test):
  """Unit tests for git_util.repo_update_submodule().

  Each test builds temporary remote repos (a main repo plus one or more
  submodule repos), then verifies that repo_update_submodule() pins the
  submodule to the requested revision.
  """

  @git_temp_home_func()
  def test_simple(self):
    """Updating a single submodule moves its pinned revision to the new hash."""
    sub_content = [
      'file subfoo.txt "this is subfoo" 644',
    ]
    sub_repo = self._make_repo(remote = True, content = sub_content, prefix = '-mod-')
    rev1 = sub_repo.last_commit_hash(short_hash = True)
    content = [
      'file foo.txt "this is foo" 644',
    ]
    r1 = self._make_repo(remote = True, content = content, prefix = '-main-')
    self.assertEqual( [ 'foo.txt' ], r1.find_all_files() )
    r1.submodule_add(sub_repo.address, 'submod1')
    r1.commit('add mod submodule', '.')
    r1.push()
    # submodule starts out pinned at the submodule repo's initial commit
    self.assertEqual( rev1, r1.submodule_status_one('submod1').revision )
    rev2 = sub_repo.add_file('sub_kiwi.txt', 'this is sub_kiwi.txt', push = True)
    # a new commit in the submodule repo does not move the pin by itself
    self.assertEqual( rev1, r1.submodule_status_one('submod1').revision )
    git_util.repo_update_submodule(r1.address, 'submod1', 'master', rev2, False)
    r1.pull()
    r1.submodule_init(submodule = 'submod1')
    self.assertEqual( rev2, r1.submodule_status_one('submod1').revision )

  @git_temp_home_func()
  def test_two_submodules(self):
    """Updating one of two submodules leaves the other untouched."""
    sub_content1 = [
      'file subfoo1.txt "this is subrepo1" 644',
    ]
    sub_repo1 = self._make_repo(remote = True, content = sub_content1, prefix = '-subrepo1-')
    sub_repo1_rev1 = sub_repo1.last_commit_hash(short_hash = True)
    sub_content2 = [
      'file subfoo2.txt "this is subrepo2" 644',
    ]
    sub_repo2 = self._make_repo(remote = True, content = sub_content2, prefix = '-subprepo2-')
    sub_repo2_rev1 = sub_repo2.last_commit_hash(short_hash = True)
    content = [
      'file foo.txt "this is foo" 644',
    ]
    r1 = self._make_repo(remote = True, content = content, prefix = '-main-')
    self.assertEqual( [ 'foo.txt' ], r1.find_all_files() )
    r1.submodule_add(sub_repo1.address, 'submod1')
    r1.commit('add mod submod1', [ '.gitmodules', 'submod1' ])
    r1.push()
    self.assertEqual( sub_repo1_rev1, r1.submodule_status_one('submod1').revision )
    r1.submodule_add(sub_repo2.address, 'submod2')
    r1.commit('add mod submod2', [ '.gitmodules', 'submod2' ])
    r1.push()
    self.assertEqual( sub_repo2_rev1, r1.submodule_status_one('submod2').revision )
    # update submod1 only, then verify it moved
    sub_repo1_rev2 = sub_repo1.add_file('sub_kiwi1.txt', 'this is sub_kiwi1.txt', push = True)
    self.assertEqual( sub_repo1_rev1, r1.submodule_status_one('submod1').revision )
    git_util.repo_update_submodule(r1.address, 'submod1', 'master', sub_repo1_rev2, False)
    r1.pull()
    r1.submodule_init(submodule = 'submod1')
    self.assertEqual( sub_repo1_rev2, r1.submodule_status_one('submod1').revision )
    # now update submod2 independently
    sub_repo2_rev2 = sub_repo2.add_file('sub_kiwi2.txt', 'this is sub_kiwi2.txt', push = True)
    self.assertEqual( sub_repo2_rev1, r1.submodule_status_one('submod2').revision )
    git_util.repo_update_submodule(r1.address, 'submod2', 'master', sub_repo2_rev2, False)
    r1.pull()
    r1.submodule_init(submodule = 'submod2')
    self.assertEqual( sub_repo2_rev2, r1.submodule_status_one('submod2').revision )

  @git_temp_home_func()
  def test_no_change(self):
    """Updating to the already-pinned revision is a harmless no-op."""
    sub_content = [
      'file subfoo.txt "this is subfoo" 644',
    ]
    sub_repo = self._make_repo(remote = True, content = sub_content, prefix = '-mod-')
    rev1 = sub_repo.last_commit_hash(short_hash = True)
    content = [
      'file foo.txt "this is foo" 644',
    ]
    r1 = self._make_repo(remote = True, content = content, prefix = '-main-')
    self.assertEqual( [ 'foo.txt' ], r1.find_all_files() )
    r1.submodule_add(sub_repo.address, 'submod1')
    r1.commit('add mod submodule', '.')
    r1.push()
    self.assertEqual( rev1, r1.submodule_status_one('submod1').revision )
    git_util.repo_update_submodule(r1.address, 'submod1', 'master', rev1, False)
    r1.pull()
    r1.submodule_init(submodule = 'submod1')
    self.assertEqual( rev1, r1.submodule_status_one('submod1').revision )

  def _make_repo(self, remote = True, content = None, prefix = None, commit_message = None):
    # Helper: build a temporary (optionally remote-backed) git repo fixture.
    return git_temp_repo(remote = remote, content = content, prefix = prefix,
                         debug = self.DEBUG, commit_message = commit_message)
# run this module's tests when executed directly
if __name__ == '__main__':
  unit_test.main()
|
# Generated by Django 3.2.7 on 2021-10-11 08:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter Student.SeatCategory into an optional free-text field."""

    dependencies = [
        ('api', '0011_auto_20211011_1358'),
    ]

    operations = [
        migrations.AlterField(
            model_name='student',
            name='SeatCategory',
            # NOTE(review): null=True together with default='' means both NULL
            # and '' can represent "empty"; Django convention for text fields
            # is blank=True with null=False. Left unchanged — this is a
            # generated migration and editing it would alter the schema.
            field=models.TextField(blank=True, default='', null=True),
        ),
    ]
|
from flask import Blueprint, jsonify
from tabulation.controllers.user_controller import UserController
from tabulation.controllers.event_controller import EventController
from tabulation.controllers.criteria_controller import CriteriaController
from tabulation.controllers.judge_controller import JudgeController
from tabulation.controllers.participant_controller import ParticipantController
from tabulation.controllers.role_controller import RoleController
from tabulation.controllers.organization_controller import OrganizationController
from tabulation.controllers.organization_type_controller import OrganizationTypeController
from tabulation.controllers.event_type_controller import EventTypeController
from tabulation.controllers.participant_team_controller import ParticipantTeamController
from tabulation.controllers.participant_type_controller import ParticipantTypeController
from tabulation.controllers.event_score_controller import EventScoreController
from tabulation.models.db import *
from tabulation.utils.functions.access_restriction import *
# Blueprint that carries every route in this module.
main = Blueprint('main', __name__)
@main.route('/ping', methods=['GET'])
def ping():
    """Liveness probe; always returns an empty JSON object."""
    return jsonify({})
"""
+--------------------------------------------------------------------------
| User routes
+--------------------------------------------------------------------------
| Below are the routes related to users.
|
"""
# Get routes
# NOTE(review): every guard below returns 401 when is_admin() is TRUE, i.e.
# it is the admins who are denied. That looks inverted for admin-managed
# resources — confirm is_admin()'s semantics in
# utils.functions.access_restriction before relying on these routes.
@main.route('/users', methods=['GET'])
@token_required(allowed_guest=False)
def get_users(current_user):
    """Return all users."""
    if is_admin(current_user):
        return jsonify(unauthorized_access()), 401
    return UserController.all()
@main.route('/users/<id>', methods=['GET'])
@token_required(allowed_guest=False)
def get_user(current_user, id):
    """Return one user by id."""
    if is_admin(current_user):
        return jsonify(unauthorized_access()), 401
    return UserController.find(id)
# Post routes
@main.route('/users', methods=['POST'])
@token_required(allowed_guest=False)
def create_user(current_user):
    """Create a user from the request payload."""
    if is_admin(current_user):
        return jsonify(unauthorized_access()), 401
    return UserController.create()
# Put routes
@main.route('/users/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_user(current_user, id):
    """Update an existing user."""
    if is_admin(current_user):
        return jsonify(unauthorized_access()), 401
    return UserController.update(id)
# Delete routes
@main.route('/users/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_user(current_user, id):
    """Delete a user."""
    if is_admin(current_user):
        return jsonify(unauthorized_access()), 401
    return UserController.delete(id)
"""
+--------------------------------------------------------------------------
| Event routes
+--------------------------------------------------------------------------
| Below are the routes related to events.
|
"""
# Get routes
@main.route('/events', methods=['GET'])
@token_required(allowed_guest=True)
def get_events(current_user):
return EventController.all()
@main.route('/events/<id>', methods=['GET'])
@token_required(allowed_guest=True)
def get_event(current_user, id):
return EventController.find(id)
@main.route('/events/<id>/unassigned-judges', methods=['GET'])
@token_required(allowed_guest=True)
def get_unassigned_judges_from_event(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventController.get_unassigned_judges(id)
@main.route('/events/<id>/find-judge/<judge_id>', methods=['GET'])
@token_required(allowed_guest=True)
def get_judge_from_event(current_user, id, judge_id):
return EventController.find_judge(id, judge_id)
@main.route('/events/<id>/unassigned-participants', methods=['GET'])
@token_required(allowed_guest=True)
def get_unassigned_participants_from_event(current_user, id):
return EventController.get_unassigned_participants(id)
# Post routes
@main.route('/events', methods=['POST'])
@token_required(allowed_guest=False)
def create_event(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventController.create()
@main.route('/events/<id>/add-judge', methods=['POST'])
@token_required(allowed_guest=False)
def add_judge_from_event(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventController.add_judge(id)
@main.route('/events/<id>/add-participant', methods=['POST'])
@token_required(allowed_guest=False)
def add_participant_from_event(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventController.add_participant(id)
@main.route('/events/delete', methods=['POST'])
@token_required(allowed_guest=False)
def delete_multiple_events(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventController.delete_multiple()
# Put routes
@main.route('/events/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_event(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventController.update(id)
# Delete routes
@main.route('/events/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_event(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventController.delete(id)
@main.route('/events/<id>/delete-judge/<judge_id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_judge_from_event(current_user, id, judge_id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventController.delete_judge(id, judge_id)
@main.route('/events/<id>/delete-participant/<participant_id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_participant_from_event(current_user, id, participant_id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventController.delete_participant(id, participant_id)
"""
+--------------------------------------------------------------------------
| Event Criteria routes
+--------------------------------------------------------------------------
| Below are the routes related to criterias.
|
"""
# Get routes
@main.route('/events/<event_id>/criterias', methods=['GET'])
@token_required(allowed_guest=True)
def get_criterias(current_user, event_id):
return CriteriaController.all(event_id)
@main.route('/events/<event_id>/criterias/<criteria_id>', methods=['GET'])
@token_required(allowed_guest=True)
def get_criteria(current_user, event_id, criteria_id):
return CriteriaController.find(event_id, criteria_id)
# Post routes
@main.route('/events/<event_id>/criterias', methods=['POST'])
@token_required(allowed_guest=False)
def create_criteria(current_user, event_id):
return CriteriaController.create(event_id)
# Put routes
@main.route('/events/<event_id>/criterias/<criteria_id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_criteria(current_user, event_id, criteria_id):
return CriteriaController.update(event_id, criteria_id)
# Delete routes
@main.route('/events/<event_id>/criterias/<criteria_id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_criteria(current_user, event_id, criteria_id):
return CriteriaController.delete(event_id, criteria_id)
"""
+--------------------------------------------------------------------------
| Judge routes
+--------------------------------------------------------------------------
| Below are the routes related to judges.
|
"""
# Get routes
@main.route('/judges', methods=['GET'])
@token_required(allowed_guest=True)
def get_judges(current_user):
return JudgeController.all()
@main.route('/judges/<id>', methods=['GET'])
@token_required(allowed_guest=True)
def get_judge(current_user, id):
return JudgeController.find(id)
# Post routes
@main.route('/judges', methods=['POST'])
@token_required(allowed_guest=False)
def create_judge(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return JudgeController.create()
# Delete routes
@main.route('/judges/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_judge(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return JudgeController.delete(id)
"""
+--------------------------------------------------------------------------
| Participant routes
+--------------------------------------------------------------------------
| Below are the routes related to participants.
|
"""
# Get routes
@main.route('/participants', methods=['GET'])
@token_required(allowed_guest=True)
def get_participants(current_user):
return ParticipantController.all()
@main.route('/participants/<id>', methods=['GET'])
@token_required(allowed_guest=True)
def get_participant(current_user, id):
return ParticipantController.find(id)
# Post routes
@main.route('/participants', methods=['POST'])
@token_required(allowed_guest=False)
def create_participant(current_user):
return ParticipantController.create()
# Put routes
@main.route('/participants/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_participant(current_user, id):
return ParticipantController.update(id)
# Delete routes
@main.route('/participants/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_participant(current_user, id):
return ParticipantController.delete(id)
"""
+--------------------------------------------------------------------------
| Role routes
+--------------------------------------------------------------------------
| Below are the routes related to roles.
|
"""
# Get routes
@main.route('/roles', methods=['GET'])
@token_required(allowed_guest=False)
def get_roles(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return RoleController.all()
@main.route('/roles/<id>', methods=['GET'])
@token_required(allowed_guest=False)
def get_role(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return RoleController.find(id)
# Post routes
@main.route('/roles', methods=['POST'])
@token_required(allowed_guest=False)
def create_role(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return RoleController.create()
# Put routes
@main.route('/roles/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_role(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return RoleController.update(id)
# Delete routes
@main.route('/roles/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_role(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return RoleController.delete(id)
"""
+--------------------------------------------------------------------------
| Organization routes
+--------------------------------------------------------------------------
| Below are the routes related to organizations.
|
"""
# Get routes
@main.route('/organizations', methods=['GET'])
@token_required(allowed_guest=False)
def get_organizations(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationController.all()
@main.route('/organizations/<id>', methods=['GET'])
@token_required(allowed_guest=False)
def get_organization(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationController.find(id)
# Post routes
@main.route('/organizations', methods=['POST'])
@token_required(allowed_guest=False)
def create_organization(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationController.create()
# Put routes
@main.route('/organizations/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_organization(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationController.update(id)
# Delete routes
@main.route('/organizations/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_organization(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationController.delete(id)
"""
+--------------------------------------------------------------------------
| OrganizationType routes
+--------------------------------------------------------------------------
| Below are the routes related to organization types.
|
"""
# Get routes
@main.route('/organization-types', methods=['GET'])
@token_required(allowed_guest=False)
def get_organization_types(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationTypeController.all()
@main.route('/organization-types/<id>', methods=['GET'])
@token_required(allowed_guest=False)
def get_organization_type(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationTypeController.find(id)
# Post routes
@main.route('/organization-types', methods=['POST'])
@token_required(allowed_guest=False)
def create_organization_type(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationTypeController.create()
# Put routes
@main.route('/organization-types/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_organization_type(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationTypeController.update(id)
# Delete routes
@main.route('/organization-types/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_organization_type(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return OrganizationTypeController.delete(id)
"""
+--------------------------------------------------------------------------
| EventType routes
+--------------------------------------------------------------------------
| Below are the routes related to event types.
|
"""
# Get routes
@main.route('/event-types', methods=['GET'])
@token_required(allowed_guest=False)
def get_event_types(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventTypeController.all()
@main.route('/event-types/<id>', methods=['GET'])
@token_required(allowed_guest=False)
def get_event_type(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventTypeController.find(id)
# Post routes
@main.route('/event-types', methods=['POST'])
@token_required(allowed_guest=False)
def create_event_type(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventTypeController.create()
@main.route('/event-types/delete', methods=['POST'])
@token_required(allowed_guest=False)
def delete_multiple_event_types(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventTypeController.delete_multiple()
# Put routes
@main.route('/event-types/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_event_type(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventTypeController.update(id)
# Delete routes
@main.route('/event-types/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_event_type(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return EventTypeController.delete(id)
"""
+--------------------------------------------------------------------------
| ParticipantTeam routes
+--------------------------------------------------------------------------
| Below are the routes related to participant teams.
|
"""
# Get routes
@main.route('/participant-teams', methods=['GET'])
@token_required(allowed_guest=False)
def get_participant_teams(current_user):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return ParticipantTeamController.all()
@main.route('/participant-teams/<id>', methods=['GET'])
@token_required(allowed_guest=False)
def get_participant_team(current_user, id):
if is_admin(current_user):
return jsonify(unauthorized_access()), 401
return ParticipantTeamController.find(id)
# Post routes
@main.route('/participant-teams', methods=['POST'])
@token_required(allowed_guest=False)
def create_participant_team(current_user):
    """Create a participant team; admin accounts get 401."""
    if not is_admin(current_user):
        return ParticipantTeamController.create()
    return jsonify(unauthorized_access()), 401
# Put routes
@main.route('/participant-teams/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_participant_team(current_user, id):
    """Update the participant team identified by `id`; admin accounts get 401."""
    if not is_admin(current_user):
        return ParticipantTeamController.update(id)
    return jsonify(unauthorized_access()), 401
# Delete routes
@main.route('/participant-teams/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_participant_team(current_user, id):
    """Delete the participant team identified by `id`; admin accounts get 401."""
    if not is_admin(current_user):
        return ParticipantTeamController.delete(id)
    return jsonify(unauthorized_access()), 401
"""
+--------------------------------------------------------------------------
| ParticipantType routes
+--------------------------------------------------------------------------
| Below are the routes related to participant types.
|
"""
# Get routes
@main.route('/participant-types', methods=['GET'])
@token_required(allowed_guest=False)
def get_participant_types(current_user):
    """List all participant types; admin accounts get 401."""
    if not is_admin(current_user):
        return ParticipantTypeController.all()
    return jsonify(unauthorized_access()), 401
@main.route('/participant-types/<id>', methods=['GET'])
@token_required(allowed_guest=False)
def get_participant_type(current_user, id):
    """Fetch one participant type by `id`; admin accounts get 401."""
    if not is_admin(current_user):
        return ParticipantTypeController.find(id)
    return jsonify(unauthorized_access()), 401
# Post routes
@main.route('/participant-types', methods=['POST'])
@token_required(allowed_guest=False)
def create_participant_type(current_user):
    """Create a participant type; admin accounts get 401."""
    if not is_admin(current_user):
        return ParticipantTypeController.create()
    return jsonify(unauthorized_access()), 401
# Put routes
@main.route('/participant-types/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_participant_type(current_user, id):
    """Update the participant type identified by `id`; admin accounts get 401."""
    if not is_admin(current_user):
        return ParticipantTypeController.update(id)
    return jsonify(unauthorized_access()), 401
# Delete routes
@main.route('/participant-types/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_participant_type(current_user, id):
    """Delete the participant type identified by `id`; admin accounts get 401."""
    if not is_admin(current_user):
        return ParticipantTypeController.delete(id)
    return jsonify(unauthorized_access()), 401
"""
+--------------------------------------------------------------------------
| EventScore routes
+--------------------------------------------------------------------------
| Below are the routes related to event scores.
|
"""
# Get routes
@main.route('/event-scores', methods=['GET'])
@token_required(allowed_guest=False)
def get_event_scores(current_user):
    """List all event scores; judge accounts get 401 (unlike the other route groups, which block admins)."""
    if not is_judge(current_user):
        return EventScoreController.all()
    return jsonify(unauthorized_access()), 401
@main.route('/event-scores/<id>', methods=['GET'])
@token_required(allowed_guest=False)
def get_event_score(current_user, id):
    """Fetch one event score by `id`; judge accounts get 401."""
    if not is_judge(current_user):
        return EventScoreController.find(id)
    return jsonify(unauthorized_access()), 401
# Post routes
@main.route('/event-scores', methods=['POST'])
@token_required(allowed_guest=False)
def create_event_score(current_user):
    """Create an event score; judge accounts get 401."""
    if not is_judge(current_user):
        return EventScoreController.create()
    return jsonify(unauthorized_access()), 401
# Put routes
@main.route('/event-scores/<id>', methods=['PUT'])
@token_required(allowed_guest=False)
def update_event_score(current_user, id):
    """Update the event score identified by `id`; judge accounts get 401."""
    if not is_judge(current_user):
        return EventScoreController.update(id)
    return jsonify(unauthorized_access()), 401
# Delete routes
@main.route('/event-scores/<id>', methods=['DELETE'])
@token_required(allowed_guest=False)
def delete_event_score(current_user, id):
    """Delete the event score identified by `id`; judge accounts get 401."""
    if not is_judge(current_user):
        return EventScoreController.delete(id)
    return jsonify(unauthorized_access()), 401
# -*- coding: utf-8 -*-
from django.http import HttpResponse, Http404, HttpResponseForbidden
from django.shortcuts import render, redirect
from ..models import *
from .. import utils
from .. import exceptions
import json
def downloadFile(request, fileSequence = -1):
    # TODO: not implemented yet — presumably intended to serve the file
    # identified by fileSequence (-1 looks like a "none" sentinel; confirm
    # when implementing).
    pass
def uploadFile(request):
    # Handle a single-file upload (Python 2 view: note the old
    # `except Exc, e` syntax). Only POST requests from logged-in members
    # are accepted; on success the converted file name is returned as a
    # JSON message built by utils.sMessage.
    if request.method != 'POST':
        return HttpResponseForbidden()
    # Check member session.
    if not request.session.get('member_login'):
        return HttpResponseForbidden()
    # Check the uploaded file.
    ## The file arrives under the form field name "upFile".
    try:
        reqFile = request.FILES.getlist('upFile')
        if len(reqFile) == 0:
            raise exceptions.FileException(' 잘못된 요청입니다. ')
        # Compute the next sequence id from the latest File row.
        # NOTE(review): the bare except hides any DB error, not just
        # "no rows yet"; File.DoesNotExist would be narrower.
        try:
            lastImage = File.objects.latest('id')
            lastId = lastImage.id + 1
        except:
            lastId = 1
        # Upload the file.
        rFile = utils.fileUploadSingle( reqFile[0] ) # (original file, converted file, file type)
        # Record it in the database.
        oFile = File( seq = lastId, inFILE = rFile[0], outFILE = rFile[1], typeFILE = rFile[2])
        oFile.save()
        # Return the (converted) file name.
        json_message = utils.sMessage( data = rFile[1] )
    except exceptions.FileException, e:
        # Bad request (no file supplied).
        json_message = utils.sMessage( data = ' 잘못된 요청입니다. ', error = True)
    except Exception, e:
        # Any other failure during upload/DB write.
        json_message = utils.sMessage( data = ' 이미지 업로드를 실패하였습니다. ', error = True)
    return HttpResponse( json_message )
|
## medium
## tree, dfs
## 48ms, beats 100%
## 14.9mb beats 100%
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def isValidBST(self, root: 'TreeNode') -> 'bool':
        """Return True iff the tree rooted at `root` is a strict BST
        (every node strictly between its exclusive bounds)."""
        def in_range(node, upper, lower):
            # Empty subtree is trivially valid.
            if node is None:
                return True
            # Both bounds are exclusive: duplicates are invalid.
            if not (lower < node.val < upper):
                return False
            # Right subtree keeps the upper bound, raises the lower one;
            # left subtree keeps the lower bound, lowers the upper one.
            return (in_range(node.right, upper, node.val)
                    and in_range(node.left, node.val, lower))
        return in_range(root, float('inf'), -float('inf'))
|
try:
from collections.abc import Iterable
except ImportError:
Iterable = (tuple, list)
from gym.vector.async_vector_env import AsyncVectorEnv
from gym.vector.vector_env import VectorEnv
import gym
from gym import logger
from gym.vector.utils import concatenate, create_empty_array
import numpy as np
from copy import deepcopy
import torch
from torch import distributions
import torch.nn.functional as F
import numpy as np
import re
import os
from dataclasses import dataclass
from dotmap import DotMap
import pathlib
import time
import pickle
from torch import optim
from collections import OrderedDict
from copy import deepcopy
from typing import Sequence
import numpy as np
import multiprocessing
import random
from pettingzoo.sisl import multiwalker_v0
N_AGENTS = 3
def calc_discounted_return(rewards, discount, final_value):
    """
    Return per-step discounted returns for one trajectory, bootstrapping
    the last step with `discount * final_value`.
    """
    n = len(rewards)
    returns = torch.zeros(n)
    # Seed the recursion at the final step, then sweep backwards.
    returns[-1] = rewards[-1] + discount * final_value
    for step in reversed(range(n - 1)):
        returns[step] = rewards[step] + discount * returns[step + 1]
    return returns
def compute_advantages(rewards, values, discount, gae_lambda):
    """
    Return Generalized Advantage Estimates for one trajectory.
    `values` must contain one more entry than `rewards` (bootstrap value).
    """
    # One-step TD errors: r_t + gamma * V(s_{t+1}) - V(s_t).
    td_errors = rewards + discount * values[1:] - values[:-1]
    n = len(rewards)
    decay = discount * gae_lambda
    # Extra trailing zero slot lets the backward sweep read index i+1.
    out = torch.zeros(n + 1)
    for step in reversed(range(n)):
        out[step] = out[step + 1] * decay + td_errors[step]
    return out[:-1]
def save_parameters(writer, tag, model, batch_idx):
    """
    Save model parameters for tensorboard.

    Multi-element tensors are logged as mean/std/max/min summary scalars;
    single-element weights or biases are logged directly as one scalar.
    """
    # Tensorboard tags only accept [-/\w.]; everything else is replaced.
    _INVALID_TAG_CHARACTERS = re.compile(r"[^-/\w\.]")
    for k, v in model.state_dict().items():
        shape = v.shape
        # Fix shape definition for tensorboard.
        shape_formatted = _INVALID_TAG_CHARACTERS.sub("_", str(shape))
        # Don't do this for single weights or biases
        if np.any(np.array(shape) > 1):
            mean = torch.mean(v)
            std_dev = torch.std(v)
            maximum = torch.max(v)
            minimum = torch.min(v)
            writer.add_scalars(
                "{}_weights/{}{}".format(tag, k, shape_formatted),
                {"mean": mean, "std_dev": std_dev, "max": maximum, "min": minimum},
                batch_idx,
            )
        else:
            writer.add_scalar("{}_{}{}".format(
                tag, k, shape_formatted), v.data, batch_idx)
def get_last_checkpoint_iteration(base_checkpoint_path):
    """
    Determine the latest checkpoint iteration.

    Each checkpoint lives in a subdirectory named after its iteration
    number. Returns 0 when the base path does not exist, is empty, or
    contains no numerically-named entries.

    Fixes: the original called max() on the raw listing, which raised
    ValueError on an empty directory and ValueError on any non-numeric
    entry (e.g. stray files) via int().
    """
    if not os.path.isdir(base_checkpoint_path):
        return 0
    iterations = [int(name) for name in os.listdir(base_checkpoint_path)
                  if name.isdigit()]
    return max(iterations, default=0)
def save_checkpoint(actor,
                    critic,
                    actor_optimizer,
                    critic_optimizer,
                    iteration,
                    stop_conditions,
                    hp,
                    base_checkpoint_path):
    """
    Save a training checkpoint.

    Pickles the metadata (iteration, stop conditions, hyperparameters) to
    parameters.pt and saves actor/critic state dicts plus their optimizer
    states as separate .pt files. `actor`/`critic` may each be either a
    single module or a list of modules (multi-agent); list entries get a
    1-based index suffix.

    NOTE(review): load_checkpoint asserts on `checkpoint.env`, but the
    env id line below is commented out, so it is never written here —
    confirm which side is correct.
    """
    checkpoint = DotMap()
    # checkpoint.env = env_id
    checkpoint.iteration = iteration
    checkpoint.stop_conditions = stop_conditions
    checkpoint.hp = hp
    CHECKPOINT_PATH = base_checkpoint_path  # + f"{iteration}/"
    pathlib.Path(CHECKPOINT_PATH).mkdir(parents=True, exist_ok=True)
    with open(CHECKPOINT_PATH + "parameters.pt", "wb") as f:
        pickle.dump(checkpoint, f)
    # with open(CHECKPOINT_PATH + "actor_class.pt", "wb") as f:
    #     pickle.dump(Actor, f)
    # with open(CHECKPOINT_PATH + "critic_class.pt", "wb") as f:
    #     pickle.dump(Critic, f)
    if isinstance(actor, list):
        # Multi-agent case: one actor (and optimizer) file per agent.
        for i, (a, a_optimizer) in enumerate(zip(actor, actor_optimizer)):
            torch.save(a.state_dict(), CHECKPOINT_PATH +
                       "actor_" + str(i+1) + ".pt")
            # torch.save(c.state_dict(), CHECKPOINT_PATH + "critic_" + str(i+1) + ".pt")
            torch.save(a_optimizer.state_dict(), CHECKPOINT_PATH +
                       "actor_optimizer_" + str(i+1) + ".pt")
            # torch.save(c_optimizer.state_dict(), CHECKPOINT_PATH + "critic_optimizer_" + str(i+1) + ".pt")
        if isinstance(critic, list):
            # Per-agent critics as well.
            for i, (c, c_optimizer) in enumerate(zip(critic, critic_optimizer)):
                # torch.save(a.state_dict(), CHECKPOINT_PATH + "actor_" + str(i+1) + ".pt")
                torch.save(c.state_dict(), CHECKPOINT_PATH +
                           "critic_" + str(i+1) + ".pt")
                # torch.save(a_optimizer.state_dict(), CHECKPOINT_PATH + "actor_optimizer_" + str(i+1) + ".pt")
                torch.save(c_optimizer.state_dict(), CHECKPOINT_PATH +
                           "critic_optimizer_" + str(i+1) + ".pt")
        else:
            # Shared (centralized) critic with per-agent actors.
            # torch.save(actor.state_dict(), CHECKPOINT_PATH + "actor.pt")
            torch.save(critic.state_dict(), CHECKPOINT_PATH + "critic.pt")
            # torch.save(actor_optimizer.state_dict(), CHECKPOINT_PATH + "actor_optimizer.pt")
            torch.save(critic_optimizer.state_dict(),
                       CHECKPOINT_PATH + "critic_optimizer.pt")
    else:
        # Single-agent case: one actor and one critic.
        torch.save(actor.state_dict(), CHECKPOINT_PATH + "actor.pt")
        torch.save(critic.state_dict(), CHECKPOINT_PATH + "critic.pt")
        torch.save(actor_optimizer.state_dict(),
                   CHECKPOINT_PATH + "actor_optimizer.pt")
        torch.save(critic_optimizer.state_dict(),
                   CHECKPOINT_PATH + "critic_optimizer.pt")
@dataclass
class StopConditions():
    """
    Store parameters and variables used to stop training.
    """
    # Best reward seen so far; start at -inf so any first result improves it.
    best_reward: float = -np.inf
    # Consecutive iterations without improving best_reward.
    fail_to_improve_count: int = 0
    # Hard cap on the number of training iterations.
    max_iterations: int = 501
def load_checkpoint(iteration, hp, base_checkpoint_path, env_id, train_device):
    """
    Load a training checkpoint saved by save_checkpoint.

    Returns the actor/critic state dicts, their optimizer state dicts and
    the saved StopConditions. Raises AssertionError when the environment
    or hyperparameters of the checkpoint do not match the current run.

    NOTE(review): save_checkpoint never writes `checkpoint.env` (the line
    is commented out there), so the env assertion below cannot pass
    against checkpoints produced by this file — confirm.
    NOTE(review): save_checkpoint may write per-agent files
    (actor_1.pt, ...) which this loader does not handle.
    """
    CHECKPOINT_PATH = base_checkpoint_path + f"{iteration}/"
    with open(CHECKPOINT_PATH + "parameters.pt", "rb") as f:
        checkpoint = pickle.load(f)
    assert env_id == checkpoint.env, "To resume training environment must match current settings."
    # assert ENV_MASK_VELOCITY == checkpoint.env_mask_velocity, "To resume training model architecture must match current settings."
    assert hp == checkpoint.hp, "To resume training hyperparameters must match current settings."
    # map_location lets a GPU-saved checkpoint load onto the requested device.
    actor_state_dict = torch.load(
        CHECKPOINT_PATH + "actor.pt", map_location=torch.device(train_device))
    critic_state_dict = torch.load(
        CHECKPOINT_PATH + "critic.pt", map_location=torch.device(train_device))
    actor_optimizer_state_dict = torch.load(CHECKPOINT_PATH + "actor_optimizer.pt",
                                            map_location=torch.device(train_device))
    critic_optimizer_state_dict = torch.load(CHECKPOINT_PATH + "critic_optimizer.pt",
                                             map_location=torch.device(train_device))
    return (actor_state_dict, critic_state_dict,
            actor_optimizer_state_dict, critic_optimizer_state_dict,
            checkpoint.stop_conditions)
def one_hot_embedding(labels, num_classes):
    """Embedding labels to one-hot form.

    Args:
        labels: (LongTensor) class labels, sized [N,].
        num_classes: (int) number of classes.

    Returns:
        (tensor) encoded labels, sized [N, #classes].
    """
    # Row i of the identity matrix is the one-hot vector for class i,
    # so indexing the identity with the labels does the whole encoding.
    return torch.eye(num_classes)[labels]
def make_env(num_envs=1, asynchronous=True, wrappers=None, env_cfg=None, **kwargs):
    """Build a vectorized multiwalker environment with num_envs copies.

    NOTE(review): the `seed` argument of _make_env is captured but never
    used, so every copy is built identically — confirm whether seeding
    was intended.
    NOTE(review): `wrappers` and **kwargs are currently ignored.
    NOTE(review): SyncVectorEnv is not imported in the visible import
    block, so the asynchronous=False path would raise NameError — verify.
    """
    def _make_env(seed):
        def _make():
            env = multiwalker_v0.env(**env_cfg)
            env = EnvWrapper(env)
            # env = ObsWrapper(env)
            # env = ActionWrapper(env)
            # env = TimeLimit(env)
            return env
        return _make
    env_fns = [_make_env(i) for i in range(num_envs)]
    return AsyncVectorEnv(env_fns) if asynchronous else SyncVectorEnv(env_fns)
def get_env_space():
    """
    Return obsvervation dimensions, action dimensions and whether or not action space is continuous.

    NOTE(review): `action_dim` is only assigned in the continuous branch;
    for a discrete action space (the commented-out else) this function
    raises NameError at the return — confirm only continuous envs are used.
    NOTE(review): both observation branches are identical, so the
    `observation_spaces` type check has no effect.
    """
    # env = gym.make(ENV)
    env = multiwalker_v0.env()
    env = EnvWrapper(env)
    continuous_action_space = type(env.action_space) is gym.spaces.box.Box
    if continuous_action_space:
        action_dim = env.action_space.shape[0]
    # else:
    #     if type(env.action_space) == list:
    #         action_dim = env.action_spaces[0].n
    #     else:
    #         action_dim = env.action_space.n
    if type(env.observation_spaces) == list:
        obsv_dim = env.observation_space.shape[0]
    else:
        obsv_dim = env.observation_space.shape[0]
    return obsv_dim, action_dim, continuous_action_space
class EnvWrapper(gym.Wrapper):
    """Adapt a PettingZoo multiwalker env to a single-agent gym interface
    by exposing walker_0's observation/action spaces.

    NOTE(review): super().__init__(env) is commented out, so the
    gym.Wrapper plumbing (e.g. attribute forwarding) may not be set up —
    confirm this is intentional.
    """
    def __init__(self, env):
        # super().__init__(env)
        self.env = env
        self.observation_space = env.observation_spaces['walker_0']
        self.action_space = env.action_spaces['walker_0']
        # self.reward_range = self.env.reward_range
        self.metadata = self.env.metadata
    def step(self, action):
        # Actions are clipped to the Box bounds before being applied.
        action = np.clip(action, -1, 1)
        # NOTE(review): last() is read *before* step(), and step()'s return
        # value is used as next_state — this matches PettingZoo's AEC call
        # pattern of the pinned version, presumably; verify against the
        # multiwalker_v0 API in use.
        reward, done, info = self.env.last()
        next_state = self.env.step(action)
        return next_state, reward, done, info
|
import pdb
import torch
import torch.nn as nn
class SST(nn.Module):
    """
    Container module with 1D convolutions to generate proposals

    An LSTM runs over the video features and a linear layer scores K
    proposal anchors per timestep (sigmoid confidences).
    """
    def __init__(self, opt):
        super(SST, self).__init__()
        # One confidence score per anchor (K anchors per timestep).
        self.scores = torch.nn.Linear(opt.hidden_dim, opt.K)
        # Saving arguments
        self.video_dim = opt.video_dim
        #self.W = opt.W
        self.rnn_type = opt.tap_rnn_type
        self.rnn_num_layers = opt.rnn_num_layers
        self.rnn_dropout = opt.rnn_dropout
        self.K = opt.K
        self.data_for_test = []
        self.rnn = nn.LSTM(opt.video_dim, opt.hidden_dim, opt.rnn_num_layers, batch_first=True,
                           dropout=opt.rnn_dropout)
    # NOTE(review): eval()/train() below shadow nn.Module.eval/train
    # without calling super() or returning self — the module's `training`
    # flag is never toggled and `model.eval()` cannot be chained. They
    # appear intended only to switch the LSTM's dropout; confirm.
    def eval(self):
        self.rnn.dropout = 0
    def train(self):
        self.rnn.dropout = self.rnn_dropout
    def forward(self, features):
        # Optional learned dimensionality reduction (only if configured
        # elsewhere; the layer is not created in this class).
        if hasattr(self, 'reduce_dim_layer'):
            features = self.reduce_dim_layer(features)
        # Add a batch dimension of 1: (T, D) -> (1, T, D).
        features = features.unsqueeze(0)
        N, T, _ = features.size()
        rnn_output, _ = self.rnn(features)
        rnn_output = rnn_output.contiguous()
        # Flatten (N, T, H) -> (N*T, H) so the linear layer scores each step.
        rnn_output = rnn_output.view(rnn_output.size(0) * rnn_output.size(1), rnn_output.size(2))
        outputs = torch.sigmoid(self.scores(rnn_output)).view(N, T, self.K)
        return rnn_output, outputs.squeeze(0)
# coding=utf-8
##############################################################################################
# @file:acfuncomments.py
# @author:Merlin.W.OUYANG
# @date:2016/11/20
# @note:AcFun评论获取
# @modify
# @author:Jiangsiwei
# @date:2017/01/12
# @note:网站域名更新升级,原地址:http://www.acfun.tv/ 升级后地址:http://www.acfun.cn/
# 取评论逻辑没变
##############################################################################################
import json
import re
from website.common.comments import SiteComments
from log.spiderlog import Logger
import traceback
from storage.cmtstorage import CMTStorage
from storage.newsstorage import NewsStorage
from utility.timeutility import TimeUtility
from website.common.comments import SiteComments
from dao.sqldao import SQLDAO
from configuration import constant
##############################################################################################
# @class:AcfunComments
# @author:Merlin.W.OUYANG
# @date:2016/11/20
# @note:获取评论的类,继承于WebSite类
##############################################################################################
class AcfunComments(SiteComments):
    """Comment crawler for AcFun (http://www.acfun.cn/), extending SiteComments.

    Pipeline (dispatched in process()):
      STEP_1     -> build the first comment-page URL from the article URL
      STEP_2     -> read totalCount and queue a full fetch if new comments exist
      STEP_3     -> fetch and store every comment in one request
      STEP_CLICK -> fetch view/comment/vote/fans counters for the article
    """

    # FIX: the query string contained mojibake ("...%d\xa4tPage=1...") —
    # "&curren" had been collapsed into the HTML entity for U+00A4;
    # restored to the intended "&currentPage".
    COMMENTS_URL = 'http://www.acfun.cn/comment_list_json.aspx?isNeedAllCount=true&contentId=%d&currentPage=1&pageSize=%d'
    PAGE_SIZE = 50
    STEP_1 = None      # entry step marker: params.step is None on first call
    STEP_2 = 2
    STEP_3 = 3
    STEP_CLICK = 4
    INFO_URL = 'http://www.acfun.cn/content_view.aspx?contentId={con_id}'

    def __init__(self):
        SiteComments.__init__(self)

    def process(self, params):
        """Dispatch to the handler matching params.step; errors are logged."""
        try:
            if params.step is AcfunComments.STEP_1:
                self.step1(params)
                self.get_clickurl(params)
            # FIX: use '==' rather than 'is' for the int step constants —
            # identity comparison on ints relies on CPython interning.
            elif params.step == AcfunComments.STEP_2:
                self.step2(params)
            elif params.step == AcfunComments.STEP_3:
                self.step3(params)
            elif params.step == AcfunComments.STEP_CLICK:
                self.setclick(params)
        except:
            Logger.printexception()

    def step1(self, params):
        """Queue the first comment page (pageSize=1: only the count matters)."""
        # Article id is the number embedded in the last URL path segment.
        key = int(re.findall(r"\d+", params.url.split("/")[-1])[0])  # FIX: raw string for the regex
        comments_url = AcfunComments.COMMENTS_URL % (key, 1)
        self.storeurl(comments_url, params.originalurl, AcfunComments.STEP_2,
                      {'key': key,
                       'commentcount': 0})

    def step2(self, params):
        """Record the total comment count; queue a full fetch if it grew."""
        comments = json.loads(params.content)
        commentcount = int(comments['data']['totalCount'])
        NewsStorage.setcmtnum(params.originalurl, commentcount)
        # Incremental check: skip when we already stored at least this many.
        cmtnum = CMTStorage.getcount(params.originalurl, True)
        if cmtnum >= commentcount:
            return
        # One request sized to the full count fetches every comment at once.
        comments_url = AcfunComments.COMMENTS_URL % (params.customized['key'], commentcount)
        self.storeurl(comments_url, params.originalurl, AcfunComments.STEP_3,
                      {'key': params.customized['key'],
                       'commentcount': commentcount})

    def step3(self, params):
        """Store every comment (content, publish time, nickname).

        Comments are not ordered by time in the response, so everything is
        fetched and time-based dedup/sorting is left to CMTStorage.
        """
        comments = json.loads(params.content)
        for id in comments['data']['commentContentArr']:
            comment = comments['data']['commentContentArr'][id]
            CMTStorage.storecmt(params.originalurl, comment['content'],
                                comment['postDate'], comment['userName'])

    def get_clickurl(self, params):
        """Queue the article-info URL that carries the click/vote counters."""
        con_id = params.originalurl.split('/')[-1].split('ac')[-1]
        clickurl = AcfunComments.INFO_URL.format(con_id=con_id)
        self.storeurl(clickurl, params.originalurl, AcfunComments.STEP_CLICK)

    def setclick(self, params):
        """Persist click/comment/vote/fans counts for the article.

        Response layout: [views, comments, ?, ?, danmaku, favorites, votes, ?].
        Falsy counters (None/0/'') are normalized to 0.
        """
        try:
            content = json.loads(params.content)
            cmtnum = content[1] or 0
            clicknum = content[0] or 0
            votenum = content[-2] or 0
            fansnum = content[-3] or 0
            NewsStorage.seturlinfo(params.originalurl,
                                   data={SQLDAO.SPIDER_TABLE_NEWS_CMTNUM: cmtnum,
                                         SQLDAO.SPIDER_TABLE_NEWS_CLICKNUM: clicknum,
                                         SQLDAO.SPIDER_TABLE_NEWS_VOTENUM: votenum,
                                         SQLDAO.SPIDER_TABLE_NEWS_FANSNUM: fansnum})
        except:
            Logger.printexception()
|
#! /usr/bin/env python
import pygame
from random import uniform
# RGB color constants.
black = 0, 0, 0
white = 255, 255, 255
green = 0, 80, 0
game_end = False
screen_size = 800, 600
game_screen = pygame.display.set_mode(screen_size)
game_rect_screen = game_screen.get_rect()
game_time = pygame.time.Clock()  # caps the frame rate in the main loop
pygame.display.set_caption('Pong')
# Sound effects for paddle hits, wall bounces and lost points.
pygame.mixer.init()
racket_tap_sound = pygame.mixer.Sound('./sounds/bounce.wav')
bounce_wall_sound = pygame.mixer.Sound('./sounds/tap1.wav')
lose_point_sound = pygame.mixer.Sound('./sounds/losepoint.wav')
class Racket:
    """Player paddle: a green rectangle near the left edge, moved
    vertically with the arrow keys."""

    def __init__(self, size):
        self.image = pygame.Surface(size)
        self.image.fill(green)
        self.image_rect = self.image.get_rect()
        self.speed = 15  # pixels moved per step
        self.image_rect[0] = 12  # fixed horizontal offset from the left edge

    def move(self, x, y):
        """Shift the paddle by (x, y) steps of `self.speed` pixels each."""
        dx, dy = x * self.speed, y * self.speed
        self.image_rect[0] += dx
        self.image_rect[1] += dy

    def refresh(self, key_pressed):
        """Apply up/down key input, then keep the paddle on screen."""
        step = 0
        if key_pressed[pygame.K_UP]:
            step -= 1
        if key_pressed[pygame.K_DOWN]:
            step += 1
        if step:
            self.move(0, step)
        self.image_rect.clamp_ip(game_rect_screen)

    def realize(self):
        """Blit the paddle onto the game screen."""
        game_screen.blit(self.image, self.image_rect)
class Ball:
    """The pong ball: bounces off walls and the paddle, adjusting the
    score and its own speed on each event."""
    def __init__(self, size):
        self.height, self.width = size
        self.image = pygame.Surface(size)
        self.image.fill(white)
        self.image_rect = self.image.get_rect()
        self.speed = 15  # pixels per frame along each axis component
        self.set_ball()
    def random_number(self):
        # Direction component in [-1, -0.5] or [0.5, 1]: rejects values
        # near zero so the ball never moves almost purely on one axis.
        while True:
            num = uniform(-1.0, 1.0)
            if num > -.5 and num < .5:
                continue
            else:
                return num
    def set_ball(self):
        # Re-center the ball and pick a fresh random direction.
        x = self.random_number()
        y = self.random_number()
        self.image_rect.x = game_rect_screen.centerx
        self.image_rect.y = game_rect_screen.centery
        self.spd = [x, y]
        # Float position is kept separately because rect coords truncate.
        self.position = list(game_rect_screen.center)
    def wall_bounce(self):
        # Top/bottom walls: reverse vertical direction.
        if self.image_rect.y < 0 or self.image_rect.y > game_rect_screen.bottom - self.height:
            self.spd[1] *= -1
            bounce_wall_sound.play()
        # Left/right walls: reverse horizontal direction.
        if self.image_rect.x < 0 or self.image_rect.x > game_rect_screen.right - self.width:
            self.spd[0] *= -1
            if self.image_rect.x < 0:
                # Left wall is the player's side: losing a point also
                # slows the ball down.
                # NOTE(review): repeated misses can drive self.speed to
                # zero or negative — confirm intended.
                scoreboard1.points -= 1
                lose_point_sound.play()
                self.speed -= 3
            if self.image_rect.x > game_rect_screen.right - self.width:
                bounce_wall_sound.play()
    def racket_bounce(self, racket):
        # Paddle hit: reverse direction, score a point, speed up.
        if self.image_rect.colliderect(racket):
            self.spd[0] *= -1
            scoreboard1.points += 1
            self.speed += 3
            racket_tap_sound.play()
    def move(self):
        # Advance the float position, then sync the draw rect to it.
        self.position[0] += self.spd[0] * self.speed
        self.position[1] += self.spd[1] * self.speed
        self.image_rect.center = self.position
    def refresh(self, racket):
        # One frame of ball physics: walls, paddle, then movement.
        self.wall_bounce()
        self.racket_bounce(racket)
        self.move()
    def realize(self):
        game_screen.blit(self.image, self.image_rect)
class Scoreboard:
    """Renders the player's point total at the top of the screen."""
    def __init__(self):
        pygame.font.init()
        self.header = pygame.font.Font(None, 36)
        self.points = 5  # starting score; 0 loses, 10 wins (see main loop)
    def counting(self):
        # Render the current score centered horizontally.
        self.text = self.header.render("Points - " + str(self.points), 1, (255, 255, 255))
        self.text_position = self.text.get_rect()
        self.text_position.centerx = game_screen.get_width() / 2
        game_screen.blit(self.text, self.text_position)
        # NOTE(review): blitting the screen surface onto itself at (0, 0)
        # is a no-op — looks like leftover code; confirm and remove.
        game_screen.blit(game_screen, (0, 0))
# Game objects and the main loop: 60 FPS, score starts at 5;
# reaching 0 loses and reaching 10 wins.
racket = Racket((15, 100))
ball = Ball((15, 15))
scoreboard1 = Scoreboard()
while not game_end:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            game_end = True
    key_press = pygame.key.get_pressed()
    game_screen.fill(black)
    # Draw then update each object for this frame.
    racket.realize()
    racket.refresh(key_press)
    ball.realize()
    ball.refresh(racket.image_rect)
    game_time.tick(60)
    scoreboard1.counting()
    pygame.display.update()
    # End conditions are only reported on the console; the window closes.
    if scoreboard1.points == 0:
        print('You Lose!')
        print('Game Over!')
        game_end = True
    elif scoreboard1.points == 10:
        print('You Win!')
        print('Game Over!')
        game_end = True
|
from django.contrib import admin
from django.urls import path, include
from django.conf.urls import url
from . import views
# URL namespace for reversing (e.g. "home:index", "home:schedule").
app_name='home'
urlpatterns = [
    path('', views.index, name='index'),
    # Timeline page for a given id (letters, digits, underscore or hyphen).
    url(r'^timeline/(?P<id>[\w-]+)/$', views.timetable, name="schedule"),
]
# (?P<file_name>[\w.]{0,256})/$
'''
Please create an empty file (manually as you normally create
Python files) and name it requests.py . Make sure the file has that name exactly.
Then just paste the following code in the file (manually):
Executing the script will throw an error. Please fix that error
so that you get the expected output and explain why the error happened.
Expected output:
<!DOCTYPE html>
<!--[if IE 7]>
<html class="ie ie7" lang="en-US" prefix="og: http://ogp.me/ns#">
'''
# The deliberate error in this exercise: when this file itself is saved
# as "requests.py", the import below resolves to this file instead of the
# installed requests library, so requests.get does not exist
# (AttributeError). Renaming the file (and removing any cached
# requests.pyc) makes the import find the real library and produces the
# expected HTML output.
import requests
r = requests.get("http://www.pythonhow.com")
print(r.text[:100])
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class GlobusAuthorizer(object):
    """
    A ``GlobusAuthorizer`` is a very simple object which generates valid
    Authorization headers.
    It may also have handling for responses that indicate that it has provided
    an invalid Authorization header.
    """

    @abc.abstractmethod
    def set_authorization_header(self, header_dict):
        """
        Takes a dict of headers, and adds to it a mapping of
        ``{"Authorization": "..."}`` per this object's type of Authorization.
        Importantly, if an ``Authorization`` header is already set, this method
        is expected to overwrite it.
        """
        # BUG FIX: the abstract method was declared without `self`, so its
        # signature did not match the instance-method form subclasses must
        # implement (and the method could not be invoked on an instance).

    def handle_missing_authorization(self, *args, **kwargs):
        """
        This operation should be called if a request is made with an
        Authorization header generated by this object which returns a 401
        (HTTP Unauthorized).
        If the ``GlobusAuthorizer`` thinks that it can take some action to
        remedy this, it should update its state and return ``True``.
        If the Authorizer cannot do anything in the event of a 401, this *may*
        update state, but importantly returns ``False``.
        By default, this always returns ``False`` and takes no other action.
        """
        return False
|
import pygame
import random as r
UNIT = 70 # size of individual square in pixels
COUNT_W = 10 # width in units
COUNT_H = 10 # height in units
FONT_SIZE = 20
FRAMERATE = 60 # frames per second
SPEED = 20 # number of frames between movement (greater than 0)
TAIL_GROWTH = 5 # increase in tail length for each food eaten
STARTING_TAIL_LENGTH = 5 # starting length of tail
width = COUNT_W * UNIT # screen width in pixels
height = COUNT_H * UNIT + FONT_SIZE # screen height in pixels
max = COUNT_W * COUNT_H  # NOTE(review): shadows the built-in max(); total cells on the board
pygame.init()
pygame.display.set_caption("snek.py")
screen = pygame.display.set_mode((width, height)) # initialize game window
font = pygame.font.SysFont("consolas", FONT_SIZE)
clock = pygame.time.Clock()
# Placeholder bindings (the type object, not a value); real values are
# assigned by reset_globals() before use.
x = int
y = int
food_eaten = int
tail = [] # list will keep track of tail position
def reset_globals():
    """Reset the game state globals for a fresh round: head just off the
    left edge moving right, empty score, starting tail length."""
    global x
    global y
    global food_eaten
    global tail
    global tail_length
    # Head starts at (-1, 0) so the first rightward move lands on (0, 0).
    x = -1
    y = 0
    food_eaten = 0
    tail = [(x, y)]
    tail_length = STARTING_TAIL_LENGTH
def off_screen(x, y):
    """Return True when grid cell (x, y) lies outside the playing field."""
    inside_x = 0 <= x <= COUNT_W - 1
    inside_y = 0 <= y <= COUNT_H - 1
    return not (inside_x and inside_y)
def on_tail(x, y):
    """Return True when grid cell (x, y) is occupied by the snake's tail."""
    return (x, y) in tail
def food():
    """Return a random grid cell not occupied by the tail.

    The caller must guarantee at least one free cell exists, otherwise
    this rejection loop never terminates.
    """
    while True:
        candidate = (r.randint(0, COUNT_W - 1), r.randint(0, COUNT_H - 1))
        if candidate not in tail:
            return candidate
food_location = food() # initial food position, placed now that food() has been defined
def food_check(x, y):
    """If the snake's head (x, y) is on the food, grow the tail, bump the
    score and respawn the food elsewhere."""
    global food_location # specify global so python interpreter knows to use the existing variable
    global tail_length
    global food_eaten
    if food_location == (x, y): # if we happen to be on the food
        tail_length += TAIL_GROWTH # max tail size increases
        food_eaten += 1
        print("Yum! You are now", tail_length, "snek units long.")
        food_location = food() # get a new food location
def win_check():
    """Return True when the tail fills every cell of the board (game won)."""
    return len(tail) >= max
def increment_tail(x, y):
    """Advance the snake: (x, y) becomes the new head and the tail is
    trimmed back to at most tail_length segments."""
    tail.insert(0, (x, y))
    del tail[tail_length:]
def draw_tail():
    """Draw every tail segment, fading from white at the head through
    cyan toward blue along the body."""
    for i in range(len(tail)):
        red = 255 - 255 * (i / max) * 2 # red moves from 255 to 0 over 1/2 max snek length
        if red < 0: red = 0
        green = 255 - 255 * (i / max) # green moves from 255 to 0 over max snek length
        if green < 0: green = 0
        pygame.draw.rect(screen, (red, green, 255), pygame.Rect(tail[i][0] * UNIT, tail[i][1] * UNIT + FONT_SIZE, UNIT, UNIT)) # multiply by UNIT to make squares the right size
def draw():
    """Redraw the whole frame: background, food, tail and the status bar
    (length, pending growth, food eaten, board coverage)."""
    screen.fill((0, 0, 0)) # fill screen with black so we can re-draw from scratch
    pygame.draw.rect(screen, (255, 0, 0), pygame.Rect(food_location[0] * UNIT, food_location[1] * UNIT + FONT_SIZE, UNIT, UNIT)) # put food on the screen
    draw_tail()
    length_text = font.render("Length: {} ".format(len(tail)), True, (255, 255, 255))
    screen.blit(length_text, (0, 0))
    next_pos = length_text.get_width()
    # Growth still owed from recently-eaten food (tail grows over time).
    if tail_length - len(tail) > 0:
        extra_food_text = font.render("+{:<2} ".format(tail_length - len(tail)), True, (255, 0, 0))
        screen.blit(extra_food_text, (next_pos, 0))
    next_pos = width // 3
    food_text = font.render("Food eaten: " + str(food_eaten), True, (255, 255, 255))
    screen.blit(food_text, (next_pos, 0))
    next_pos += food_text.get_width()
    screen_text = font.render(" Coverage: {:<4.1f}%".format((len(tail) * 100) / (max)), True, (255, 255, 255))
    screen.blit(screen_text, (next_pos, 0))
    pygame.display.flip() # updates display
def display_message(text):
    """Show `text` centered on screen inside a black box and flip the display."""
    buffer = 5  # padding (pixels) around the rendered text
    message = font.render(text, True, (255, 255, 255))
    pygame.draw.rect(screen, (0, 0, 0), pygame.Rect(width // 2 - message.get_width() // 2 - buffer, height // 2 - message.get_height() // 2 - buffer, message.get_width() + buffer * 2, message.get_height() + buffer * 2))
    screen.blit(message, (width // 2 - message.get_width() // 2, height // 2 - message.get_height() // 2))
    pygame.display.flip() # updates display
def finish(text):
    """Show the end-of-round message and wait for the player's choice.

    Prints the round's results to the console, displays `text` plus a
    "Press [Space] to restart" prompt, then blocks until the player
    either closes the window (returns True → quit the application) or
    presses Space (returns False → restart).
    """
    print(" ")
    print("Results:")
    print("Ate", food_eaten, "morsels of snek food total.")
    print("Final length:", tail_length)
    print("Screen coverage: " + str((len(tail) * 100) / (max)) + "%")
    buffer = 5
    message = font.render("Press [Space] to restart", True, (255, 255, 255))
    pygame.draw.rect(screen, (0, 0, 0), pygame.Rect(width // 2 - message.get_width() // 2 - buffer, height // 2 - message.get_height() // 2 - buffer + FONT_SIZE + 2 * buffer, message.get_width() + buffer * 2, message.get_height() + buffer * 2))
    screen.blit(message, (width // 2 - message.get_width() // 2, height // 2 - message.get_height() // 2 + FONT_SIZE + 2 * buffer))
    display_message(text)
    # BUG FIX: this loop was "while not True:", which never executes, so
    # finish() returned None immediately instead of waiting for input.
    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT: # clicking X on window or ctrl+C in cmd will exit loop
                return True
            if event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
                return False
        clock.tick(FRAMERATE) # wait for 1/FRAMERATE seconds
def play():
    """Run one round of the game.

    Returns True to go back to the start screen and False when the
    player closed the window (quit the whole program).
    """
    reset_globals()
    global x
    global y
    count = 0 # used to count frames since last snek update
    direction = 0 # int 0-3 corresponding to the direction of movement
    prev_direction = 0 # used to check new direction to prevent going straight back into tail
    print(" ")
    print("New snek game commencing:")
    # NOTE: `exit` shadows the builtin; True once the player quits.
    exit = False
    done = False
    while not (done or exit):
        count += 1
        for event in pygame.event.get():
            if event.type == pygame.QUIT: # clicking X on window or ctrl+C in cmd will exit loop
                exit = True
        pressed = pygame.key.get_pressed() # get list of pressed keys (these are update every frame)
        if pressed[pygame.K_UP] and prev_direction != 1: direction = 3 # set new direction and check against previous direction
        if pressed[pygame.K_DOWN] and prev_direction != 3: direction = 1
        if pressed[pygame.K_LEFT] and prev_direction != 0: direction = 2
        if pressed[pygame.K_RIGHT] and prev_direction != 2: direction = 0
        if count % SPEED == 0 and not exit: # if this is a frame we want to act on our input
            if direction == 3: y -= 1 # up
            if direction == 1: y += 1 # down
            if direction == 2: x -= 1 # left
            if direction == 0: x += 1 # right
            prev_direction = direction
            if off_screen(x, y): # check that we are on the screen
                print("You meandered off the screen!")
                done = True
                exit = finish("You meandered off the screen!")
            if on_tail(x, y): # check that we aren't on our tail
                print("You munched your tail!")
                done = True
                exit = finish("You munched your tail!")
            increment_tail(x, y) # move tail (internally, not drawing yet)
            if win_check(): # check that we haven't used all the space
                print("You win!")
                done = True
                draw()
                exit = finish("You win!")
            if not done: # only if we aren't done with the loop
                food_check(x, y) # manage food
                draw() # update the screen
            count = 0
        clock.tick(FRAMERATE) # wait for 1/FRAMERATE seconds
    if exit:
        print("Exiting...")
        return False
    return True
def main():
    """Show the start screen and launch rounds until the player quits
    (window close, Escape, or quitting from a round)."""
    display_message("Press [Space] to start")
    done = False
    while not done:
        for event in pygame.event.get():
            if event.type == pygame.QUIT: # clicking X on window or ctrl+C in cmd will exit loop
                done = True
            if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
                done = True
            if event.type == pygame.KEYDOWN and event.key == pygame.K_SPACE:
                # play() returns False when the player chose to quit.
                if not play():
                    done = True
        clock.tick(FRAMERATE) # wait for 1/FRAMERATE seconds
    print("Application closed.")
main()  # script entry point: start the game immediately on import/run
|
# --- paritybench-generated harness preamble ---
# Registers this module under the aliases of the extracted repository files
# so their cross-imports resolve without the real package layout.
import sys
_module = sys.modules[__name__]
del sys
cifar = _module
datasets = _module
cifar = _module
folder = _module
LinearAverage = _module
NCA = _module
lib = _module
normalize = _module
utils = _module
main = _module
models = _module
resnet = _module
resnet_cifar = _module
test = _module
from _paritybench_helpers import _mock_config, patch_functional
from unittest.mock import mock_open, MagicMock
from torch.autograd import Function
from torch.nn import Module
import abc, collections, copy, enum, functools, inspect, itertools, logging, math, matplotlib, numbers, numpy, pandas, queue, random, re, scipy, sklearn, string, tensorflow, time, torch, torchaudio, torchtext, torchvision, types, typing, uuid, warnings
import numpy as np
from torch import Tensor
patch_functional()
# Replace file I/O, logging, YAML and CLI parsing with mocks so the module
# imports cleanly without a real filesystem, argv, or config files.
open = mock_open()
yaml = logging = sys = argparse = MagicMock()
ArgumentParser = argparse.ArgumentParser
_global_config = args = argv = cfg = config = params = _mock_config()
argparse.ArgumentParser.return_value.parse_args.return_value = _global_config
yaml.load.return_value = _global_config
sys.argv = _global_config
__version__ = '1.0.0'
xrange = range  # Python 2 compatibility shim for the extracted code
wraps = functools.wraps
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import torchvision
import torchvision.transforms as transforms
import time
import math
import torchvision.datasets as datasets
import torch.utils.data as data
from torch.autograd import Function
from torch import nn
from torch.autograd import Variable
import torch.nn.parallel
import torch.distributed as dist
import torch.optim
import torch.utils.data
import torch.utils.data.distributed
import torch.utils.model_zoo as model_zoo
import numpy as np
class LinearAverageOp(Function):
    """Custom autograd op: similarity of a feature batch to a memory bank.

    forward computes x @ memory.T / T (temperature-scaled scores).
    backward returns the gradient w.r.t. x and, as a side effect, refreshes
    the memory rows of the current batch with a momentum-weighted,
    L2-renormalised copy of the new features.
    """
    @staticmethod
    def forward(self, x, y, memory, params):
        # params = [temperature T, momentum]; y holds the memory-bank row
        # index of each sample in x.
        T = params[0].item()
        batchSize = x.size(0)
        out = torch.mm(x.data, memory.t())
        out.div_(T)  # temperature scaling
        self.save_for_backward(x, memory, y, params)
        return out
    @staticmethod
    def backward(self, gradOutput):
        x, memory, y, params = self.saved_tensors
        batchSize = gradOutput.size(0)
        T = params[0].item()
        momentum = params[1].item()
        gradOutput.data.div_(T)  # chain rule through the 1/T scaling
        gradInput = torch.mm(gradOutput.data, memory)
        gradInput.resize_as_(x)
        # Memory update: blend each stored row with the fresh feature...
        weight_pos = memory.index_select(0, y.data.view(-1)).resize_as_(x)
        weight_pos.mul_(momentum)
        weight_pos.add_(torch.mul(x.data, 1 - momentum))
        # ...then L2-renormalise and write it back in place.
        w_norm = weight_pos.pow(2).sum(1, keepdim=True).pow(0.5)
        updated_weight = weight_pos.div(w_norm)
        memory.index_copy_(0, y, updated_weight)
        # No gradients flow to y, memory, or params.
        return gradInput, None, None, None
class LinearAverage(nn.Module):
    """Non-parametric classifier head backed by a memory bank.

    Keeps one feature vector per training instance in the ``memory`` buffer;
    ``forward`` scores a batch against every stored vector via
    LinearAverageOp (which also refreshes the memory during backward).

    Args:
        inputSize: feature dimension of each memory slot.
        outputSize: number of memory slots (size of the dataset).
        T: softmax temperature, stored in the ``params`` buffer.
        momentum: memory-update momentum, stored in the ``params`` buffer.
    """

    def __init__(self, inputSize, outputSize, T=0.05, momentum=0.5):
        super(LinearAverage, self).__init__()
        self.nLem = outputSize
        # T and momentum live in a buffer so they follow the module across
        # devices and into state_dict without being trainable.
        self.register_buffer('params', torch.tensor([T, momentum]))
        # Uniform init in [-stdv, stdv].  (The original code computed an
        # unused stdv = 1/sqrt(inputSize) first; that dead store is removed.)
        stdv = 1.0 / math.sqrt(inputSize / 3)
        self.register_buffer('memory', torch.rand(outputSize, inputSize).mul_(2 * stdv).add_(-stdv))

    def forward(self, x, y):
        # Temperature-scaled similarity of batch x to every memory slot;
        # y carries the memory-row indices of the batch samples.
        out = LinearAverageOp.apply(x, y, self.memory, self.params)
        return out
class NCACrossEntropy(nn.Module):
    """ \\sum_{j=C} log(p_{ij})
    Neighbourhood Component Analysis cross-entropy loss.
    Store all the labels of the dataset.
    Only pass the indexes of the training instances during forward.
    """
    def __init__(self, labels, margin=0):
        super(NCACrossEntropy, self).__init__()
        # A placeholder buffer is registered, then immediately overwritten
        # with the real label tensor by the assignment below.
        self.register_buffer('labels', torch.LongTensor(labels.size(0)))
        self.labels = labels
        self.margin = margin
    def forward(self, x, indexes):
        # x: (batchSize, n) similarity scores of the batch against all n
        # training instances; indexes: dataset indices of the batch rows.
        batchSize = x.size(0)
        n = x.size(1)
        exp = torch.exp(x)
        # Label of each batch sample, shaped as a column for broadcasting.
        y = torch.index_select(self.labels, 0, indexes.data).view(batchSize, 1)
        # Mask of training instances that share each sample's label.
        same = y.repeat(1, n).eq_(self.labels)
        # Zero out each sample's similarity to itself (its own index).
        exp.data.scatter_(1, indexes.data.view(-1, 1), 0)
        p = torch.mul(exp, same.float()).sum(dim=1)  # mass on same-class neighbours
        Z = exp.sum(dim=1)  # normaliser over all neighbours
        Z_exclude = Z - p
        # Down-weight the positive mass by exp(margin), then renormalise.
        p = p.div(math.exp(self.margin))
        Z = Z_exclude + p
        prob = torch.div(p, Z)
        # Skip zero probabilities (samples with no same-class neighbour)
        # to avoid log(0).
        prob_masked = torch.masked_select(prob, prob.ne(0))
        loss = prob_masked.log().sum(0)
        return -loss / batchSize
class Normalize(nn.Module):
    """Scale each row of the input to unit Lp norm (default p = 2)."""

    def __init__(self, power=2):
        super(Normalize, self).__init__()
        self.power = power

    def forward(self, x):
        # Per-row ||x||_p, kept as a column so the division broadcasts.
        p = self.power
        row_norm = x.pow(p).sum(1, keepdim=True).pow(1.0 / p)
        return x.div(row_norm)
class BasicBlock(nn.Module):
    """Residual block: two 3x3 conv/BN layers plus a skip connection."""

    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # Project the skip path only when spatial size or channel count
        # changes; otherwise it stays an identity.
        needs_projection = stride != 1 or in_planes != self.expansion * planes
        if needs_projection:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion * planes),
            )
        else:
            self.shortcut = nn.Sequential()

    def forward(self, x):
        y = F.relu(self.bn1(self.conv1(x)))
        y = self.bn2(self.conv2(y))
        y = y + self.shortcut(x)  # residual addition
        return F.relu(y)
class Bottleneck(nn.Module):
    """Residual bottleneck: 1x1 reduce, 3x3 conv, 1x1 expand (x4) + skip."""

    expansion = 4

    def __init__(self, in_planes, planes, stride=1):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, self.expansion * planes, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(self.expansion * planes)
        # Identity skip unless the shape changes, in which case a 1x1
        # projection matches channels/stride.
        if stride == 1 and in_planes == self.expansion * planes:
            self.shortcut = nn.Sequential()
        else:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion * planes),
            )

    def forward(self, x):
        y = F.relu(self.bn1(self.conv1(x)))
        y = F.relu(self.bn2(self.conv2(y)))
        y = self.bn3(self.conv3(y))
        y = y + self.shortcut(x)  # residual addition
        return F.relu(y)
class ResNet(nn.Module):
    """CIFAR-style ResNet trunk ending in a low-dim L2-normalised embedding."""

    def __init__(self, block, num_blocks, low_dim=128):
        super(ResNet, self).__init__()
        self.in_planes = 64
        self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.layer1 = self._make_layer(block, 64, num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, 128, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, 256, num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, 512, num_blocks[3], stride=2)
        self.linear = nn.Linear(512 * block.expansion, low_dim)
        self.l2norm = Normalize(2)

    def _make_layer(self, block, planes, num_blocks, stride):
        # Only the first block of a stage may downsample; the rest use
        # stride 1 while tracking the growing channel count.
        layers = []
        for s in [stride] + [1] * (num_blocks - 1):
            layers.append(block(self.in_planes, planes, s))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        for stage in (self.layer1, self.layer2, self.layer3, self.layer4):
            out = stage(out)
        # assumes 32x32 input -> 4x4 feature map here (TODO confirm)
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        return self.l2norm(self.linear(out))
import torch
from torch.nn import MSELoss, ReLU
from _paritybench_helpers import _mock_config, _mock_layer, _paritybench_base, _fails_compile
# Auto-generated test matrix: (module class, init-arg factory, forward-arg
# factory, whether the module is expected to TorchScript-compile).
TESTCASES = [
    # (nn.Module, init_args, forward_args, jit_compiles)
    (BasicBlock,
     lambda: ([], {'in_planes': 4, 'planes': 4}),
     lambda: ([torch.rand([4, 4, 4, 4])], {}),
     True),
    (Bottleneck,
     lambda: ([], {'in_planes': 4, 'planes': 4}),
     lambda: ([torch.rand([4, 4, 4, 4])], {}),
     True),
    (LinearAverage,
     lambda: ([], {'inputSize': 4, 'outputSize': 4}),
     lambda: ([torch.rand([4, 4]), torch.rand([4, 4])], {}),
     False),
    (Normalize,
     lambda: ([], {}),
     lambda: ([torch.rand([4, 4, 4, 4])], {}),
     True),
]
class Test_microsoft_snca_pytorch(_paritybench_base):
    # Auto-generated: each test instantiates one TESTCASES entry and runs a
    # forward pass (plus a JIT compile where flagged) via _check.
    def test_000(self):
        self._check(*TESTCASES[0])
    def test_001(self):
        self._check(*TESTCASES[1])
    def test_002(self):
        self._check(*TESTCASES[2])
    def test_003(self):
        self._check(*TESTCASES[3])
|
import unittest
from datacategoryvisitors.processeddatabuilders.ProcessedDataBuilderBase import ProcessedDataBuilderBase
class ProcessedDataBuilderBaseTest (unittest.TestCase):
    """Exercises the template-method contract of ProcessedDataBuilderBase
    through a dummy subclass."""

    def setUp(self) -> None:
        self._dummyProcessedDataBuilder = DummyProcessedDataBuilder()

    def testGetProcessedData(self) -> None:
        """Makes sure that when the dummy dummyProcessedDataBuilder is asked for
        its processed data it processes its data before returning an answer"""
        solutionProcessedData = [1.0,2.0,3.0,4.0,5.0]
        preprocessedData = ["Hello" , "World"]
        processedData = self._dummyProcessedDataBuilder.getProcessedData(preprocessedData)
        # assertEquals is a deprecated alias removed in Python 3.12; use
        # assertEqual instead.
        self.assertEqual(solutionProcessedData, processedData)
        self.assertEqual(preprocessedData, self._dummyProcessedDataBuilder._preprocessedData)
class DummyProcessedDataBuilder (ProcessedDataBuilderBase):
    """Dummy ProcessedDataBuilder that just always builds the same processed
    data"""

    def _buildProcessedData(self) -> None:
        # Fixed output [1.0 .. 5.0], regardless of the preprocessed input.
        self._processedData = [float(value) for value in range(1, 6)]
# Local development settings: start from the shared base settings and
# override the database and cache backends for a developer machine.
from .main import *
# Development only — never run with DEBUG enabled in production.
DEBUG = True
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'bivver_local',
        'USER': 'root',
        # NOTE(review): empty root password is only acceptable for a local
        # MySQL instance; never commit real credentials here.
        'PASSWORD': '',
        'HOST': '127.0.0.1',
        'PORT': '3306'
    }
}
# Per-process in-memory cache; resets on restart — fine for development.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'
    }
}
|
from typer.testing import CliRunner
from rasalit.__main__ import app
from rasalit import __version__
# CliRunner drives the Typer app in-process, capturing exit code and stdout.
runner = CliRunner()
def test_app():
    # `--help` must always succeed with exit code 0.
    result = runner.invoke(app, ["--help"])
    assert result.exit_code == 0
def test_version():
    # The `version` command must print the installed package version.
    result = runner.invoke(app, ["version"])
    assert __version__ in result.stdout
|
import tstables as tstab
import pandas as pd
import tables as tb
import datetime as dt
class TsDesc(tb.IsDescription):
    # PyTables row schema for the tick series: epoch timestamp plus the
    # 'Last' traded price.
    timestamp = tb.Int64Col(pos=0)
    Last = tb.Float64Col(pos=1)
# NOTE(review): hard-coded absolute Windows path — parameterise before
# running this anywhere else.
path = 'C:/Users/ivanm/Documents/Currency/AUDNZD/'
# data = pd.read_csv('C:/ticks.csv', index_col=0, parse_dates=True, decimal=',')
# data = pd.read_csv(path + 'ticks-AUDNZD-20200101120000-20210610112059.csv', index_col=0, parse_dates=True, decimal=',')
data = pd.read_csv(path + 'ticks-AUDNZD-20110101120000-20210610094559.csv', index_col=0, parse_dates=True, decimal=',')
# Keep only the 'Last' price column, coerced to float.
data = pd.DataFrame(data['Last'], dtype=float)
data.info()
# h5 = tb.open_file('C:/Users/ivanm/Documents/pythi/' + 'ticks.h5', 'w')
# h5 = tb.open_file(path + 'ticks-AUDNZD-20200101120000-20210610112059.h5', 'w')
# Convert the CSV ticks into a tstables time series inside a new HDF5 file.
h5 = tb.open_file(path + 'ticks-AUDNZD-20110101120000-20210610094559.h5', 'w')
ts = h5.create_ts('/', 'ts', TsDesc)
ts.append(data)
print(type(ts))
print('end')
h5.close()
|
import csv
def csv_parse(file_name, float_columns=(0, 1, 2, 3, 4, 9, 10, 11, 12, 13, 14, 15, 16, 17)):
    """Read a CSV file from the current directory and type its columns.

    Args:
        file_name: name of the CSV file, relative to the current directory.
        float_columns: indexes of the columns converted to float; all other
            columns keep their raw string value. The default reproduces the
            original hard-coded 25-column dataset layout.

    Returns:
        (fields, rows): the header row, and the list of typed data rows.
    """
    float_columns = set(float_columns)
    with open("./"+file_name) as csvDataFile:
        reader = csv.reader(csvDataFile)
        data = list(reader)
    fields = data[0]  # first row is the header
    rows = []
    for raw_row in data[1:]:
        rows.append([
            float(value) if i in float_columns else value
            for i, value in enumerate(raw_row)
        ])
    return fields, rows
'''header,training_data = csv_parse("DataSet.csv")
import pandas as pd
df = pd.DataFrame(training_data)
df.to_csv("test_parse.csv")''' |
from sklearn import ensemble
from sklearn.feature_extraction import DictVectorizer
from sklearn import metrics
from import_data import get_data, get_oversampled_data
from timeit import default_timer as timer
import tensorflow as tf
import numpy as np
from scipy import stats
def load_data():
    """Vectorise player stats for the 2014-2018 seasons.

    Builds (players_csv, all_stars_txt, year) triples, loads them through
    get_data, and one-hot/numeric encodes the dict records with a
    DictVectorizer.

    Returns:
        (features, labels, names): numpy feature matrix, numpy label
        vector, and the player-name list.
    """
    vectorizer = DictVectorizer()
    file_pairs = [
        [('data/players_' + str(year) + '.csv'),
         ('data/all_stars_' + str(year) + '.txt'),
         year]
        for year in range(2014, 2019)
    ]
    var, labels, names = get_data(file_pairs)
    features = np.array(vectorizer.fit_transform(var).toarray())
    return features, np.array(labels), names
def load_oversampled_data():
    """Like load_data(), but via get_oversampled_data to rebalance classes.

    Returns:
        (features, labels, names): numpy feature matrix, numpy label
        vector, and the player-name list.
    """
    vectorizer = DictVectorizer()
    file_pairs = [
        [('data/players_' + str(year) + '.csv'),
         ('data/all_stars_' + str(year) + '.txt'),
         year]
        for year in range(2014, 2019)
    ]
    var, labels, names = get_oversampled_data(file_pairs)
    features = np.array(vectorizer.fit_transform(var).toarray())
    return features, np.array(labels), names
def z_scorify(data):
    """Z-score each column of `data`, passing boolean (0/1) columns through.

    Columns whose values are all 0/1 are treated as one-hot indicators and
    left untouched; every other column is standardised to zero mean and
    unit variance with scipy.stats.zscore.
    """
    columns = []
    for column in data.T:
        is_indicator = np.array_equal(column, column.astype(bool))
        columns.append(column if is_indicator else stats.zscore(column))
    return np.array(columns).T
def train_sklearn():
    """Train a RandomForest on the first 90% of samples, test on the rest.

    Data come from load_data(); the split is positional (no shuffling).
    Prints training time and the shared metrics report for the held-out
    final 10%.
    """
    features, labels, names = load_data()
    num_samples = len(labels)
    classifier = ensemble.RandomForestClassifier()
    names_test = names[9*(num_samples // 10):]
    print("Training started")
    start = timer()
    # Train on the first 90% of the data
    classifier.fit(features[:9*(num_samples // 10)], labels[:9*(num_samples // 10)])
    end = timer()
    print("Training ended, took " + str(end-start) + " seconds")
    # Now predict the last 10%:
    expected = labels[9*(num_samples // 10):]
    predicted = classifier.predict(features[9*(num_samples // 10):])
    print_metrics(expected, predicted, names_test)
def train_tf_keras():
    """Train a small dense Keras network on the oversampled data.

    Uses a positional 90/10 train/test split and z-scored features; the
    single sigmoid output does binary (All-Star or not) classification.
    Prints Keras evaluation metrics plus the shared print_metrics report.
    """
    features, labels, names = load_oversampled_data()
    features = z_scorify(features)
    num_samples = len(labels)
    test_labels = labels[9*(num_samples // 10):]
    test_features = features[9*(num_samples // 10):]
    train_labels = labels[:9*(num_samples // 10)]
    train_features = features[:9*(num_samples // 10)]
    names_test = names[9*(num_samples // 10):]
    batch_size = 512
    #Define model
    model = tf.keras.models.Sequential()
    model.add(tf.keras.layers.Dense(64, activation='relu', input_dim=features.shape[1]))
    model.add(tf.keras.layers.Dense(12, activation='relu'))
    model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
    # NOTE(review): `lr` is the TF1-era argument name; recent Keras expects
    # `learning_rate` — confirm against the pinned TF version.
    sgd = tf.keras.optimizers.SGD(lr=0.01)
    model.compile(optimizer=sgd, loss='binary_crossentropy', metrics=['accuracy'])
    #Run model
    model.fit(x=train_features, y=train_labels, batch_size=batch_size, epochs=100)
    this_metrics = model.evaluate(test_features, test_labels)
    print(model.metrics_names)
    print(this_metrics)
    print(model.predict(test_features))
    # Threshold the sigmoid output at 0.5 for hard 0/1 predictions.
    preds = np.array(model.predict(test_features) > 0.5)
    print_metrics(test_labels, preds, names_test)
def train_tf():
    """Train a logistic-regression model in raw TensorFlow 1.x.

    Loads the oversampled data, z-scores it, fits sigmoid(w.x + b) with a
    log-loss objective via mini-batch gradient descent, and prints the
    metrics report for the held-out final 10% of samples.
    """
    features, labels, names = load_oversampled_data()
    features = z_scorify(features)
    num_samples = len(labels)
    test_labels = labels[9*(num_samples // 10):]
    test_features = features[9*(num_samples // 10):]
    train_labels = labels[:9*(num_samples // 10)]
    train_features = features[:9*(num_samples // 10)]
    names_test = names[9*(num_samples // 10):]
    batch_size = 512
    num_epochs = 10
    # Define model: single linear layer + sigmoid (logistic regression).
    W = tf.Variable(tf.random_uniform([features.shape[1], 1], dtype=tf.float32))
    x = tf.placeholder(tf.float32, shape=(None, features.shape[1]))
    b = tf.Variable(tf.zeros([1]))
    a = tf.add(tf.matmul(x, W), b)
    z = tf.sigmoid(a)
    predictions = tf.to_int32(z > 0.5)
    targets = tf.placeholder(tf.float32, shape=[None, 1])
    loss = tf.losses.log_loss(targets, z)
    train = tf.train.GradientDescentOptimizer(learning_rate=0.1).minimize(loss)
    init = tf.global_variables_initializer()
    num_complete_batches = len(train_labels)//batch_size
    with tf.Session() as sess:
        sess.run(init, feed_dict={x:train_features[0:batch_size]})
        for epoch in range(num_epochs):
            # BUG FIX: the original reused `i` for both the epoch and the
            # batch loop, so the progress check below always saw the last
            # batch index instead of the epoch number.
            for batch in range(num_complete_batches):
                start = batch*batch_size
                end = (batch+1)*batch_size
                sess.run(train, feed_dict={x:train_features[start:end], targets: np.expand_dims(train_labels[start:end], -1)})
            # Train on the leftover partial batch at the end of the epoch.
            sess.run(train, feed_dict={x: train_features[num_complete_batches*batch_size:], targets: np.expand_dims(train_labels[num_complete_batches*batch_size:], -1)})
            if(num_epochs < 10 or epoch % 10 == 0):
                print('Iteration ' + str(epoch) + ': ')
                values = sess.run(predictions, feed_dict={x: test_features})
                print(np.squeeze(values))
                print_metrics(test_labels, np.squeeze(values), names_test)
        print('Training finished')
        values = sess.run(predictions, feed_dict={x: test_features})
        print(np.squeeze(values))
        print_metrics(test_labels, np.squeeze(values), names_test)
        sess.close()
def print_metrics(expected, predicted, names=None):
    """Print a classification report, confusion matrix, and log loss.

    When `names` is given, also list the players misclassified in either
    direction, plus the correctly predicted All-Stars.
    """
    print("Classification report: \n%s\n"
          % (metrics.classification_report(expected, predicted)))
    print("Confusion matrix:\n%s" % metrics.confusion_matrix(expected, predicted))
    print("Log loss:\n%s" % metrics.log_loss(expected, predicted))
    if names is None:
        return
    overrated = []
    underrated = []
    correct_positives = []
    for idx in range(len(expected)):
        truth, guess = expected[idx], predicted[idx]
        if truth < guess:
            # We predicted they would be all stars but they weren't.
            underrated.append(names[idx])
        elif truth > guess:
            # We predicted they wouldn't be all stars but they were.
            overrated.append(names[idx])
        elif truth == 1 and guess == 1:
            # Correct All-Star predictions.
            correct_positives.append(names[idx])
    print('Overrated:')
    print(overrated)
    print('Underrated:')
    print(underrated)
    print('Correct All Stars')
    print(correct_positives)
def main():
    """Entry point: train and evaluate the raw-TensorFlow model."""
    train_tf()


if __name__ == "__main__":  # avoid kicking off training on import
    main()
import serial
import subprocess as sp
import time
# Open the GPS receiver's serial port (4800 baud is the NMEA 0183
# convention); readline() blocks for at most 5 seconds.
gps = serial.Serial("/dev/ttyUSB0", baudrate=4800, timeout=5)
# Maps satellite PRN -> [PRN, elevation, azimuth, SNR]; filled by sat_data().
satellites = {}
def sat_data(gps):
    """Read one NMEA sentence from `gps` and update the satellite table.

    Only $GPGSV ("satellites in view") sentences are processed: each group
    of four fields (PRN, elevation, azimuth, SNR) becomes an entry in the
    global `satellites` dict keyed by PRN. Returns the dict after a GPGSV
    update; returns None for other sentences or undecodable lines.
    """
    global satellites
    try:
        line = str(gps.readline(), 'ASCII')
        data = line.split(',')
        if data[0] == '$GPGSV':
            # The last field arrives as "<snr>*<checksum>"; split the
            # checksum off so it does not contaminate the final SNR value.
            checksum_data = data[-1].split('*')[1].split('\r')[0]
            data[-1] = data[-1].split('*')[0]
            data.append(checksum_data)
            # Drop the 4 sentence-header fields and the trailing checksum,
            # leaving only 4-field satellite records.
            data = data[4:-1]
            n_sats = int(len(data) / 4)
            for i in range(n_sats):
                name = int(data[i * 4])  # PRN number identifies the satellite
                name_data = data[i * 4:(i * 4) + 4]
                for e in range(len(name_data)):
                    if name_data[e] == '':  # if SNR = '' => sat is out of sight but connected
                        name_data[e] = 0
                    else:  # make all other values integers
                        name_data[e] = int(name_data[e])
                satellites[name] = name_data  # Creating satellite dictionary
            return satellites
    except UnicodeError:
        # Garbled serial bytes: skip this line; the next call retries.
        pass
def gui():
    """Continuously show a human-readable satellite table in the terminal.

    Polls the GPS, clears the screen, and prints a timestamp plus one line
    per tracked satellite. Runs until interrupted with Ctrl+C.
    """
    while True:
        try:
            sat_data(gps)
            sp.call('clear', shell=True)  # wipe the terminal between frames
            print("{}:{}:{}".format(time.localtime()[3], time.localtime()[4], time.localtime()[5]))
            for key, value in satellites.items():
                print("Satellite {}: elevation = {}, azimuth = {}, SNR = {}".format(key, value[1], value[2], value[3]))
        except KeyboardInterrupt:
            break
def raw():
    """Continuously dump the raw satellite dictionary to the terminal.

    Same polling loop as gui(), but prints the unformatted dict. Runs
    until interrupted with Ctrl+C.
    """
    while True:
        try:
            sat_data(gps)
            sp.call('clear', shell=True)  # wipe the terminal between frames
            print(satellites)
        except KeyboardInterrupt:
            break
# Simple text-menu entry point. Normalise the answer so "gui"/"raw" (any
# case, stray whitespace) also work; "GUI" and "RAW" behave exactly as
# before. input() already returns str, so the old str() wrapper is gone.
answer = input("GUI OR RAW: ").strip().upper()
if answer == 'GUI':
    gui()
elif answer == 'RAW':
    raw()
print('TERMINATED')
# https://github.com/BenjaminGC/ground-control.git
|
# encoding: utf-8
# module PyQt4.QtGui
# from C:\Python27\lib\site-packages\PyQt4\QtGui.pyd
# by generator 1.145
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
from QAbstractScrollArea import QAbstractScrollArea
class QTextEdit(QAbstractScrollArea):
"""
QTextEdit(QWidget parent=None)
QTextEdit(QString, QWidget parent=None)
"""
def acceptRichText(self): # real signature unknown; restored from __doc__
""" QTextEdit.acceptRichText() -> bool """
return False
def actionEvent(self, *args, **kwargs): # real signature unknown
pass
def alignment(self): # real signature unknown; restored from __doc__
""" QTextEdit.alignment() -> Qt.Alignment """
pass
def anchorAt(self, QPoint): # real signature unknown; restored from __doc__
""" QTextEdit.anchorAt(QPoint) -> QString """
pass
def append(self, QString): # real signature unknown; restored from __doc__
""" QTextEdit.append(QString) """
pass
def autoFormatting(self): # real signature unknown; restored from __doc__
""" QTextEdit.autoFormatting() -> QTextEdit.AutoFormatting """
pass
def canInsertFromMimeData(self, QMimeData): # real signature unknown; restored from __doc__
""" QTextEdit.canInsertFromMimeData(QMimeData) -> bool """
return False
def canPaste(self): # real signature unknown; restored from __doc__
""" QTextEdit.canPaste() -> bool """
return False
def changeEvent(self, QEvent): # real signature unknown; restored from __doc__
""" QTextEdit.changeEvent(QEvent) """
pass
def childEvent(self, *args, **kwargs): # real signature unknown
pass
def clear(self): # real signature unknown; restored from __doc__
""" QTextEdit.clear() """
pass
def closeEvent(self, *args, **kwargs): # real signature unknown
pass
def connectNotify(self, *args, **kwargs): # real signature unknown
pass
def contextMenuEvent(self, QContextMenuEvent): # real signature unknown; restored from __doc__
""" QTextEdit.contextMenuEvent(QContextMenuEvent) """
pass
def copy(self): # real signature unknown; restored from __doc__
""" QTextEdit.copy() """
pass
def copyAvailable(self, *args, **kwargs): # real signature unknown
""" QTextEdit.copyAvailable[bool] [signal] """
pass
def create(self, *args, **kwargs): # real signature unknown
pass
def createMimeDataFromSelection(self): # real signature unknown; restored from __doc__
""" QTextEdit.createMimeDataFromSelection() -> QMimeData """
pass
def createStandardContextMenu(self, QPoint=None): # real signature unknown; restored from __doc__ with multiple overloads
"""
QTextEdit.createStandardContextMenu() -> QMenu
QTextEdit.createStandardContextMenu(QPoint) -> QMenu
"""
return QMenu
def currentCharFormat(self): # real signature unknown; restored from __doc__
""" QTextEdit.currentCharFormat() -> QTextCharFormat """
return QTextCharFormat
def currentCharFormatChanged(self, *args, **kwargs): # real signature unknown
""" QTextEdit.currentCharFormatChanged[QTextCharFormat] [signal] """
pass
def currentFont(self): # real signature unknown; restored from __doc__
""" QTextEdit.currentFont() -> QFont """
return QFont
def cursorForPosition(self, QPoint): # real signature unknown; restored from __doc__
""" QTextEdit.cursorForPosition(QPoint) -> QTextCursor """
return QTextCursor
def cursorPositionChanged(self, *args, **kwargs): # real signature unknown
""" QTextEdit.cursorPositionChanged [signal] """
pass
def cursorRect(self, QTextCursor=None): # real signature unknown; restored from __doc__ with multiple overloads
"""
QTextEdit.cursorRect(QTextCursor) -> QRect
QTextEdit.cursorRect() -> QRect
"""
pass
def cursorWidth(self): # real signature unknown; restored from __doc__
""" QTextEdit.cursorWidth() -> int """
return 0
def customEvent(self, *args, **kwargs): # real signature unknown
pass
def cut(self): # real signature unknown; restored from __doc__
""" QTextEdit.cut() """
pass
def destroy(self, *args, **kwargs): # real signature unknown
pass
def disconnectNotify(self, *args, **kwargs): # real signature unknown
pass
def document(self): # real signature unknown; restored from __doc__
""" QTextEdit.document() -> QTextDocument """
return QTextDocument
def documentTitle(self): # real signature unknown; restored from __doc__
""" QTextEdit.documentTitle() -> QString """
pass
def dragEnterEvent(self, QDragEnterEvent): # real signature unknown; restored from __doc__
""" QTextEdit.dragEnterEvent(QDragEnterEvent) """
pass
def dragLeaveEvent(self, QDragLeaveEvent): # real signature unknown; restored from __doc__
""" QTextEdit.dragLeaveEvent(QDragLeaveEvent) """
pass
def dragMoveEvent(self, QDragMoveEvent): # real signature unknown; restored from __doc__
""" QTextEdit.dragMoveEvent(QDragMoveEvent) """
pass
def drawFrame(self, *args, **kwargs): # real signature unknown
pass
def dropEvent(self, QDropEvent): # real signature unknown; restored from __doc__
""" QTextEdit.dropEvent(QDropEvent) """
pass
def enabledChange(self, *args, **kwargs): # real signature unknown
pass
def ensureCursorVisible(self): # real signature unknown; restored from __doc__
""" QTextEdit.ensureCursorVisible() """
pass
def enterEvent(self, *args, **kwargs): # real signature unknown
pass
def event(self, QEvent): # real signature unknown; restored from __doc__
""" QTextEdit.event(QEvent) -> bool """
return False
def extraSelections(self): # real signature unknown; restored from __doc__
""" QTextEdit.extraSelections() -> list-of-QTextEdit.ExtraSelection """
pass
def find(self, QString, QTextDocument_FindFlags_options=0): # real signature unknown; restored from __doc__
""" QTextEdit.find(QString, QTextDocument.FindFlags options=0) -> bool """
return False
def focusInEvent(self, QFocusEvent): # real signature unknown; restored from __doc__
""" QTextEdit.focusInEvent(QFocusEvent) """
pass
def focusNextChild(self, *args, **kwargs): # real signature unknown
pass
def focusNextPrevChild(self, bool): # real signature unknown; restored from __doc__
""" QTextEdit.focusNextPrevChild(bool) -> bool """
return False
def focusOutEvent(self, QFocusEvent): # real signature unknown; restored from __doc__
""" QTextEdit.focusOutEvent(QFocusEvent) """
pass
def focusPreviousChild(self, *args, **kwargs): # real signature unknown
pass
def fontChange(self, *args, **kwargs): # real signature unknown
pass
def fontFamily(self): # real signature unknown; restored from __doc__
""" QTextEdit.fontFamily() -> QString """
pass
def fontItalic(self): # real signature unknown; restored from __doc__
""" QTextEdit.fontItalic() -> bool """
return False
def fontPointSize(self): # real signature unknown; restored from __doc__
""" QTextEdit.fontPointSize() -> float """
return 0.0
def fontUnderline(self): # real signature unknown; restored from __doc__
""" QTextEdit.fontUnderline() -> bool """
return False
def fontWeight(self): # real signature unknown; restored from __doc__
""" QTextEdit.fontWeight() -> int """
return 0
def hideEvent(self, *args, **kwargs): # real signature unknown
pass
def inputMethodEvent(self, QInputMethodEvent): # real signature unknown; restored from __doc__
""" QTextEdit.inputMethodEvent(QInputMethodEvent) """
pass
def inputMethodQuery(self, Qt_InputMethodQuery): # real signature unknown; restored from __doc__
""" QTextEdit.inputMethodQuery(Qt.InputMethodQuery) -> QVariant """
pass
def insertFromMimeData(self, QMimeData): # real signature unknown; restored from __doc__
""" QTextEdit.insertFromMimeData(QMimeData) """
pass
def insertHtml(self, QString): # real signature unknown; restored from __doc__
""" QTextEdit.insertHtml(QString) """
pass
def insertPlainText(self, QString): # real signature unknown; restored from __doc__
""" QTextEdit.insertPlainText(QString) """
pass
def isReadOnly(self): # real signature unknown; restored from __doc__
""" QTextEdit.isReadOnly() -> bool """
return False
def isUndoRedoEnabled(self): # real signature unknown; restored from __doc__
""" QTextEdit.isUndoRedoEnabled() -> bool """
return False
def keyPressEvent(self, QKeyEvent): # real signature unknown; restored from __doc__
""" QTextEdit.keyPressEvent(QKeyEvent) """
pass
def keyReleaseEvent(self, QKeyEvent): # real signature unknown; restored from __doc__
""" QTextEdit.keyReleaseEvent(QKeyEvent) """
pass
def languageChange(self, *args, **kwargs): # real signature unknown
pass
def leaveEvent(self, *args, **kwargs): # real signature unknown
pass
def lineWrapColumnOrWidth(self): # real signature unknown; restored from __doc__
""" QTextEdit.lineWrapColumnOrWidth() -> int """
return 0
def lineWrapMode(self): # real signature unknown; restored from __doc__
""" QTextEdit.lineWrapMode() -> QTextEdit.LineWrapMode """
pass
def loadResource(self, p_int, QUrl): # real signature unknown; restored from __doc__
""" QTextEdit.loadResource(int, QUrl) -> QVariant """
pass
def mergeCurrentCharFormat(self, QTextCharFormat): # real signature unknown; restored from __doc__
""" QTextEdit.mergeCurrentCharFormat(QTextCharFormat) """
pass
def metric(self, *args, **kwargs): # real signature unknown
pass
def mouseDoubleClickEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QTextEdit.mouseDoubleClickEvent(QMouseEvent) """
pass
def mouseMoveEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QTextEdit.mouseMoveEvent(QMouseEvent) """
pass
def mousePressEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QTextEdit.mousePressEvent(QMouseEvent) """
pass
def mouseReleaseEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QTextEdit.mouseReleaseEvent(QMouseEvent) """
pass
def moveCursor(self, QTextCursor_MoveOperation, QTextCursor_MoveMode_mode=None): # real signature unknown; restored from __doc__
""" QTextEdit.moveCursor(QTextCursor.MoveOperation, QTextCursor.MoveMode mode=QTextCursor.MoveAnchor) """
pass
def moveEvent(self, *args, **kwargs): # real signature unknown
pass
def overwriteMode(self): # real signature unknown; restored from __doc__
""" QTextEdit.overwriteMode() -> bool """
return False
def paintEvent(self, QPaintEvent): # real signature unknown; restored from __doc__
""" QTextEdit.paintEvent(QPaintEvent) """
pass
def paletteChange(self, *args, **kwargs): # real signature unknown
pass
def paste(self): # real signature unknown; restored from __doc__
""" QTextEdit.paste() """
pass
def print_(self, QPrinter): # real signature unknown; restored from __doc__
""" QTextEdit.print_(QPrinter) """
pass
def receivers(self, *args, **kwargs): # real signature unknown
pass
def redo(self): # real signature unknown; restored from __doc__
""" QTextEdit.redo() """
pass
def redoAvailable(self, *args, **kwargs): # real signature unknown
""" QTextEdit.redoAvailable[bool] [signal] """
pass
def resetInputContext(self, *args, **kwargs): # real signature unknown
pass
def resizeEvent(self, QResizeEvent): # real signature unknown; restored from __doc__
""" QTextEdit.resizeEvent(QResizeEvent) """
pass
def scrollContentsBy(self, p_int, p_int_1): # real signature unknown; restored from __doc__
""" QTextEdit.scrollContentsBy(int, int) """
pass
def scrollToAnchor(self, QString): # real signature unknown; restored from __doc__
""" QTextEdit.scrollToAnchor(QString) """
pass
def selectAll(self): # real signature unknown; restored from __doc__
""" QTextEdit.selectAll() """
pass
def selectionChanged(self, *args, **kwargs): # real signature unknown
""" QTextEdit.selectionChanged [signal] """
pass
def sender(self, *args, **kwargs): # real signature unknown
pass
def senderSignalIndex(self, *args, **kwargs): # real signature unknown
pass
def setAcceptRichText(self, bool): # real signature unknown; restored from __doc__
""" QTextEdit.setAcceptRichText(bool) """
pass
def setAlignment(self, Qt_Alignment): # real signature unknown; restored from __doc__
""" QTextEdit.setAlignment(Qt.Alignment) """
pass
def setAutoFormatting(self, QTextEdit_AutoFormatting): # real signature unknown; restored from __doc__
""" QTextEdit.setAutoFormatting(QTextEdit.AutoFormatting) """
pass
def setCurrentCharFormat(self, QTextCharFormat): # real signature unknown; restored from __doc__
""" QTextEdit.setCurrentCharFormat(QTextCharFormat) """
pass
def setCurrentFont(self, QFont): # real signature unknown; restored from __doc__
""" QTextEdit.setCurrentFont(QFont) """
pass
def setCursorWidth(self, p_int): # real signature unknown; restored from __doc__
""" QTextEdit.setCursorWidth(int) """
pass
def setDocument(self, QTextDocument): # real signature unknown; restored from __doc__
""" QTextEdit.setDocument(QTextDocument) """
pass
def setDocumentTitle(self, QString): # real signature unknown; restored from __doc__
""" QTextEdit.setDocumentTitle(QString) """
pass
def setExtraSelections(self, list_of_QTextEdit_ExtraSelection): # real signature unknown; restored from __doc__
""" QTextEdit.setExtraSelections(list-of-QTextEdit.ExtraSelection) """
pass
def setFontFamily(self, QString): # real signature unknown; restored from __doc__
""" QTextEdit.setFontFamily(QString) """
pass
def setFontItalic(self, bool): # real signature unknown; restored from __doc__
""" QTextEdit.setFontItalic(bool) """
pass
def setFontPointSize(self, p_float): # real signature unknown; restored from __doc__
""" QTextEdit.setFontPointSize(float) """
pass
def setFontUnderline(self, bool): # real signature unknown; restored from __doc__
""" QTextEdit.setFontUnderline(bool) """
pass
def setFontWeight(self, p_int): # real signature unknown; restored from __doc__
""" QTextEdit.setFontWeight(int) """
pass
def setHtml(self, QString): # real signature unknown; restored from __doc__
""" QTextEdit.setHtml(QString) """
pass
def setLineWrapColumnOrWidth(self, p_int): # real signature unknown; restored from __doc__
""" QTextEdit.setLineWrapColumnOrWidth(int) """
pass
def setLineWrapMode(self, QTextEdit_LineWrapMode): # real signature unknown; restored from __doc__
""" QTextEdit.setLineWrapMode(QTextEdit.LineWrapMode) """
pass
def setOverwriteMode(self, bool): # real signature unknown; restored from __doc__
""" QTextEdit.setOverwriteMode(bool) """
pass
def setPlainText(self, QString): # real signature unknown; restored from __doc__
""" QTextEdit.setPlainText(QString) """
pass
def setReadOnly(self, bool): # real signature unknown; restored from __doc__
""" QTextEdit.setReadOnly(bool) """
pass
def setTabChangesFocus(self, bool): # real signature unknown; restored from __doc__
""" QTextEdit.setTabChangesFocus(bool) """
pass
def setTabStopWidth(self, p_int): # real signature unknown; restored from __doc__
""" QTextEdit.setTabStopWidth(int) """
pass
def setText(self, QString): # real signature unknown; restored from __doc__
""" QTextEdit.setText(QString) """
pass
def setTextBackgroundColor(self, QColor): # real signature unknown; restored from __doc__
""" QTextEdit.setTextBackgroundColor(QColor) """
pass
def setTextColor(self, QColor): # real signature unknown; restored from __doc__
""" QTextEdit.setTextColor(QColor) """
pass
def setTextCursor(self, QTextCursor): # real signature unknown; restored from __doc__
""" QTextEdit.setTextCursor(QTextCursor) """
pass
def setTextInteractionFlags(self, Qt_TextInteractionFlags): # real signature unknown; restored from __doc__
""" QTextEdit.setTextInteractionFlags(Qt.TextInteractionFlags) """
pass
def setUndoRedoEnabled(self, bool): # real signature unknown; restored from __doc__
""" QTextEdit.setUndoRedoEnabled(bool) """
pass
def setupViewport(self, *args, **kwargs): # real signature unknown
pass
def setViewportMargins(self, *args, **kwargs): # real signature unknown
pass
def setWordWrapMode(self, QTextOption_WrapMode): # real signature unknown; restored from __doc__
""" QTextEdit.setWordWrapMode(QTextOption.WrapMode) """
pass
def showEvent(self, QShowEvent): # real signature unknown; restored from __doc__
""" QTextEdit.showEvent(QShowEvent) """
pass
def tabChangesFocus(self): # real signature unknown; restored from __doc__
""" QTextEdit.tabChangesFocus() -> bool """
return False
def tabletEvent(self, *args, **kwargs): # real signature unknown
pass
def tabStopWidth(self): # real signature unknown; restored from __doc__
""" QTextEdit.tabStopWidth() -> int """
return 0
def textBackgroundColor(self): # real signature unknown; restored from __doc__
""" QTextEdit.textBackgroundColor() -> QColor """
return QColor
def textChanged(self, *args, **kwargs): # real signature unknown
""" QTextEdit.textChanged [signal] """
pass
def textColor(self): # real signature unknown; restored from __doc__
""" QTextEdit.textColor() -> QColor """
return QColor
    def textCursor(self): # real signature unknown; restored from __doc__
        """ QTextEdit.textCursor() -> QTextCursor """
        # Auto-generated IDE stub: the real implementation is in the native
        # Qt library; the class object returned here is only a placeholder.
        return QTextCursor
def textInteractionFlags(self): # real signature unknown; restored from __doc__
""" QTextEdit.textInteractionFlags() -> Qt.TextInteractionFlags """
pass
def timerEvent(self, QTimerEvent): # real signature unknown; restored from __doc__
""" QTextEdit.timerEvent(QTimerEvent) """
pass
def toHtml(self): # real signature unknown; restored from __doc__
""" QTextEdit.toHtml() -> QString """
pass
    def toPlainText(self): # real signature unknown; restored from __doc__
        """ QTextEdit.toPlainText() -> QString """
        # Auto-generated IDE stub; no value is returned by the placeholder.
        pass
def undo(self): # real signature unknown; restored from __doc__
""" QTextEdit.undo() """
pass
def undoAvailable(self, *args, **kwargs): # real signature unknown
""" QTextEdit.undoAvailable[bool] [signal] """
pass
def updateMicroFocus(self, *args, **kwargs): # real signature unknown
pass
def viewportEvent(self, *args, **kwargs): # real signature unknown
pass
def wheelEvent(self, QWheelEvent): # real signature unknown; restored from __doc__
""" QTextEdit.wheelEvent(QWheelEvent) """
pass
def windowActivationChange(self, *args, **kwargs): # real signature unknown
pass
def winEvent(self, *args, **kwargs): # real signature unknown
pass
def wordWrapMode(self): # real signature unknown; restored from __doc__
""" QTextEdit.wordWrapMode() -> QTextOption.WrapMode """
pass
def zoomIn(self, int_range=1): # real signature unknown; restored from __doc__
""" QTextEdit.zoomIn(int range=1) """
pass
def zoomOut(self, int_range=1): # real signature unknown; restored from __doc__
""" QTextEdit.zoomOut(int range=1) """
pass
def __init__(self, *__args): # real signature unknown; restored from __doc__ with multiple overloads
pass
AutoAll = -1
AutoBulletList = 1
AutoNone = 0
FixedColumnWidth = 3
FixedPixelWidth = 2
NoWrap = 0
WidgetWidth = 1
|
# Water-jug problem solved by repeated transfer.
# Euclid's theorem: an amount `needed` is measurable iff gcd(x, y) divides it
# (and it fits in the larger jug).
from math import gcd

x = int(input("enter the size of first jug "))
y = int(input("enter the size of second jug "))
a = 0  # current contents of jug 1
b = 0  # current contents of jug 2
needed = int(input("enter the remaining size needed"))
if needed == x:
    print("the solution is 1 gallon of jug 1 and 0 gallons of jug 2" )
elif needed == y:
    print("the solution is 0 gallon of jug 1 and 1 gallons of jug 2" )
elif needed > max(x, y) or (gcd(x, y) != 0 and needed % gcd(x, y) != 0):
    # fix: the original loop never terminated for impossible targets
    # (e.g. x=4, y=6, needed=5).
    print("no solution: the needed amount cannot be measured with these jugs")
else:
    # fix: the original condition kept looping when a == needed AND
    # b == needed held at the same time; stop as soon as either jug
    # holds the target amount.
    while a != needed and b != needed:
        if a == 0:
            a = x          # fill jug 1
        elif (b + a) <= y:
            b = b + a      # pour all of jug 1 into jug 2
            a = 0
        elif b == y:
            b = 0          # empty jug 2
        else:
            a = a - y + b  # top up jug 2 from jug 1
            b = y
        print("a: ", a, "b: ", b)
|
from flask import Flask
from flask_restful import Api
from flask_cors import CORS
from flask_sqlalchemy import SQLAlchemy
from hashlib import sha256
from controllers import userController, mainControllers
from database import db
from models import Organization, User
# Single module-level Flask application (no app factory).
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///static/db/gameofde.db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
# Allow cross-origin requests from the front-end.
CORS(app)
db.init_app(app)
# NOTE(review): the database is dropped and recreated on every import, so
# all data is lost at each restart - confirm this is intended (demo setup).
db.drop_all(app=app)
db.create_all(app=app)
# REST endpoint wiring: each controller class is mounted on its route.
api = Api(app)
api.add_resource(userController.LoginController, '/login')
api.add_resource(userController.CreateAccountController, '/create_account')
api.add_resource(userController.UserList, '/users')
api.add_resource(userController.UserController, '/users/<user_id>')
api.add_resource(mainControllers.OrganizationList, '/orgs')
api.add_resource(mainControllers.OrganizationController, '/orgs/<org_name>')
api.add_resource(mainControllers.NewsController, '/news/<org_name>')
api.add_resource(mainControllers.CaesarController, '/caesar')
api.add_resource(mainControllers.NoAuthCaesarController, '/test_caesar')
api.add_resource(mainControllers.CheckSolutionController, '/solutions')
api.add_resource(mainControllers.SavedSolutionsController, '/saved_solutions')
api.add_resource(mainControllers.SavedSolutionController, '/saved_solutions/<solution_id>')
def populate_db():
    """Seed the database with demo organizations and users.

    Best-effort: if seeding fails (for example because the rows already
    exist), the session is rolled back so it remains usable, instead of
    the error being silently swallowed with the session left dirty.
    """
    with app.app_context():
        try:
            db.session.add(Organization('CIA'))
            db.session.add(Organization('DEA'))
            db.session.add(Organization('DHS'))
            db.session.add(Organization('FBI'))
            db.session.add(Organization('KFC'))
            db.session.add(Organization('NSA'))
            db.session.add(User('leon', sha256('test'.encode()).hexdigest(), 'leon@email.net', 'admin', 5))
            db.session.add(User('student', sha256('ist440'.encode()).hexdigest(), 'student@psu.edu', 'admin', 4))
            db.session.add(User('bob', sha256('lazar'.encode()).hexdigest(), 'boblazar@ufosarereal.org', 'standard', 4))
            db.session.add(User('john', sha256('smith'.encode()).hexdigest(), 'johnsmith@psu.edu', 'standard', 4))
            db.session.add(User('cheney', sha256('dick'.encode()).hexdigest(), 'dickcheney@whitehouse.gov', 'admin', 1))
            db.session.add(User('skywalker', sha256('luke'.encode()).hexdigest(), 'luke@skywalker.com', 'admin', 2))
            db.session.add(User('palpatine', sha256('sidious'.encode()).hexdigest(), 'sheev@naboomail.net', 'admin', 3))
            db.session.commit()
        except Exception:
            # fix: was a bare `except: pass`, which also caught
            # SystemExit/KeyboardInterrupt and left the session broken.
            db.session.rollback()
# Seed demo data at import time so the API is usable immediately.
populate_db()
if __name__ == '__main__':
    app.run(debug=True)
|
from django.conf.urls import url
from api.views_set.classroom import *
# Classroom API routes.
# NOTE(review): the regexes are not anchored with `$`, so r'^create/' also
# matches longer paths such as 'create/extra/'; confirm this is intended.
urlpatterns = [
    url(r'^create/', create),
    url(r'^update/', update),
    url(r'^close/', close),
    url(r'^get_list/', get_list),
    url(r'^search/', search),
    url(r'^send_request/', send_request_to_join),
    url(r'^approve_request/', approve_request)
]
"""
======================COPYRIGHT/LICENSE START==========================
write.py: code for CCPN data model and code generation framework
Copyright (C) 2012 (CCPN Project)
=======================================================================
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
A copy of this license can be found in ../../../license/LGPL.license
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
======================COPYRIGHT/LICENSE END============================
for further information, please contact :
- CCPN website (http://www.ccpn.ac.uk/)
- email: ccpn@bioc.cam.ac.uk
=======================================================================
If you are using this software for academic purposes, we suggest
quoting the following references:
===========================REFERENCE START=============================
R. Fogh, J. Ionides, E. Ulrich, W. Boucher, W. Vranken, J.P. Linge, M.
Habeck, W. Rieping, T.N. Bhat, J. Westbrook, K. Henrick, G. Gilliland,
H. Berman, J. Thornton, M. Nilges, J. Markley and E. Laue (2002). The
CCPN project: An interim report on a data model for the NMR community
(Progress report). Nature Struct. Biol. 9, 416-418.
Rasmus H. Fogh, Wayne Boucher, Wim F. Vranken, Anne
Pajon, Tim J. Stevens, T.N. Bhat, John Westbrook, John M.C. Ionides and
Ernest D. Laue (2005). A framework for scientific data modeling and automated
software development. Bioinformatics 21, 1678-1684.
===========================REFERENCE END===============================
"""
import sys, os, operator, json, shutil
from ccpnmr.integrator.core import Io as intIo
#from ccpnmr.integrator.core import Util as intUtil
#from ccpnmr.integrator.plugins.Unio import Util as unioUtil
from memops.universal import Io as uniIo
# Fully qualified plugin name, passed to intIo.getNmrCalcRun below.
pluginName = 'ccpnmr.integrator.plugins.Unio'
# Unio bin environment variable, showing where execute scripts live
programBin = '${UNIO_BIN}'
# Unio xplor environment variable, showing where xplor template files live
unioXplor = '${UNIO_XPLOR}'
def write(nmrCalcRun, targetDir):
""" Write input files for Program run
Input:
nmrCalcRun: NmrCalc.Run
targetDir: destination directory.
"""
intIo.writeDataFiles(nmrCalcRun, targetDir)
jsonDict = intIo.makeJsonDict(nmrCalcRun)
# write properties file
propFile = uniIo.joinPath(targetDir, intIo.propFileName)
print 'About to write', propFile
open(propFile,'w').write(json.dumps(jsonDict, sort_keys=True,
indent=intIo.propIndent))
# Write program setup file
fileNameObj = nmrCalcRun.findFirstRunParameter(name='fileNameSetup')
if fileNameObj is not None:
filePath = uniIo.joinPath(targetDir, fileNameObj.textValue)
writeSetupFile(filePath, jsonDict)
def writeSetupFile(filePath, jsonDict):
    """ Write UNIO-style setup file with parameters in jsonDict

    Layout of the generated file:
      - a header comment
      - the two CCPN identification tags
      - all entries of jsonDict['RunParameter'] as "name = value" lines
      - one "name[i] = value" section per dict in jsonDict['PeakListData']
        (1-based index i, 'serial' entries skipped), sections separated by
        blank lines.
    """
    lineFormat = "%s = %s\n"
    dataLineFormat = "%s[%d] = %s\n"
    # `with` replaces the original try/finally and guarantees the file is
    # closed on any error.  A large block of commented-out legacy code that
    # lived inside the try body has been removed.
    with open(filePath, 'w') as fp:
        fp.write("# UNIO setup file - Generated by CcpNmr Integrator\n\n")
        for tag in ('CCPN.NmrCalcStore.name', 'CCPN.NmrCalcIdentifier'):
            fp.write(lineFormat % (tag, jsonDict[tag]))
        fp.write('\n')
        for item in sorted(jsonDict['RunParameter'].items()):
            fp.write(lineFormat % item)
        fp.write('\n')
        indx = 0
        for dd in jsonDict['PeakListData']:
            indx += 1
            for tag, val in sorted(dd.items()):
                if tag != 'serial':
                    # 'serial' is internal bookkeeping, not a UNIO parameter
                    fp.write(dataLineFormat % (tag, indx, val))
            fp.write('\n')
def prepareLocalExecution(self := None, **_unused) if False else None  # placeholder removed
if __name__ == '__main__':
""" Run write function from command line.
Input is projectDir NmrCalcRun.IDstring
projectDir must contain the desired project (and no others)
NmrCalcRun.IDstring is of the form
'%s+%s' % (NmrCalcStore.guid, Run.serial)
"""
from memops.general import Io as genIo
if len(sys.argv) >= 3:
# set up input
junk, projectDir, nmrCalcRunId = sys.argv
if len(sys.argv) >= 4:
targetDir = sys.argv[3]
else:
targetDir=None
#intIo.prepareStdWmsRun(pluginName, projectDir, nmrCalcRunId, targetDir)
# NB necessary.
# As side effect prints message that passes newCalcId to WMS Java
nmrCalcRun = intIo.getNmrCalcRun(projectDir, nmrCalcRunId, pluginName)
else:
#print "Usage: write projectDir NmrCalcRun.IDstring"
print "Usage: write projectDir NmrCalcRun.IDstring targetDir(optional)"
|
from HTMLTestRunner import HTMLTestRunner
import unittest
import os
import time

# NOTE: os.getcwd() is the *working* directory (wherever the script was
# invoked from), not the script's own location - discovery and the report
# path are therefore anchored on this file instead.

# project root = directory containing this runner script
start_dir = os.path.dirname(os.path.abspath(__file__))
testsuite = unittest.defaultTestLoader.discover(start_dir=start_dir, pattern='test*.py')

now = time.strftime("%Y-%m-%d %H_%M_%S")
report_dir = "./report"
# fix: open() failed when the report directory did not exist yet
if not os.path.isdir(report_dir):
    os.makedirs(report_dir)
report_path = os.path.join(report_dir, "result_" + now + ".html")
# stream      -> file the HTML report is written to
# title       -> report title
# description -> report description
# verbosity   -> log level, default 1
# fix: `with` guarantees the report file is closed even if a test run
# raises (the original leaked `fp` on error).
with open(report_path, 'wb') as fp:
    runner = HTMLTestRunner(stream=fp, title="shopxo test report",
                            description='None', verbosity=2)
    runner.run(testsuite)
# Copyright 2015, Kay Hayen, mailto:kay.hayen@gmail.com
#
# Part of "Nuitka", an optimizing Python compiler that is compatible and
# integrates with CPython, but also works on its own.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Nodes for try/except
The try/except is re-formulated to only one handler. The different catches
are conditional statements and not the issue anymore.
"""
from .NodeBases import StatementChildrenHavingBase
class StatementTryExcept(StatementChildrenHavingBase):
    # Try/except statement node with a single, already-merged handler:
    # the per-exception "except" clauses were re-formulated earlier into
    # conditional statements inside the one "handling" child.
    kind = "STATEMENT_TRY_EXCEPT"
    named_children = (
        "tried",
        "handling"
    )
    def __init__(self, tried, handling, public_exc, source_ref):
        # public_exc: bool flag, exposed via needsExceptionPublish() below.
        self.public_exc = public_exc
        StatementChildrenHavingBase.__init__(
            self,
            values = {
                "tried" : tried,
                "handling" : handling
            },
            source_ref = source_ref
        )
        assert type(public_exc) is bool
    # Child accessors generated from the named children above.
    getBlockTry = StatementChildrenHavingBase.childGetter(
        "tried"
    )
    setBlockTry = StatementChildrenHavingBase.childSetter(
        "tried"
    )
    getExceptionHandling = StatementChildrenHavingBase.childGetter(
        "handling"
    )
    def isStatementAborting(self):
        # Aborting only if *both* the tried block and the handler abort;
        # otherwise execution may continue after this statement.
        tried_block = self.getBlockTry()
        handling = self.getExceptionHandling()
        if tried_block is not None and tried_block.isStatementAborting() and \
           handling is not None and handling.isStatementAborting():
            return True
        return False
    def mayRaiseException(self, exception_type):
        # An exception escapes only if the tried block can raise *and* the
        # handler can raise as well (the handler otherwise swallows it).
        tried = self.getBlockTry()
        if tried is None:
            return False
        handling = self.getExceptionHandling()
        if handling is None:
            return False
        return handling.mayRaiseException(exception_type) and \
               tried.mayRaiseException(exception_type)
    def mayReturn(self):
        # A "return" may occur in either the handler or the tried block.
        handling = self.getExceptionHandling()
        if handling is not None and handling.mayReturn():
            return True
        tried = self.getBlockTry()
        if tried is not None and tried.mayReturn():
            return True
        return False
    def mayBreak(self):
        # A "break" may occur in either the handler or the tried block.
        handling = self.getExceptionHandling()
        if handling is not None and handling.mayBreak():
            return True
        tried = self.getBlockTry()
        if tried is not None and tried.mayBreak():
            return True
        return False
    def mayContinue(self):
        # A "continue" may occur in either the handler or the tried block.
        handling = self.getExceptionHandling()
        if handling is not None and handling.mayContinue():
            return True
        tried = self.getBlockTry()
        if tried is not None and tried.mayContinue():
            return True
        return False
    def needsFrame(self):
        # Exception handling always requires a frame to attach to.
        return True
    def needsExceptionPublish(self):
        # Whether the caught exception must be made visible at Python level.
        return self.public_exc
    def computeStatement(self, constraint_collection):
        # The tried block can be processed normally.
        tried_statement_sequence = self.getBlockTry()
        # May be "None" from the outset, so guard against that, later we are
        # going to remove it.
        if tried_statement_sequence is not None:
            result = tried_statement_sequence.computeStatementsSequence(
                constraint_collection = constraint_collection
            )
            if result is not tried_statement_sequence:
                self.setBlockTry(result)
                tried_statement_sequence = result
        if tried_statement_sequence is None:
            return None, "new_statements", """\
Removed try/except with empty tried block."""
        # TODO: Need not to remove all knowledge, but only the parts that were
        # touched.
        constraint_collection.removeAllKnowledge()
        if self.getExceptionHandling() is not None:
            from nuitka.optimizations.ConstraintCollections import \
                ConstraintCollectionBranch
            # Process the handler as a conditional branch for its side
            # effects on the collection state; the result is unused.
            _collection_exception_handling = ConstraintCollectionBranch(
                parent = constraint_collection,
                branch = self.getExceptionHandling()
            )
        # Without exception handlers remaining, nothing else to do. They may
        # e.g. be removed as only re-raising.
        if self.getExceptionHandling() and \
           self.getExceptionHandling().getStatements()[0].\
             isStatementReraiseException():
            return tried_statement_sequence, "new_statements", """\
Removed try/except without any remaing handlers."""
        # Remove exception handling, if it cannot happen.
        if not tried_statement_sequence.mayRaiseException(BaseException):
            return tried_statement_sequence, "new_statements", """\
Removed try/except with tried block that cannot raise."""
        # Give up, merging this is too hard for now, any amount of the tried
        # sequence may have executed together with one of the handlers, or all
        # of tried and no handlers. TODO: improve this to an actual merge, even
        # if a pessimistic one.
        constraint_collection.removeAllKnowledge()
        return self, None, None
|
import fire
import json
from sultan.api import Sultan
import os.path
TEMP_PATH = "temp/"
class EnvDeploy:
    """Cloud Foundry environment deployer, driven from the command line
    via `fire` (see the __main__ guard at module bottom).

    Naming scheme for an environment `name`:
      env           -> dm-<name>
      apps          -> dm-<name>-<app>
      configs (UPS) -> ups-dm-<name>-<app> / ups-dm-<name>-common
      database      -> marketplace-dm-<name>-shared

    NOTE(review): uses dict.iteritems() throughout, i.e. Python 2 only.
    """
    def down(self, name, skip_db=False):
        # Tear down apps, UPS config services and (unless skip_db) the DB.
        self.app(name, True)
        self.ups_service(name, True)
        if skip_db is False:
            self.execute_db_task(name=name, delete=True)
            self.postgres_service(name, True)
    def up(self, name, db_name="digitalmarketplace", skip_db=False):
        # Full environment bring-up: DB, configs, services, manifests, apps.
        if skip_db is False:
            self.postgres_service(name)
            self.execute_db_task(name, db_name)
        self.generate_config_files(name)
        self.ups_service(name)
        self.generate_manifest_files(name)
        self.app(name)
    def stop(self, name):
        # `cf stop` every app of the environment.
        with Sultan.load() as s:
            app_names = self.__get_app_names(name)
            for k, v in app_names.iteritems():
                self.__print_result(s.cf("stop", v).run())
    def start(self, name):
        # `cf start` every app of the environment.
        with Sultan.load() as s:
            app_names = self.__get_app_names(name)
            for k, v in app_names.iteritems():
                self.__print_result(s.cf("start", v).run())
    def ups_secret_service(self, secret_file, delete=False):
        # Create (or delete) the shared secret user-provided service.
        # `echo y |` answers cf's interactive delete confirmation.
        with Sultan.load() as s:
            if delete is True:
                self.__print_result(s.echo("y").pipe().cf("delete-service", "ups-secret-service").run())
            else:
                self.__print_result(
                    s.cf("create-user-provided-service",
                         "ups-secret-service",
                         "-p {}".format(secret_file))
                    .run())
    def ups_service(self, name, delete=False):
        # Create (or delete) the common config service plus one per-app
        # config service, fed from JSON files under temp/.
        ups_service_names = self.__get_ups_service_names(name)
        with Sultan.load() as s:
            common_config_name = self.__get_common_config(name)
            if delete is True:
                self.__print_result(s.echo("y").pipe().cf("delete-service", common_config_name).run())
                for k, v in ups_service_names.iteritems():
                    self.__print_result(s.echo("y").pipe().cf("delete-service", v).run())
            else:
                self.__print_result(
                    s.cf("create-user-provided-service",
                         common_config_name,
                         "-p {}{}.json".format(TEMP_PATH, common_config_name))
                    .run())
                for k, v in ups_service_names.iteritems():
                    file = "{}{}.json".format(TEMP_PATH, v)
                    if os.path.exists(file):
                        self.__print_result(
                            s.cf("create-user-provided-service", v, "-p {}".format(file)).run())
                    else:
                        print("Cannot find '{}'. Execute python generate_config_files {} first".format(file, name))
    def postgres_service(self, name, delete=False):
        # Create (or delete) the shared postgres service and enable the
        # pg_trgm extension on creation.
        postgres_service_name = self.__get_postgres_service_name(name)
        with Sultan.load() as s:
            if delete is True:
                self.__print_result(s.echo("y").pipe().cf("delete-service", postgres_service_name).run())
            else:
                self.__print_result(s.cf("create-service", "postgres", "shared", postgres_service_name).run())
                self.__print_result(s.cf("update-service", postgres_service_name, "-c '{\"extensions\":[\"pg_trgm\"]}'").run())
    def app(self, name, delete=False):
        # Delete all apps, or build and zero-downtime-push each app from
        # its checkout directory three levels up from this script.
        app_names = self.__get_app_names(name)
        app_configs = self.__get_app_config(name)
        api_path = app_configs["api"]["path"]
        if delete is True:
            with Sultan.load() as s:
                for k, v in app_names.iteritems():
                    self.__print_result(s.echo("y").pipe().cf("delete", v, "-r").run())
        else:
            with Sultan.load() as s:
                apps_len = len(app_names)
                i = 1
                s.cd("../../../").and_()
                for k, v in app_names.iteritems():
                    s.cd("{}".format(app_configs[k]["path"])).and_()
                    path = "../../../{}".format(app_configs[k]["path"])
                    # manifest lives in the api repo's env-deploy temp dir
                    file = "../{}/scripts/env-deploy/{}{}.yml".format(api_path, TEMP_PATH, v)
                    if os.path.exists("{}/{}".format(path, file)):
                        npm_commands = app_configs[k]["npm"]
                        for npm_command in npm_commands:
                            s.npm(npm_command).and_()
                        s.cf("zero-downtime-push", v, "-show-app-log", "-f {}".format(file)).and_()
                    else:
                        print("Cannot find '{}'. Execute python generate_manifest_files {} first".format(file, name))
                    i += 1
                    # chain `cd ..` with and_() except after the last app
                    if i <= apps_len:
                        s.cd("..").and_()
                    else:
                        s.cd("..")
                self.__print_result(s.run())
    def generate_config_files(self, name):
        # Render the common and per-app config JSON files from templates/
        # into temp/, substituting the environment name.
        common_config_name = self.__get_common_config(name)
        env_name = self.__get_env_name(name)
        common_config_template = None
        with open("templates/common-config.json.tpl", "r") as file:
            common_config_template = file.read()
        common_config_template = common_config_template.format(env_name=env_name)
        with open("{}{}.json".format(TEMP_PATH, common_config_name), "w") as file:
            file.write(common_config_template)
        ups_service_names = self.__get_ups_service_names(name)
        for k, v in ups_service_names.iteritems():
            config_template = None
            with open("templates/{}-config.json.tpl".format(k), "r") as file:
                config_template = file.read()
            config_template = config_template.format(env_name=env_name)
            with open("{}{}.json".format(TEMP_PATH, v), "w") as file:
                file.write(config_template)
    def generate_manifest_files(self, name):
        # Render one manifest per app.
        app_names = self.__get_app_names(name)
        env_name = self.__get_env_name(name)
        for k, v in app_names.iteritems():
            self.generate_manifest_file(name, env_name, k)
    def generate_manifest_file(self, name, env_name, app_name):
        # Render templates/<app>-manifest.yml.tpl into temp/dm-<name>-<app>.yml
        # with every app/service name substituted.
        app_names = self.__get_app_names(name)
        ups_service_names = self.__get_ups_service_names(name)
        common_config_name = self.__get_common_config(name)
        postgres_service_name = self.__get_postgres_service_name(name)
        manifest_template = None
        with open("templates/{}-manifest.yml.tpl".format(app_name), "r") as file:
            manifest_template = file.read()
        manifest_template = manifest_template.format(env_name=env_name,
                                                     common_config_name=common_config_name,
                                                     postgres_service_name=postgres_service_name,
                                                     api_name=app_names["api"],
                                                     api_config_name=ups_service_names["api"],
                                                     buyer_name=app_names["buyer"],
                                                     buyer_config_name=ups_service_names["buyer"],
                                                     supplier_name=app_names["supplier"],
                                                     supplier_config_name=ups_service_names["supplier"],
                                                     frontend_name=app_names["frontend"],
                                                     frontend_config_name=ups_service_names["frontend"],
                                                     admin_name=app_names["admin"],
                                                     admin_config_name=ups_service_names["admin"])
        with open("{}dm-{}-{}.yml".format(TEMP_PATH, name, app_name), "w") as file:
            file.write(manifest_template)
    def execute_db_task(self, name, db_name="digitalmarketplace", snapshot_file="snapshot.tar", delete=False):
        # Dump the local DB, push a one-off db-task app, then restore the
        # dump into the environment's postgres service via that app.
        env_name = self.__get_env_name(name)
        db_task_name = "{}-db-task".format(env_name)
        if delete is True:
            print("deleting {}".format(db_task_name))
            with Sultan.load() as s:
                self.__print_result(s.echo("y").pipe().cf("delete", db_task_name).run())
            return
        self.generate_manifest_file(name, env_name, "db-task")
        with Sultan.load(cwd="schema-sync") as s:
            self.__print_result(
                s.pg_dump("--no-owner",
                          "--no-privileges",
                          "--column-inserts",
                          "--dbname={}".format(db_name),
                          "-f snapshot.tar", "-F t").run())
            self.__print_result(
                s.cf("push",
                     db_task_name,
                     "-f ../{}{}.yml".format(TEMP_PATH, db_task_name)).run())
        with Sultan.load() as s:
            result = s.cf("app", db_task_name, "--guid").run()
            self.__print_result(result)
            db_task_id = result.stdout[0]
            # fetch the task app's env to learn the bound postgres URI
            db_task_env_file_name = "{}db-task-env.json".format(TEMP_PATH)
            result = s.cf("curl", '"/v2/apps/{}/env"'.format(db_task_id)).redirect(
                db_task_env_file_name,
                append=False,
                stdout=True,
                stderr=False).run()
            self.__print_result(result)
            with open(db_task_env_file_name) as data_file:
                db_task_env = json.load(data_file)
            postgres_uri = db_task_env["system_env_json"]["VCAP_SERVICES"]["postgres"][0]["credentials"]["uri"]
            print(postgres_uri)
            result = s.cf("run-and-wait",
                          db_task_name,
                          '"pgutils/pg_restore --no-owner --dbname={postgres_uri} {snapshot_file}"'
                          .format(postgres_uri=postgres_uri,
                                  snapshot_file=snapshot_file)).run()
            self.__print_result(result)
            self.__print_result(s.cf("stop", db_task_name).run())
    def __print_result(self, result):
        # Dump a Sultan command result (stdout, stderr, return code).
        print("stdout")
        for i in result.stdout:
            print(i)
        print("stderr")
        for i in result.stderr:
            print(i)
        print("return code")
        print(result.rc)
    def __get_app_names(self, name):
        # {app key -> "dm-<name>-<app>"}
        env_name = self.__get_env_name(name)
        apps = self.__get_app_config(name)
        result = {}
        for k, v in apps.iteritems():
            result[k] = "{}-{}".format(env_name, k)
        return result
    def __get_ups_service_names(self, name):
        # {app key -> "ups-dm-<name>-<app>"}
        env_name = "ups-dm-{}".format(name)
        apps = self.__get_app_config(name)
        result = {}
        for k, v in apps.iteritems():
            result[k] = "{}-{}".format(env_name, k)
        return result
    def __get_env_name(self, name):
        # "dm-<name>"
        return "dm-{}".format(name)
    def __get_postgres_service_name(self, name):
        # "marketplace-dm-<name>-shared"
        env_name = self.__get_env_name(name)
        return "marketplace-{}-shared".format(env_name)
    def __get_common_config(self, name):
        # "ups-dm-<name>-common"
        env_name = self.__get_env_name(name)
        common_config_name = "ups-{}-common".format(env_name)
        return common_config_name
    def __get_app_config(self, name):
        # Load app_config.json from the working directory.
        app_config = None
        with open("app_config.json") as app_config_file:
            app_config = json.load(app_config_file)
        return app_config
# Expose every EnvDeploy method as a CLI sub-command via python-fire.
if __name__ == '__main__':
    fire.Fire(EnvDeploy)
|
import pandas as pd
import numpy as np
from sklearn import model_selection
from sklearn.model_selection import KFold
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from utilities import train_model
from sklearn.ensemble import RandomForestClassifier
#Read LIWC output file
df = pd.read_csv("./Real_Life_Trial_Data/LIWC_RealLife.csv")
#split training and testing data
# features are columns 2..-2, the last column is the label
x_train, x_test, y_train, y_test = model_selection.train_test_split(df.iloc[:,2:-1],df.iloc[:,-1], test_size=0.15, random_state=0)
# Define classifiers
# NOTE(review): n_estimators=2 is unusually small for a random forest, and
# max_features='auto' is deprecated in recent scikit-learn - confirm the
# pinned sklearn version before changing either.
classifier1=GaussianNB()
classifier2=SVC(kernel='linear')
classifier3 = RandomForestClassifier(n_estimators=2, random_state=0, max_features='auto', min_samples_split=2)
#K-Fold cross validation on training set
k=5
kf=KFold(n_splits=k,shuffle=True,random_state=0)
print("K-Fold cross validation (K=%d)"%k)
i=1
for train_index, valid_index in kf.split(x_train):
    print("\nFold ",i)
    i+=1
    training_data,valid_data=x_train.iloc[train_index],x_train.iloc[valid_index]
    expected_labels = y_train.iloc[valid_index]
    result1=train_model(classifier1,training_data,y_train.iloc[train_index], valid_data, expected_labels)
    print("NB result : ",result1)
    result2=train_model(classifier2,training_data,y_train.iloc[train_index], valid_data, expected_labels)
    print("SVM result : ",result2)
    result3=train_model(classifier3, training_data, y_train.iloc[train_index], valid_data, expected_labels)
    print("Random Forest result : ",result3)
#Final classification on the held-out 15% test split
print("Train-test classification...\n")
result1=train_model(classifier1,x_train, y_train, x_test, y_test)
print("NB result : ",result1)
result2=train_model(classifier2,x_train, y_train, x_test, y_test)
print("SVM result : ",result2)
result3=train_model(classifier3,x_train, y_train, x_test, y_test)
print("Random Forest result : ",result3)
from collections import OrderedDict
from functools import wraps
from itertools import chain
from graphql.core.type import GraphQLArgument
from ...utils import ProxySnakeDict, to_camel_case
from .base import ArgumentType, BaseType, OrderedType
class Argument(OrderedType):
    """Typed argument declaration for a GraphQL field.

    `name` may be None at construction time; to_arguments() assigns it
    from the keyword the argument was declared under.
    """
    def __init__(self, type, description=None, default=None,
                 name=None, _creation_counter=None):
        super(Argument, self).__init__(_creation_counter=_creation_counter)
        self.name = name
        self.type = type
        self.description = description
        self.default = default

    def internal_type(self, schema):
        """Build the graphql-core GraphQLArgument for this argument."""
        return GraphQLArgument(
            schema.T(self.type),
            self.default, self.description)

    def __repr__(self):
        # fix: repr() must return a str; self.name is None until
        # to_arguments() assigns it, which made the original raise
        # TypeError when such an argument was repr()'d.
        return str(self.name)
class ArgumentsGroup(BaseType):
    """Ordered, name-keyed collection of Argument objects for one field."""
    def __init__(self, *args, **kwargs):
        # Normalize all positional/keyword specs through to_arguments(),
        # then key them by their (camelCased) name, preserving sort order.
        arguments = to_arguments(*args, **kwargs)
        self.arguments = OrderedDict([(arg.name, arg) for arg in arguments])
    def internal_type(self, schema):
        # Map each argument to its graphql-core representation.
        return OrderedDict([(arg.name, schema.T(arg))
                            for arg in self.arguments.values()])
    def __len__(self):
        return len(self.arguments)
    def __iter__(self):
        # Iterates argument *names* (dict iteration), not Argument objects.
        return iter(self.arguments)
    def __contains__(self, *args):
        return self.arguments.__contains__(*args)
    def __getitem__(self, *args):
        return self.arguments.__getitem__(*args)
def to_arguments(*args, **kwargs):
    """Normalize argument specs into a sorted list of Argument objects.

    Positional specs keep their own name; keyword specs are renamed to the
    camelCased keyword. Duplicate or missing names are rejected.
    """
    collected = {}
    for key, spec in chain(kwargs.items(), ((None, a) for a in args)):
        if isinstance(spec, Argument):
            argument = spec
        elif isinstance(spec, ArgumentType):
            argument = spec.as_argument()
        else:
            raise ValueError('Unknown argument %s=%r' % (key, spec))
        if key:
            argument.name = to_camel_case(key)
        assert argument.name, 'Argument in field must have a name'
        assert argument.name not in collected, 'Found more than one Argument with same name {}'.format(
            argument.name)
        collected[argument.name] = argument
    return sorted(collected.values())
def snake_case_args(resolver):
    """Decorator: hand the resolver its args dict with snake_case access."""
    @wraps(resolver)
    def inner(instance, args, info):
        snake_args = ProxySnakeDict(args)
        return resolver(instance, snake_args, info)
    return inner
|
import media
import fresh_tomatoes
# Build four media.Movie instances (title, storyline, poster URL, trailer
# URL) and render them to a "fresh tomatoes" HTML page.
# first instance of class Movie
justice_league = media.Movie("Justice League",
                             "A group of superheroes unite",
                             "https://upload.wikimedia.org/wikipedia/en/3/31/Justice_League_film_poster.jpg",  # NOQA
                             "https://www.youtube.com/watch?v=r9-DM9uBtVI")
# second instance of class Movie
thor_ragnarok = media.Movie("Thor Ragnarok",
                            "Thor must stop Hela and ragnarok should occur",
                            "https://upload.wikimedia.org/wikipedia/en/7/7d/Thor_Ragnarok_poster.jpg",  # NOQA
                            "https://www.youtube.com/watch?v=ue80QwXMRHg")
# third instance of class Movie
dunkirk = media.Movie("Dunkirk",
                      "Allied Forces rescue mission from the Dunkirk",
                      "https://upload.wikimedia.org/wikipedia/en/1/15/Dunkirk_Film_poster.jpg",  # NOQA
                      "https://www.youtube.com/watch?v=F-eMt3SrfFU")
# fourth instance of class Movie
spiderman_homecoming = media.Movie("Spiderman Homecoming",
                                   "Spiderman fights to protect the home",
                                   "https://upload.wikimedia.org/wikipedia/en/f/f9/Spider-Man_Homecoming_poster.jpg",  # NOQA
                                   "https://www.youtube.com/watch?v=U0D3AOldjMU")  # NOQA
# these objects are stored in a list; its order is the page display order
movies = [justice_league, dunkirk, spiderman_homecoming, thor_ragnarok]
# open_movies_page() renders the list to HTML and opens it in a browser
fresh_tomatoes.open_movies_page(movies)
|
import jinja2
from xhtml2pdf import pisa
# Jinja2 environment rooted at the template folder; the template is loaded
# once at import time and reused by every render() call.
fileSystem = jinja2.FileSystemLoader(searchpath="./PrintFunction/templates")
env = jinja2.Environment(loader=fileSystem)
template = env.get_template("familleProduitTemplate.html")
def render(data, name=None):
    """Render the product-family template and write it to ``<name>.pdf``.

    data: sequence whose first two items are the family code and label.
    name: output basename without extension; falsy values fall back to
          "familleProduit".
    """
    html = template.render(codefamilleproduit=data[0], nomfamilleproduit=data[1])
    output_name = name or "familleProduit"
    with open("{}.pdf".format(output_name), "wb") as pdf_file:
        pisa.CreatePDF(html, dest=pdf_file)
|
import numpy as np
import astropy.units as au
from astropy.units import Quantity
import pylab as plt
from born_rime.fourier import fourier, inv_fourier, fft_freqs
from born_rime.greens import two_dim_g, two_dim_G
from born_rime.potentials import partial_blockage, pad_with_absorbing_boundary_conditions
def main():
    """
    First we compare convergence of Born series to exact solution on a partially blocked bar:
    | * source
    |
    | _________________
    | | n = 1 - dn
    | |________________
    |
    |
    | x receiver
    |(0,0)
    """
    # Observing frequency: 50 MHz.
    nu = 50e6 / au.s
    # Build the blocked-bar potential, then pad with absorbing boundaries;
    # `m` holds the margin indices used below to outline the physical region.
    x, z, k2, k02 = partial_blockage(1000, nu, True)
    k2, m, (x, z) = pad_with_absorbing_boundary_conditions(k2, k02, 1, x, z, dn_max=0.001)
    # corner_indices = [0,0]
    # Dimensionless scattering potential (k^2 - k0^2) / k0^2.
    scatter_potential = (k2 - k02)/k02
    plt.imshow(np.abs(scatter_potential.T.value), interpolation='nearest', origin='lower',
               extent=(x.min().value, x.max().value, z.min().value, z.max().value),
               cmap='bone')
    # Red rectangle marks the un-padded (physical) domain.
    plt.plot(Quantity([x[m[0]], x[-m[0]], x[-m[0]], x[m[0]], x[m[0]]]).value,
             Quantity([z[m[1]], z[m[1]], z[-m[1]], z[-m[1]], z[m[1]]]).value, c='red')
    plt.title(r'Sinusoidal partial blockage potential ($k^2(\mathbf{{x}}) - k_0^2$) at {} with boundary'.format(
        nu.to(au.MHz)))
    plt.colorbar(label='potential [{}]'.format(scatter_potential.unit))
    plt.show()
    X,Z = np.meshgrid(x,z,indexing='ij')
    R = np.sqrt((X-(-300.*au.m))**2 + (Z-(0*au.m))**2)
    # NOTE(review): this point-source incident field is immediately overwritten
    # by the plane-wave assignment on the next line -- dead assignment, likely
    # a leftover experiment; confirm which incident field is intended.
    E_i = np.exp(1j*np.sqrt(k02)*R)/(1*au.m**2 + R**2)
    E_i = np.exp(1j*np.sqrt(k02)*Z)
    # Real-space 2-D Green's function sampled on the grid.
    g = two_dim_g(np.sqrt(k02), X, Z)
    plt.imshow((np.abs(g).value), interpolation='nearest', origin='lower',
               extent=(x.min().value, x.max().value, z.min().value, z.max().value),
               cmap='bone')
    plt.colorbar()
    plt.plot(Quantity([x[m[0]], x[-m[0]], x[-m[0]], x[m[0]], x[m[0]]]).value,
             Quantity([z[m[1]], z[m[1]], z[-m[1]], z[-m[1]], z[m[1]]]).value, c='red')
    plt.title('g')
    plt.show()
    # Fourier-domain Green's function via numeric FFT (analytic alternative
    # two_dim_G is left commented out below).
    G_numeric = fourier(g, x, z)
    # sx, sy = fft_freqs(x,z)
    # Sx, Sy = np.meshgrid(sx,sy, indexing='ij')
    # G_numeric = two_dim_G(np.sqrt(k02),Sx, Sy)
    n = G_numeric.shape[0]
    pad_size = 1#int((n*0.6)/2.)
    plt.imshow((np.abs(G_numeric).value), interpolation='nearest', origin='lower',
               extent=(x.min().value, x.max().value, z.min().value, z.max().value),
               cmap='bone')
    plt.colorbar()
    plt.plot(Quantity([x[m[0]], x[-m[0]], x[-m[0]], x[m[0]], x[m[0]]]).value,
             Quantity([z[m[1]], z[m[1]], z[-m[1]], z[-m[1]], z[m[1]]]).value, c='red')
    plt.title('G')
    plt.show()
    # Zero-pad so the FFT-based convolution approximates a linear convolution.
    G_padded = np.pad(G_numeric,pad_size, mode='constant')
    x_padded = np.pad(x, pad_size, mode='linear_ramp')
    z_padded = np.pad(z, pad_size, mode='linear_ramp')
    # Born iteration: E_{n+1} = E_i + k0^2 * IFFT(G * FFT(V * E_n)),
    # plotting the scattered field at each order.
    E_born = E_i
    for i in range(1, 4):
        source = scatter_potential * E_born
        source_padded = np.pad(source, pad_size)
        f_source_padded = fourier(source_padded, x_padded, z_padded)
        E_born = E_i + k02*inv_fourier(G_padded * f_source_padded, x_padded, z_padded)[pad_size:-pad_size,pad_size:-pad_size]
        E_s = E_born - E_i
        plt.imshow((np.abs(E_s.T).value), interpolation='nearest', origin='lower',
                   extent=(x.min().value, x.max().value, z.min().value, z.max().value),
                   cmap='bone')
        plt.colorbar()
        plt.plot(Quantity([x[m[0]], x[-m[0]], x[-m[0]], x[m[0]], x[m[0]]]).value,
                 Quantity([z[m[1]], z[m[1]], z[-m[1]], z[-m[1]], z[m[1]]]).value, c='red')
        plt.title('Born-{}'.format(i))
        plt.show()
    # plt.plot(x, np.abs(E_born.T[0,:]))
    # # plt.xscale('log')
    # plt.show()
    #
    # plt.plot(x, np.angle(E_born.T[0, :]))
    # plt.show()
    # Visibilities from row 200 of the field: average each diagonal of the
    # coherence matrix and plot amplitude/phase against baseline b.
    _vis = E_born.T[200, :, None] * E_born.T[200, None, :].conj()
    vis = [np.mean(np.diagonal(_vis, i)) for i in range(x.size)]
    b = x[:, None] - x[None, :]
    plt.plot(b[0, :], np.abs(vis))
    plt.title('|vis|')
    plt.show()
    plt.plot(b[0, :], np.angle(vis))
    plt.title('Arg(vis)')
    plt.show()
    pass
if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
# Maps a Dalvik bytecode mnemonic to a one-letter opcode family, used to
# abstract method bodies into compact instruction-family strings.
bytecode_map = {
    "move": "M", # M instruction family: register/data moves
    "move/from16": "M",
    "move/16": "M",
    "move-wide": "M",
    "move-wide/from16": "M",
    "move-wide/16": "M",
    "move-object": "M",
    "move-object/from16": "M",
    "move-object/16": "M",
    "move-result": "M",
    "move-result-wide": "M",
    "move-result-object": "M",
    "move-exception": "M",
    "return-void": "R", # R instruction family: returns
    "return": "R",
    "return-wide": "R",
    "return-object": "R",
    "goto": "G", # G instruction family: unconditional branches
    "goto/16": "G",
    "goto/32": "G",
    "if-eq": "I", # I instruction family: conditional branches
    "if-ne": "I",
    "if-lt": "I",
    "if-ge": "I",
    "if-gt": "I",
    "if-le": "I",
    "if-eqz": "I",
    "if-nez": "I",
    "if-ltz": "I",
    "if-gez": "I",
    "if-gtz": "I",
    "if-lez": "I",
    "aget": "T", # T instruction family: array/field/static reads (get)
    "aget-wide": "T",
    "aget-object": "T",
    "aget-boolean": "T",
    "aget-byte": "T",
    "aget-char": "T",
    "aget-short": "T",
    "iget": "T",
    "iget-wide": "T",
    "iget-object": "T",
    "iget-boolean": "T",
    "iget-byte": "T",
    "iget-char": "T",
    "iget-short": "T",
    "sget": "T",
    "sget-wide": "T",
    "sget-object": "T",
    "sget-boolean": "T",
    "sget-byte": "T",
    "sget-char": "T",
    "sget-short": "T",
    "aput": "P", # P instruction family: array/field/static writes (put)
    "aput-wide": "P",
    "aput-object": "P",
    "aput-boolean": "P",
    "aput-byte": "P",
    "aput-char": "P",
    "aput-short": "P",
    "iput": "P",
    "iput-wide": "P",
    "iput-object": "P",
    "iput-boolean": "P",
    "iput-byte": "P",
    "iput-char": "P",
    "iput-short": "P",
    "sput": "P",
    "sput-wide": "P",
    "sput-object": "P",
    "sput-boolean": "P",
    "sput-byte": "P",
    "sput-char": "P",
    "sput-short": "P",
    "invoke-virtual": "V", # V instruction family: method invocations
    "invoke-super": "V",
    "invoke-direct": "V",
    "invoke-static": "V",
    "invoke-interface": "V",
    "invoke-virtual/range": "V",
    "invoke-super/range": "V",
    "invoke-direct/range": "V",
    "invoke-static/range": "V",
    "invoke-interface/range": "V",
    }
import socket, json, config, sys, os, time
class Api:
def __init__(self, db, sync):
self.db = db
self.sync = sync
while True:
try:
self.s = socket.socket()
self.s.bind((config.ip, config.port))
self.s.listen(5)
break
except:
print "Could not start socket! Will try again in 10 seconds..."
time.sleep(10)
def start(self):
while True:
c, addr = self.s.accept()
json_data = c.recv(1024)
try:
data = json.loads(json_data)
print "Received command " + data['cmd']
if data['cmd'] == "getseries":
c.send(json.dumps(self.db.get_series()))
elif data['cmd'] == "getseasons":
series_id = data["id"]
c.send(json.dumps(self.db.get_seasons(series_id)))
elif data['cmd'] == "getepisodes":
series_id = data["id"]
season = data["season"]
c.send(json.dumps(self.db.get_episodes(series_id, season)))
elif data['cmd'] == "getmovieposter":
id = data['id']
try:
format = data['format']
except:
format = "original"
path = "server/db/posters/movies/" + str(id) + "_" + format + ".jpg" if os.path.exists("server/db/posters/movies/" + str(id) + "_" + format + ".jpg") else "server/db/posters/unknown.jpg"
f = open(path, "r")
c.send(f.read())
f.close()
elif data['cmd'] == "getepisodeposter":
id = data['id']
path = "server/db/posters/episodes/" + str(id) + ".jpg" if os.path.exists("server/db/posters/episodes/" + str(id) + ".jpg") else "server/db/posters/unknown.jpg"
f = open(path, "r")
c.send(f.read())
f.close()
elif data['cmd'] == "getseriesposter":
id = data['id']
path = "server/db/posters/series/" + str(id) + ".jpg" if os.path.exists("server/db/posters/series/" + str(id) + ".jpg") else "server/db/posters/unknown.jpg"
f = open(path, "r")
c.send(f.read())
f.close()
elif data['cmd'] == "getmovies":
c.send(json.dumps(self.db.get_movies()))
elif data['cmd'] == "shutdown":
ans = {"msg" : "bye"}
c.send(json.dumps(ans))
c.close()
break
elif data['cmd'] == "resync":
self.sync.initSync()
ans = {"msg" : "ok"}
c.send(json.dumps(ans))
except:
print "Could not understand the request " + json_data
pass
c.close()
|
from keras.models import Model, Sequential
from keras.layers import Input, Flatten, Dense, Dropout
from keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D
from keras import backend as K
def VGG16(input_dim=224, input_depth=3, output_dim=1000, include_top=True):
    """Build a VGG16 convolutional network.

    Args:
        input_dim: height/width of the (square) input image.
        input_depth: number of input channels.
        output_dim: number of classes for the softmax head.
        include_top: when True, append the fully-connected classifier.

    Returns:
        An uncompiled Keras Model.
    """
    # Determine the input shape from the backend's image ordering
    # ('th' = channels-first, anything else = channels-last).
    # The original also computed an unused bn_axis here (with values inverted
    # relative to the ordering); VGG16 has no batch norm, so it was removed.
    if K.image_dim_ordering() == 'th':
        input_shape = (input_depth, input_dim, input_dim)
    else:
        input_shape = (input_dim, input_dim, input_depth)
    img_input = Input(shape=input_shape)
    # Block 1
    x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv1')(img_input)
    x = Conv2D(64, (3, 3), activation='relu', padding='same', name='block1_conv2')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)
    # Block 2
    x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv1')(x)
    x = Conv2D(128, (3, 3), activation='relu', padding='same', name='block2_conv2')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)
    # Block 3
    x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv1')(x)
    x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv2')(x)
    x = Conv2D(256, (3, 3), activation='relu', padding='same', name='block3_conv3')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)
    # Block 4
    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv1')(x)
    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv2')(x)
    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block4_conv3')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x)
    # Block 5
    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv1')(x)
    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv2')(x)
    x = Conv2D(512, (3, 3), activation='relu', padding='same', name='block5_conv3')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x)
    # Optional fully-connected classifier head.
    if include_top:
        x = Flatten(name='flatten')(x)
        x = Dense(4096, activation='relu', name='fc1')(x)
        x = Dense(4096, activation='relu', name='fc2')(x)
        x = Dense(output_dim, activation='softmax', name='predictions')(x)
    full_model = Model(inputs=img_input, outputs=x)
    return full_model
if __name__ == '__main__':
    # Smoke test: build the default ImageNet-sized model and print its layers.
    # (Python 2 print statement -- this section targets Python 2.)
    model = VGG16(input_dim=224, input_depth=3, output_dim=1000, include_top=True)
    print model.summary()
# def VGG_16():
# model = Sequential()
# model.add(ZeroPadding2D((1,1),input_shape=(3,224,224)))
# model.add(Convolution2D(64, 3, 3, activation='relu'))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(64, 3, 3, activation='relu'))
# model.add(MaxPooling2D((2,2), strides=(2,2)))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(128, 3, 3, activation='relu'))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(128, 3, 3, activation='relu'))
# model.add(MaxPooling2D((2,2), strides=(2,2)))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(256, 3, 3, activation='relu'))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(256, 3, 3, activation='relu'))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(256, 3, 3, activation='relu'))
# model.add(MaxPooling2D((2,2), strides=(2,2)))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(512, 3, 3, activation='relu'))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(512, 3, 3, activation='relu'))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(512, 3, 3, activation='relu'))
# model.add(MaxPooling2D((2,2), strides=(2,2)))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(512, 3, 3, activation='relu'))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(512, 3, 3, activation='relu'))
# model.add(ZeroPadding2D((1,1)))
# model.add(Convolution2D(512, 3, 3, activation='relu'))
# model.add(MaxPooling2D((2,2), strides=(2,2)))
# model.add(Flatten())
# model.add(Dense(4096, activation='relu'))
# model.add(Dropout(0.5))
# model.add(Dense(4096, activation='relu'))
# model.add(Dropout(0.5))
# model.add(Dense(1000, activation='softmax'))
# return model
|
from pathlib import Path
import inspect
from datetime import datetime
import re
import xmltodict
import json
import shutil
from utils import loadurlmap, add_syndication, get_content, add_to_listmap, urlmap_to_mdfile, clean_string
from utils import MDSearcher, URLResolver, PostBuilder, CommentBuilder
# Site URL map loaded at import time.  NOTE(review): `urlmap` is not
# referenced in this script's visible code -- confirm it is still needed.
urlmap = loadurlmap(False)
# Destination folder where imported video files are archived.
archivepath = Path("C:\\temp\\YouTube\\archive")
def import_youtube(importfilepath):
    """Convert an exported YouTube folder into site posts.

    Scans importfilepath recursively for per-video JSON metadata, builds a
    PostBuilder post for every video whose privacyStatus is "public", copies
    the matching .flv/.mp4 file into `archivepath`, and prints summary counts
    (public, non-public, total) at the end.

    Side effects: writes posts via PostBuilder.save(), copies video files,
    prints progress/diagnostics to stdout.
    """
    post_count = 0
    public = []
    nonpublic = []
    importpath = Path(importfilepath)
    # Index every video file by its stem so JSON metadata can be matched to it.
    videofiles = {}
    for videofile in importpath.glob("**/*.flv"):
        videofiles[videofile.stem] = videofile
    for videofile in importpath.glob("**/*.mp4"):
        videofiles[videofile.stem] = videofile
    for jsonfile in importpath.glob("**/*.json"):
        post_count += 1
        with jsonfile.open(encoding="UTF-8") as f:
            # Each file holds a one-element list wrapping the video document.
            doc = json.loads(f.read())[0]
        if "public" != doc["status"]["privacyStatus"]:
            nonpublic.append(doc)
            continue
        public.append(doc)
        video_id = doc["id"]
        title = doc["snippet"]["title"]
        desc = doc["snippet"]["description"]
        url = "https://www.youtube.com/watch?v=%s" % (video_id)
        # "publishedAt" : "2014-11-16T06:16:06.000Z" -> strip millis + 'Z'.
        date = doc["snippet"]["publishedAt"]
        date = datetime.strptime(date[:-5], "%Y-%m-%dT%H:%M:%S")
        # The JSON filename is "<video stem>-<suffix>"; drop the final dash
        # segment to recover the video file's stem.
        stem = jsonfile.stem
        idx = stem.rfind("-")
        videofile = videofiles.get(stem[0:idx])
        post = PostBuilder(video_id, source="youtube", content="%s\n%s\n\n{{< youtube \"%s\" >}}" % (title, desc, video_id))
        post.date = date
        post.add_syndication("youtube", url)
        post.tags = doc["snippet"].get("tags", [])
        if videofile is None:
            print("#### Could not find videofile for %s" % (stem))
            print(len(stem))
        else:
            newfilename = "%s%s" % (video_id, videofile.suffix)
            post.params["archive_url"] = "https://roytang.net/archive/videos/%s" % (newfilename)
            # Copy the video into the archive only if not already there.
            newfile = archivepath / newfilename
            if not newfile.exists():
                shutil.copy(str(videofile), str(newfile))
        post.save()
    print(len(public))
    print(len(nonpublic))
    print(post_count)
import_youtube("C:\\temp\\Youtube\\videos")
|
from flask import Flask, render_template
# Minimal demo app: a single GET route rendered from layout.html.
app = Flask(__name__)
@app.route("/", methods=["GET"])
def index():
    # Context handed to the template: a name mapping plus a few flags/strings.
    names = {'name' : 'Anthony'}
    return render_template("layout.html", names=names, language='Python', lang=False, framework='Flask')
if __name__ == "__main__":
    # debug=True enables the reloader/debugger -- development use only.
    app.run(debug=True)
from flask import Blueprint, render_template, request, jsonify
pie_blueprint = Blueprint('pie', __name__, url_prefix='/pie')
@pie_blueprint.route('/', methods=("GET", "POST"))
def index():
    """Serve pie-ingredient data as JSON.

    GET returns just the first ingredient; POST returns the whole list.
    (The original handled only GET, so a POST request made the view return
    None, which Flask converts into a 500 server error.)
    """
    ingredient = ['apples', 'strawberries', 'hampster']
    if request.method == 'GET':
        return jsonify({'pie ingredient': ingredient[0]})
    # POST now gets an explicit response instead of falling through.
    return jsonify({'pie ingredients': ingredient})
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 31 18:10:37 2016
@author: saisugeethkamineni
"""
# Leave Automation Project #
'''
start
'''
import time
import math
from sys import exit as close
import numpy as np
import matplotlib.pyplot as plt
# Load the whole employee-leave CSV into memory.
database = open('leave automation.csv', 'r')
readDatabase = database.read()
# print('\n', readDatabase)
# Split into rows, then into comma-separated cells.
rows = readDatabase.split('\n')
# print (rows)
employee_rows = []
for i in range(len(rows)):
    employee_rows.append(rows[i])
# print (employee_rows)
employees = []
i = 0
for j in employee_rows:
    employees.append(j.split(','))
    # Drop the empty cell produced by each row's trailing comma.
    del(employees[i][(len(employees[i])-1)])
    i += 1
# print (employees)
# Wrap every cell in a one-element list so later code can .append() derived
# values alongside the raw value.
employees_data = []
for k in employees:
    employees_cells = []
    for l in k:
        employees_cells.append([l])
    employees_data.append(employees_cells)
# print('\n', employees_data)
# Empty cells become empty lists (they serialize back to '' on save).
for a in employees_data:
    for b in a:
        if b[0] == '':
            b.clear()
# print('\n', employees_data)
# Validate each employee's joining date (column 4, 'DD-MM-YY': e = day,
# c = month, d = two-digit year) and compute leave quotas/balances in place.
# Columns appended below -- presumably quota/used/balance triples for
# CL (5/6/7), SL (8/9/12), EL (13/14/17) and PDL (18/19/20); confirm against
# the CSV header row.
for a in employees_data:
    b = a[4][0]
    e = b[:2]
    c = b[3:5]
    d = b[6:]
    # NOTE(review): `(4 or 6 or 9 or 11)` evaluates to 4, so the "31st of a
    # 30-day month" check only rejects 31 April -- June/September/November
    # slip through.
    if int(e) > 31 or int(c) > 12 or int(d) > 16 or (int(e) > 29 and int(c) == 2) or (
            int(e) == 31 and (int(c) == (4 or 6 or 9 or 11))):
        print ("\nIncorrect joining date.")
        close()
    # Employees who did not join in the current (epoch-derived) year get the
    # full annual quotas; new joiners fall to the pro-rating branch below.
    if int(d) + 30 != int(time.time() / (60 * 60 * 24 * 365)):
        a[5].append(8)
        a[18].append(24)
        a[6][0] = int(a[6][0])
        a[7].append(a[5][0] - a[6][0])
        a[19][0] = int(a[19][0])
        a[20].append(a[18][0] - a[19][0])
        a[9][0] = int(a[9][0])
        a[14][0] = int(a[14][0])
        # Pro-rate m (out of 10) by remaining months, rounding half-up.
        if int(e) < 15:
            l = (((13 - int(c)) / 12.0) * 10.0)
            if l >= int(l) + 0.5:
                m = math.ceil(l)
            else:
                m = math.floor(l)
        else:
            l = (((12 - int(c)) / 12.0) * 10.0)
            if l >= int(l) + 0.5:
                m = math.ceil(l)
            else:
                m = math.floor(l)
        if int(d) == 15:
            a[14][0] = int(a[14][0])
            a[11][0] = int(a[11][0])
            a[15][0] = int(a[15][0])
            a[10][0] = int(a[10][0])
            a[16][0] = int(a[16][0])
            a[8].append((m - a[10][0] + 10))
            a[12].append(a[8][0] - a[9][0])
            # The fractional part of the epoch year decides first vs second
            # half of the current year.
            if int(c) <= 6:
                if (time.time() / (60 * 60 * 24 * 365.0)) - (int(time.time() / (60 * 60 * 24 * 365.0))) <= 0.5:
                    a[13].append(30 - a[15][0] + 15)
                if (time.time() / (60 * 60 * 24 * 365.0)) - (int(time.time() / (60 * 60 * 24 * 365.0))) > 0.5:
                    a[13].append(30 - a[15][0] + 30)
            if int(c) > 7:
                if (time.time() / (60 * 60 * 24 * 365.0)) - (int(time.time() / (60 * 60 * 24 * 365.0))) <= 0.5:
                    a[13].append(15 - a[15][0] + 15)
                if (time.time() / (60 * 60 * 24 * 365.0)) - (int(time.time() / (60 * 60 * 24 * 365.0))) > 0.5:
                    a[13].append(15 - a[15][0] + 30)
            a[17].append(a[13][0] - a[14][0])
        if int(d) == 14:
            a[14][0] = int(a[14][0])
            a[15][0] = int(a[15][0])
            a[10][0] = int(a[10][0])
            a[11][0] = int(a[11][0])
            a[16][0] = int(a[16][0])
            a[8].append(m - a[11][0] + 10 - a[10][0] + 10)
            a[12].append(a[8][0] - a[9][0])
            a[16][0] = int(a[16][0])
            if int(c) <= 6:
                if (time.time() / (60 * 60 * 24 * 365.0)) - (int(time.time() / (60 * 60 * 24 * 365.0))) <= 0.5:
                    a[13].append(30 - a[16][0] + 30 - a[15][0] + 15)
                if (time.time() / (60 * 60 * 24 * 365.0)) - (int(time.time() / (60 * 60 * 24 * 365.0))) > 0.5:
                    a[13].append(30 - a[16][0] + 30 - a[15][0] + 30)
            if int(c) > 7:
                if (time.time() / (60 * 60 * 24 * 365.0)) - (int(time.time() / (60 * 60 * 24 * 365.0))) <= 0.5:
                    a[13].append(15 - a[16][0] + 30 - a[15][0] + 15)
                if (time.time() / (60 * 60 * 24 * 365.0)) - (int(time.time() / (60 * 60 * 24 * 365.0))) > 0.5:
                    a[13].append(15 - a[16][0] + 30 - a[15][0] + 30)
            a[17].append(a[13][0] - a[14][0])
    else:
        # Joined in the current year: pro-rate every quota by months left.
        a[10][0] = int(a[10][0])
        a[11][0] = int(a[11][0])
        a[14][0] = int(a[14][0])
        a[15][0] = int(a[15][0])
        a[16][0] = int(a[16][0])
        if int(c) <= 6:
            if (time.time() / (60 * 60 * 24 * 365.0)) - (int(time.time() / (60 * 60 * 24 * 365.0))) <= 0.5:
                a[13].append(15)
            else:
                a[13].append(30)
            a[17].append(a[13][0] - a[14][0])
        if int(c) > 7:
            a[13].append(15)
            a[17].append(a[13][0] - a[14][0])
        if int(e) < 15:
            f = (((13 - int(c)) / 12.0) * 8.0)
            if f >= int(f) + 0.5:
                h = math.ceil(f)
            else:
                h = math.floor(f)
            g = (((13 - int(c)) / 12.0) * 24.0)
            # NOTE(review): `int(f)` below is probably meant to be `int(g)`
            # (half-up rounding of g); same in the sibling branch.
            if g >= int(f) + 0.5:
                i = math.ceil(g)
            else:
                i = math.floor(g)
            l = (((13 - int(c)) / 12.0) * 10.0)
            if l >= int(l) + 0.5:
                m = math.ceil(l)
            else:
                m = math.floor(l)
            a[5].append(h)
            a[18].append(i)
            a[8].append(m)
            a[6][0] = int(a[6][0])
            a[7].append(a[5][0] - a[6][0])
            a[19][0] = int(a[19][0])
            a[20].append(a[18][0] - a[19][0])
            a[9][0] = int(a[9][0])
            a[12].append(a[8][0] - a[9][0])
        else:
            f = (((12 - int(c)) / 12.0) * 8.0)
            if f >= int(f) + 0.5:
                j = math.ceil(f)
            else:
                j = math.floor(f)
            g = (((12 - int(c)) / 12.0) * 24.0)
            if g >= int(f) + 0.5:
                k = math.ceil(g)
            else:
                k = math.floor(g)
            l = (((12 - int(c)) / 12.0) * 10.0)
            if l >= int(l) + 0.5:
                m = math.ceil(l)
            else:
                m = math.floor(l)
            a[5].append(j)
            a[18].append(k)
            a[8].append(m)
            a[6][0] = int(a[6][0])
            a[7].append(a[5][0] - a[6][0])
            a[19][0] = int(a[19][0])
            a[20].append(a[18][0] - a[19][0])
            a[9][0] = int(a[9][0])
            a[12].append(a[8][0] - a[9][0])
# print('\n', employees_data)
# Derive parenting/maternity/child-care columns (21..29) from the flag in
# column 30 and gender in column 3.  Flag 0 -> no entitlement; 1 or 2 ->
# entitlement split by gender (PL for men, ML+CC for women).
for x in employees_data:
    x[30][0] = int(x[30][0])
    if x[30][0] == 0:
        x[22][0] = int(x[22][0])
        x[25][0] = int(x[25][0])
        x[28][0] = int(x[28][0])
        x[21].append(0)
        x[23].append(0)
        x[24].append(0)
        x[26].append(0)
        x[27].append(0)
        x[29].append(0)
    if (x[30][0]) == 1 or (x[30][0]) == 2:
        if (x[3][0]).lower() == 'male':
            x[21].append(32)
            x[22][0] = int(x[22][0])
            x[23].append(x[21][0] - x[22][0])
            x[24].append(0)
            x[25][0] = int(x[25][0])
            x[26].append(0)
            x[27].append(0)
            x[28][0] = int(x[28][0])
            x[29].append(0)
        else:
            x[24].append(90)
            x[27].append(30)
            x[25][0] = int(x[25][0])
            x[26].append(x[24][0] - x[25][0])
            x[28][0] = int(x[28][0])
            x[29].append(x[27][0] - x[28][0])
            x[21].append(0)
            x[22][0] = int(x[22][0])
            x[23].append(0)
#
##if __name__ == '__main__':
## print('\n', employees_data)
# '''
# done
# '''
# Interactive entry point: greet the employee, then run query or update mode.
empid = input("Enter Employee ID: ")
for a in employees_data:
    q = a[1][0]
    if (empid == q):
        if (a[3][0] == "Male"):
            print("\nWelcome Mr." + a[2][0])
        else:
            print("\nWelcome Miss / Mrs." + a[2][0])
print('\n1. Query mode \n2. Update Mode')
choice = int(input("Enter '1' for query mode and '2' for update mode: "))
if choice == 1:
    # Locate this employee's row.
    for employee in employees_data:
        if empid == employee[1][0]:
            a = employee
            break
    n_groups = 7
    # print (a)
    # Used vs remaining counts for the seven leave categories.
    used = [a[6][0], a[9][0], a[14][0], a[19][0], a[22][0], a[25][0], a[28][0]]
    balance = [a[7][0], a[12][0], a[17][0], a[20][0], a[23][0], a[26][0], a[29][0]]
    # create plot
    # NOTE(review): plt.subplots() returns a (figure, axes) tuple; `ax` is
    # misnamed and unused -- the bars are drawn on pyplot's implicit axes.
    ax = plt.subplots()
    index = np.arange(n_groups)
    bar_width = 0.15
    opacity = 1
    rects1 = plt.bar(index + 0.2, balance, bar_width, color='g', label='Available')
    rects2 = plt.bar(index + bar_width - 0.1, used, bar_width, color='r', label='Used')
    plt.xlabel('Type of leaves')
    plt.ylabel('No. of leaves')
    plt.title('QUERY MODE')
    plt.xticks(index + bar_width + 0.05, ('CL', 'SL', 'EL', 'PDL', 'PL', 'ML', 'CC'))
    plt.legend(loc = "upper left")
    plt.tight_layout()
    plt.show()
    # Legend for the category abbreviations used on the chart.
    print("---------------------------------------------")
    print("| CL | CASUAL LEAVES |")
    print("| SL | SICK LEAVES |")
    print("| EL | EARNED LEAVES |")
    print("| PDL | PROFESSIONAL DEVELOPMENT LEAVES |")
    print("| PL | PARENTING LEAVE |")
    print("| ML | MATERNITY LEAVE |")
    print("| CC | CHILD CARETAKING LEAVE |")
    print("---------------------------------------------")
    # Optional update mode after the query.  NOTE(review): "thorugh" typo is
    # inside the prompt string (runtime behavior), so it is left untouched.
    ch = input('Would you like to go thorugh the update mode? (y/n): ')
    if ch.lower() == 'y':
        for a in employees_data:
            if empid == a[1][0]:
                t = a
                break
        # print (a)
        typ = input('Enter the type of leave(CL/SL/EL/PDL/PL/ML/CC): ')
        typ = typ.upper()
        num = int(input('Enter the number of leaves required:'))
        # Reject requests exceeding the balance of the chosen category.
        if (typ == 'CL' and num > a[7][0]) or (typ == 'SL' and num > a[12][0]) or (typ == 'EL' and num > a[17][0]) or (
                typ == 'PDL' and num > a[20][0]) or (typ == "PL" and num > a[23][0]) or (
                typ == 'ML' and num > a[26][0]) or (typ == 'CC' and num > a[29][0]):
            print('\nNumber of leaves applied for is either exceeding the balance in the respective category or is not applicable.')
        else:
            # print (t)
            # u flags whether the row changed and must be written back.
            u = 0
            if typ == 'CL':
                if num <= 2:
                    t[6][0] += (num)
                    print('\nThe leave is granted and the database has been updated.')
                    u=1
                else:
                    print('\nMaximum 2 at a time.')
            elif typ == 'SL':
                if num <= 3:
                    t[9][0] += (num)
                    print('\nThe leave is granted and the database has been updated.')
                    u=1
                else:
                    print('\nTaking more than 3 leaves requires a medical certificate.')
                    mc = input('Do you have a medical certificate?\n')
                    if mc.lower() == 'yes':
                        t[9][0] += num
                        print('\nThe leave is granted and the database has been updated.')
                        u=1
                    else:
                        print('\nSorry, leave cannot be granted.')
            elif typ == 'EL':
                t[14][0] += (num)
                print('\nThe leave is granted and the database has been updated.')
                u=1
            elif typ == 'PDL':
                t[19][0] += (num)
                print('\nThe leave is granted and the database has been updated.')
                u=1
            elif typ == 'PL':
                if t[3][0] == 'Male':
                    t[22][0] += (num)
                    print('\nThe leave is granted and the database has been updated.')
                    u=1
                else:
                    print('\nNot applicable.')
            elif typ == 'ML':
                if t[3][0] == 'Female':
                    t[25][0] += (num)
                    print('\nThe leave is granted and the database has been updated.')
                    u=1
                else:
                    print('\nNot applicable.')
            elif typ == 'CC':
                if t[3][0] == 'Female':
                    t[28][0] += (num)
                    print('\nThe leave is granted and the database has been updated.')
                    u = 1
                else:
                    print('\nNot applicable.')
            # print (t)
            if u==1:
                # Drop every derived column so only raw values are persisted.
                for employee in employees_data:
                    for cell in range(len(employee)):
                        if cell == (5):
                            (employee[cell]).clear()
                        if cell == (7):
                            (employee[cell]).clear()
                        if cell == (8):
                            (employee[cell]).clear()
                        if cell == (12):
                            (employee[cell]).clear()
                        if cell == (13):
                            (employee[cell]).clear()
                        if cell == (17):
                            (employee[cell]).clear()
                        if cell == (18):
                            (employee[cell]).clear()
                        if cell == (20):
                            (employee[cell]).clear()
                        if cell == (21):
                            (employee[cell]).clear()
                        if cell == (23):
                            (employee[cell]).clear()
                        if cell == (24):
                            (employee[cell]).clear()
                        if cell == (26):
                            (employee[cell]).clear()
                        if cell == (27):
                            (employee[cell]).clear()
                        if cell == (29):
                            (employee[cell]).clear()
                # print (employees_data)
                # Rebuild the CSV text and overwrite the database file.
                file = ''
                n = 1
                for employee in employees_data:
                    if n == 1:
                        n = 2
                    else:
                        file += '\n'
                    for c in employee:
                        if c == []:
                            file += ('' + ',')
                        else:
                            file += (str(c[0]) + ',')
                # print (file)
                database.close()
                database = open('leave automation.csv', 'w+')
                database.write(file)
                # o = (database.read())
                # print (o)
                # print (database.read())
                database.close()
    else:
        database.close()
        print('\nThe program has been terminated. Thank you!')
# Direct update mode (duplicates the post-query update flow in the
# choice == 1 branch; a shared helper would remove the duplication).
if choice == 2:
    for a in employees_data:
        if empid == a[1][0]:
            t = a
            break
    # print (a)
    typ = input('Enter the type of leave(CL/SL/EL/PDL/PL/ML/CC): ')
    typ = typ.upper()
    num = int(input('Enter the number of leaves required:'))
    # Reject requests exceeding the balance of the chosen category.
    if (typ == 'CL' and num > a[7][0]) or (typ == 'SL' and num > a[12][0]) or (typ == 'EL' and num > a[17][0]) or (
            typ == 'PDL' and num > a[20][0]) or (typ == "PL" and num > a[23][0]) or (
            typ == 'ML' and num > a[26][0]) or (typ == 'CC' and num > a[29][0]):
        print('\nNumber of leaves applied for is either exceeding the balance in the respective category or is not applicable.')
    else:
        # print (t)
        # u flags whether the row changed and must be written back.
        u = 0
        if typ == 'CL':
            if num <= 2:
                t[6][0] += (num)
                print('\nThe leave is granted and the database has been updated.')
                u=1
            else:
                print('\nMaximum 2 at a time.')
        elif typ == 'SL':
            if num <= 3:
                t[9][0] += (num)
                print('\nThe leave is granted and the database has been updated.')
                u=1
            else:
                print('\nTaking more than 3 leaves requires a medical certificate.')
                mc = input('Do you have a medical certificate?\n')
                if mc.lower() == 'yes':
                    t[9][0] += num
                    print('\nThe leave is granted and the database has been updated.')
                    u=1
                else:
                    print('\nSorry, leave cannot be granted.')
        elif typ == 'EL':
            t[14][0] += (num)
            print('\nThe leave is granted and the database has been updated.')
            u=1
        elif typ == 'PDL':
            t[19][0] += (num)
            print('\nThe leave is granted and the database has been updated.')
            u=1
        elif typ == 'PL':
            if t[3][0] == 'Male':
                t[22][0] += (num)
                print('\nThe leave is granted and the database has been updated.')
                u=1
            else:
                print('\nNot applicable.')
        elif typ == 'ML':
            if t[3][0] == 'Female':
                t[25][0] += (num)
                print('\nThe leave is granted and the database has been updated.')
                u=1
            else:
                print('\nNot applicable.')
        elif typ == 'CC':
            if t[3][0] == 'Female':
                t[28][0] += (num)
                print('\nThe leave is granted and the database has been updated.')
                u = 1
            else:
                print('\nNot applicable.')
        # print (t)
        if u==1:
            # Drop every derived column so only raw values are persisted.
            for employee in employees_data:
                for cell in range(len(employee)):
                    if cell == (5):
                        (employee[cell]).clear()
                    if cell == (7):
                        (employee[cell]).clear()
                    if cell == (8):
                        (employee[cell]).clear()
                    if cell == (12):
                        (employee[cell]).clear()
                    if cell == (13):
                        (employee[cell]).clear()
                    if cell == (17):
                        (employee[cell]).clear()
                    if cell == (18):
                        (employee[cell]).clear()
                    if cell == (20):
                        (employee[cell]).clear()
                    if cell == (21):
                        (employee[cell]).clear()
                    if cell == (23):
                        (employee[cell]).clear()
                    if cell == (24):
                        (employee[cell]).clear()
                    if cell == (26):
                        (employee[cell]).clear()
                    if cell == (27):
                        (employee[cell]).clear()
                    if cell == (29):
                        (employee[cell]).clear()
            # print (employees_data)
            # Rebuild the CSV text and overwrite the database file.
            file = ''
            n = 1
            for employee in employees_data:
                if n == 1:
                    n = 2
                else:
                    file += '\n'
                for c in employee:
                    if c == []:
                        file += ('' + ',')
                    else:
                        file += (str(c[0]) + ',')
            # print (file)
            database.close()
            database = open('leave automation.csv', 'w+')
            database.write(file)
            # o = (database.read())
            # print (o)
            # print (database.read())
            database.close()
|
import tensorflow as tf
import numpy as np
from utils import utils
class Model(object):
    def __init__(self, token_emb_mat, glove_emb_mat, token_dict_size, char_dict_size, token_max_length, model_params, scope):
        """Define placeholders, masks/lengths and hyper-parameters for the
        question/description/answer matching model, then build the graph,
        loss, EMA and optimizer via update_tensor_add_ema_and_opt()."""
        self.scope = scope
        self.global_step = tf.get_variable('global_step', shape=[], dtype=tf.int32,
                                           initializer=tf.constant_initializer(0), trainable=False)
        self.token_emb_mat = token_emb_mat
        self.glove_emb_mat = glove_emb_mat
        self.model_params = model_params
        # batch * length
        # Token-id inputs; id 0 is treated as padding (the bool casts below
        # turn nonzero ids into True mask entries).
        self.que_token_seq = tf.placeholder(tf.int32, [None, None], name='que_tokens')
        self.des_token_seq = tf.placeholder(tf.int32, [None, None], name='des_tokens')
        self.ans_token_seq = tf.placeholder(tf.int32, [None, None], name='ans_tokens')
        # batch * length * chars: per-token character ids.
        self.que_char_seq = tf.placeholder(tf.int32, [None, None, None], name='que_chars')
        self.des_char_seq = tf.placeholder(tf.int32, [None, None, None], name='des_chars')
        self.ans_char_seq = tf.placeholder(tf.int32, [None, None, None], name='ans_chars')
        # Vocabulary sizes and embedding/CNN hyper-parameters.
        self.token_dict_size = token_dict_size
        self.char_dict_size = char_dict_size
        self.token_max_length = token_max_length
        self.token_embedding_size = model_params.token_embedding_size
        self.char_embedding_size = model_params.char_embedding_size
        self.char_out_size = model_params.char_out_size
        self.out_char_dimensions = model_params.out_channel_dims
        self.filter_heights = model_params.filter_heights
        self.hidden_size = model_params.hidden_size
        self.finetune_emb = model_params.fine_tune
        self.dropout = model_params.drop_out
        # Boolean padding masks and the per-example true lengths they imply.
        self.que_token_mask = tf.cast(self.que_token_seq, tf.bool)
        self.des_token_mask = tf.cast(self.des_token_seq, tf.bool)
        self.ans_token_mask = tf.cast(self.ans_token_seq, tf.bool)
        self.que_token_len = tf.reduce_sum(tf.cast(self.que_token_mask, tf.int32), -1)
        self.des_token_len = tf.reduce_sum(tf.cast(self.des_token_mask, tf.int32), -1)
        self.ans_token_len = tf.reduce_sum(tf.cast(self.ans_token_mask, tf.int32), -1)
        self.que_char_mask = tf.cast(self.que_char_seq, tf.bool)
        self.des_char_mask = tf.cast(self.des_char_seq, tf.bool)
        self.ans_char_mask = tf.cast(self.ans_char_seq, tf.bool)
        self.que_char_len = tf.reduce_sum(tf.cast(self.que_char_mask, tf.int32), -1)
        self.des_char_len = tf.reduce_sum(tf.cast(self.des_char_mask, tf.int32), -1)
        self.ans_char_len = tf.reduce_sum(tf.cast(self.ans_char_mask, tf.int32), -1)
        # Binary classification target per example.
        self.answer_labels = tf.placeholder(tf.int32, [None], name='answer_label')
        self.output_class = 2
        self.is_train = tf.placeholder(tf.bool, [], name='is_train')
        # Regularization / moving-average settings.
        self.wd = model_params.wd
        self.var_decay = model_params.var_decay
        self.decay=model_params.decay
        # Graph outputs; populated by update_tensor_add_ema_and_opt().
        self.logits = None
        self.loss = None
        self.accuracy = None
        self.train_op= None
        self.summary = None
        self.optimizer = None
        self.ema =None
        self.var_ema=None
        self.update_tensor_add_ema_and_opt()
    def build_network(self):
        """Build the forward graph: token+char embeddings for the three
        inputs, question/description interaction, question/answer
        interaction, attention pooling, and a 2-class logits head.

        Returns the logits tensor (batch * output_class)."""
        with tf.variable_scope('embedding'):
            # Shared token+char embedding pipeline for question, description
            # and answer sequences.
            que_emb = utils.token_and_char_emb(True, self.que_token_seq ,self.token_dict_size,
                                               self.token_embedding_size, self.token_emb_mat,
                                               self.glove_emb_mat,
                                               True, self.que_char_seq, self.char_dict_size,
                                               self.char_embedding_size, self.char_out_size,
                                               self.out_char_dimensions, self.filter_heights, False, None,
                                               0., 1., True)
            des_emb = utils.token_and_char_emb(True, self.des_token_seq ,self.token_dict_size,
                                               self.token_embedding_size, self.token_emb_mat,
                                               self.glove_emb_mat,
                                               True, self.des_char_seq, self.char_dict_size,
                                               self.char_embedding_size, self.char_out_size,
                                               self.out_char_dimensions, self.filter_heights, False, None,
                                               0., 1., True)
            ans_emb = utils.token_and_char_emb(True, self.ans_token_seq ,self.token_dict_size,
                                               self.token_embedding_size, self.token_emb_mat,
                                               self.glove_emb_mat,
                                               True, self.ans_char_seq, self.char_dict_size,
                                               self.char_embedding_size, self.char_out_size,
                                               self.out_char_dimensions, self.filter_heights, False, None,
                                               0., 1., True)
        with tf.variable_scope('qd_interaction'):
            # Decompose the question w.r.t. the description into parallel and
            # orthogonal components (see utils.generate_para_orth).
            para, orth = utils.generate_para_orth(que_emb, des_emb, self.que_token_mask, self.des_token_mask,
                                                  scope='gene_para_orth', keep_prob=self.dropout, is_train=self.is_train,
                                                  wd=self.wd, activation='relu')
        with tf.variable_scope('qa_interaction'):
            q = tf.concat([para, orth], -1)
            # Gated (tanh * sigmoid) projection of the answer embedding.
            a = utils.bn_dense_layer(ans_emb, 2 * self.hidden_size, True, 0., scope='ans_tanh',
                                     activation='tanh', enable_bn=False, wd=self.wd, keep_prob=self.dropout,
                                     is_train=self.is_train) * \
                utils.bn_dense_layer(ans_emb, 2 * self.hidden_size, True, 0., scope='ans_sigmoid',
                                     activation='sigmoid', enable_bn=False, wd=self.wd, keep_prob=self.dropout,
                                     is_train=self.is_train)
            # assert a.get_shape()[1] == self.ans_token_mask.get_shape()[1]
            q_inter, a_inter = utils.gene_qa_interaction(q, a, self.que_token_mask, self.ans_token_mask,
                                                         scope='qa_interaction', keep_prob=self.dropout, is_train=self.is_train,
                                                         wd=self.wd, activation='relu')
            # (batch*answer_nums) * dimension
            # Attention-pool the interacted sequences into fixed-size vectors.
            q_vec = utils.multi_dimensional_attention(q_inter, self.que_token_mask, 'q_vec' , self.dropout,
                                                      self.is_train, 0., 'relu')
            a_vec = utils.multi_dimensional_attention(a_inter, self.ans_token_mask, 'a_vec' , self.dropout,
                                                      self.is_train, 0., 'relu')
        with tf.variable_scope('output'):
            # Two-layer classification head over the concatenated vectors.
            final_rep = tf.concat([q_vec, a_vec], -1)
            pre_logits = tf.nn.relu(utils.linear([final_rep], 300 , True, 0., scope='pre_logits',
                                                 squeeze=False, wd=self.wd, input_keep_prob=self.dropout,
                                                 is_train=self.is_train))
            logits = utils.linear([pre_logits], self.output_class, True, 0., scope='logits',
                                  squeeze=False, wd=self.wd, input_keep_prob=self.dropout,is_train=self.is_train)
        return logits
def build_loss(self):
    """Total training loss: mean softmax cross-entropy plus L2 weight decay.

    Every variable registered in the 'reg_vars' collection contributes
    wd * l2_loss(var) to the 'losses' collection; the summed collection is
    returned and also exported as a TensorBoard summary and an EMA scalar.
    """
    with tf.variable_scope("weight_decay"):
        # set() de-duplicates variables registered in 'reg_vars' more than once.
        for var in set(tf.get_collection('reg_vars', self.scope)):
            weight_decay = tf.multiply(tf.nn.l2_loss(var), self.wd,
                                       name="{}-wd".format('-'.join(str(var.op.name).split('/'))))
            tf.add_to_collection('losses', weight_decay)
    reg_vars = tf.get_collection('losses', self.scope)
    trainable_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.scope)
    tf.logging.info("reg_vars num : %d " % len(reg_vars))
    tf.logging.info("trainable_vars num : %d " % len(trainable_vars))
    # Per-example cross entropy over the integer (0/1) answer labels.
    losses = tf.nn.sparse_softmax_cross_entropy_with_logits(
        labels=self.answer_labels,
        logits=self.logits,
    )
    tf.add_to_collection('losses', tf.reduce_mean(losses, name='xentropy_loss_mean'))
    # Final loss = mean cross entropy + all weight-decay terms in this scope.
    loss = tf.add_n(tf.get_collection('losses', self.scope), name='loss')
    tf.summary.scalar(loss.op.name, loss)
    tf.add_to_collection('ema/scalar', loss)
    return loss
def build_accuracy(self):
    """Per-example correctness (1.0 / 0.0) of the argmax class prediction."""
    predictions = tf.cast(tf.argmax(self.logits, -1), tf.int32)
    return tf.cast(tf.equal(predictions, self.answer_labels), tf.float32)
def update_tensor_add_ema_and_opt(self):
    """Build the full training graph: network, loss, accuracy, EMAs, optimizer.

    Order matters: build_var_ema / build_ema wrap self.loss in control
    dependencies, so they must run before the optimizer consumes the loss.
    """
    self.logits = self.build_network()
    self.loss = self.build_loss()
    self.accuracy = self.build_accuracy()
    if True:  # variable EMA is unconditionally enabled
        self.var_ema = tf.train.ExponentialMovingAverage(self.model_params.var_decay)
        self.build_var_ema()
    if self.model_params.mode == 'train':
        # Scalar/vector summary EMAs are only tracked during training.
        self.ema = tf.train.ExponentialMovingAverage(self.model_params.decay)
        self.build_ema()
    self.summary = tf.summary.merge_all()
    # Learning-rate sanity bounds differ per optimizer family.
    if self.model_params.optimizer.lower() == 'adadelta':
        assert self.model_params.learning_rate > 0.1 and self.model_params.learning_rate < 1.
        self.optimizer = tf.train.AdadeltaOptimizer(self.model_params.learning_rate)
    elif self.model_params.optimizer.lower() == 'adam':
        assert self.model_params.learning_rate < 0.1
        self.optimizer = tf.train.AdamOptimizer(self.model_params.learning_rate)
    else:
        raise AttributeError("no optimizer named as '%s' " % self.model_params.optimizer)
    self.train_op = self.optimizer.minimize(self.loss, self.global_step, var_list=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, self.scope))
def build_var_ema(self):
    """Apply an EMA over all trainable variables, tied to loss evaluation.

    Re-binding self.loss inside the control dependency guarantees the EMA
    update op runs every time the loss tensor is fetched.
    """
    ema_op = self.var_ema.apply(tf.trainable_variables(), )
    with tf.control_dependencies([ema_op]):
        self.loss = tf.identity(self.loss)
def build_ema(self):
    """Track EMAs of the 'ema/scalar' and 'ema/vector' tensors and summarize them."""
    tensors = tf.get_collection("ema/scalar", scope=self.scope) + \
              tf.get_collection("ema/vector", scope=self.scope)
    ema_op = self.ema.apply(tensors)
    # Export the smoothed values to TensorBoard (scalars and histograms).
    for var in tf.get_collection("ema/scalar", scope=self.scope):
        ema_var = self.ema.average(var)
        tf.summary.scalar(ema_var.op.name, ema_var)
    for var in tf.get_collection("ema/vector", scope=self.scope):
        ema_var = self.ema.average(var)
        tf.summary.histogram(ema_var.op.name, ema_var)
    with tf.control_dependencies([ema_op]):
        # Fetching the loss now also updates the tracked EMAs.
        self.loss = tf.identity(self.loss)
def get_feed_dict(self, sample_batch, data_type='train'):
    """Build the feed dict for one mini-batch.

    Pads every question/description/answer token sequence (and the
    per-token character sequences) with zeros up to the batch-wide
    maximum length, and maps the 'true'/'false' label strings to 1/0.

    sample_batch -- non-empty list of sample dicts with 'digitized_*'
                    id lists and a 'label' of 'true' or 'false'
    data_type    -- 'train' enables the is_train flag in the feed
    Raises ValueError on an empty batch or an unexpected label.
    """
    if not sample_batch:
        raise ValueError("sample_batch must be non-empty")

    def pad_one(token_ids, char_ids, seq_len):
        # Right-pad one sample's token ids and per-token char ids with zeros;
        # characters beyond self.token_max_length are silently truncated.
        tokens = np.zeros([seq_len], 'int32')
        chars = np.zeros([seq_len, self.token_max_length], 'int32')
        for idx_t, (token, char_seq_v) in enumerate(zip(token_ids, char_ids)):
            tokens[idx_t] = token
            for idx_c, char in enumerate(char_seq_v):
                if idx_c < self.token_max_length:
                    chars[idx_t, idx_c] = char
        return tokens, chars

    # Batch-wide maximum sequence lengths (question / description / answer).
    qsl = max(len(s['digitized_que_token']) for s in sample_batch)
    dsl = max(len(s['digitized_des_token']) for s in sample_batch)
    asl = max(len(s['digitized_ans_token']) for s in sample_batch)

    que_token_b, que_char_b = [], []
    des_token_b, des_char_b = [], []
    ans_token_b, ans_char_b = [], []
    label_b = []
    for sample in sample_batch:
        que_t, que_c = pad_one(sample['digitized_que_token'], sample['digitized_que_char'], qsl)
        des_t, des_c = pad_one(sample['digitized_des_token'], sample['digitized_des_char'], dsl)
        ans_t, ans_c = pad_one(sample['digitized_ans_token'], sample['digitized_ans_char'], asl)
        que_token_b.append(que_t)
        que_char_b.append(que_c)
        des_token_b.append(des_t)
        des_char_b.append(des_c)
        ans_token_b.append(ans_t)
        ans_char_b.append(ans_c)
        if sample['label'] == 'true':
            label_b.append(1)
        elif sample['label'] == 'false':
            label_b.append(0)
        else:
            # The original used a bare assert (stripped under -O); fail loudly.
            raise ValueError("unexpected label %r" % (sample['label'],))

    feed_dict = {
        self.que_token_seq: np.stack(que_token_b), self.que_char_seq: np.stack(que_char_b),
        self.des_token_seq: np.stack(des_token_b), self.des_char_seq: np.stack(des_char_b),
        self.ans_token_seq: np.stack(ans_token_b), self.ans_char_seq: np.stack(ans_char_b),
        self.answer_labels: np.stack(label_b).astype('int32'),
        self.is_train: data_type == 'train',
    }
    return feed_dict
def step(self, sess, batch_samples, get_summary=False):
    """Run one optimisation step; return (loss, summary-or-None, train_op result)."""
    assert isinstance(sess, tf.Session)
    feed_dict = self.get_feed_dict(batch_samples, 'train')
    summary = None
    if get_summary:
        loss, summary, train_op = sess.run(
            [self.loss, self.summary, self.train_op], feed_dict=feed_dict)
    else:
        loss, train_op = sess.run([self.loss, self.train_op], feed_dict=feed_dict)
    return loss, summary, train_op
|
__author__ = 'trunghieu11'


def dfs(html, begin, end, type):
    """Recursively rewrite the HTML fragment into "Luna" notation.

    An opening tag begin[i] becomes 'TYPE([', the matching closing tag
    becomes '])', and a self-closing '<img />' becomes 'IMG({})'.
    Fix: the original called the non-existent str.find2 (AttributeError);
    str.startswith is the correct prefix test.  An unrecognised prefix now
    terminates the conversion instead of returning None.
    """
    if html == "":
        return ""
    for i in range(3):
        if html.startswith(begin[i]):
            return type[i] + "([" + dfs(html[len(begin[i]):], begin, end, type)
    if html.startswith("<img />"):
        return "IMG({})" + dfs(html[len("<img />"):], begin, end, type)
    for i in range(3):
        if html.startswith(end[i]):
            return "])" + dfs(html[len(end[i]):], begin, end, type)
    # Unknown token: stop gracefully (original fell through and returned None).
    return ""


def htmlToLuna(html):
    """Convert an HTML string built from div/p/b/img tags to Luna notation."""
    begin = ["<div>", "<p>", "<b>"]
    end = ["</div>", "</p>", "</b>"]
    type = ["DIV", "P", "B"]
    return dfs(html, begin, end, type)


if __name__ == '__main__':
    html = "<div><p><img /></p><b></b></div>"
    print(htmlToLuna(html))  # Python 3 print (original was Python 2 syntax)
from threading import Thread, Lock

n = 10


def func(lock):
    """Atomically decrement the module-level counter by one."""
    global n
    with lock:  # context manager replaces manual acquire/release
        n -= 1


lock = Lock()
# Fix: the original printed n immediately after starting the workers, racing
# with them.  Start all threads, then join them, so the final value (0) is
# printed deterministically.
threads = [Thread(target=func, args=(lock,)) for _ in range(10)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(n)
import pygame
import time
import random
pygame.init()
# Colour palette (RGB); white/yellow/red/blue are defined but unused below.
white = (255, 255, 255)
yellow = (255, 255, 102)
black = (0, 0, 0)
red = (213, 50, 80)
green = (0, 255, 0)
blue = (50, 153, 213)
# Window dimensions in pixels.
dis_width = 600
dis_height = 400
# Create the display surface.
dis = pygame.display.set_mode((dis_width, dis_height))
# Window caption.
pygame.display.set_caption('Snake Game by Daniel')
# Clock used to cap the frame rate.
clock = pygame.time.Clock()
# Snake segment size (pixels) and game speed (ticks per second).
snake_block = 10
snake_speed = 10
# Fonts for messages and the score display.
font_style = pygame.font.SysFont("bahnschrift", 25)
score_font = pygame.font.SysFont("comicsansms", 35)
# Draw the snake's body.
def draw_our_snake(snake_block, snake_list):
    """Draw one green square per snake segment onto the display surface."""
    for segment in snake_list:
        # segment is an [x, y] pair, e.g. [100, 100]
        pygame.draw.rect(dis, green, [segment[0], segment[1], snake_block, snake_block])
# Game entry point.
def gameLoop():
    """Run the snake game until the player quits.

    NOTE(review): food spawning/eating and wall/self collision are not
    implemented in this version, so Length_of_snake never grows and
    game_close is never set to True by gameplay — confirm intent.
    """
    game_over = False
    game_close = False
    # Start in the centre of the screen.
    x1 = dis_width / 2
    y1 = dis_height / 2
    # Per-frame position delta, set by the arrow keys.
    x1_change = 0
    y1_change = 0
    snake_List = []
    Length_of_snake = 1
    while not game_over:
        #
        # --------------------key event --------------------------------------
        #
        # "Game over" menu loop: Q quits, C restarts (recursively re-enters gameLoop).
        while game_close == True:
            dis.fill(black)
            pygame.display.update()
            for event in pygame.event.get():
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_q:
                        game_over = True
                        game_close = False
                    if event.key == pygame.K_c:
                        gameLoop()
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                game_over = True
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    # Move left.
                    x1_change = -snake_block
                    y1_change = 0
                elif event.key == pygame.K_RIGHT:
                    # Move right.
                    x1_change = snake_block
                    y1_change = 0
                elif event.key == pygame.K_UP:
                    y1_change = -snake_block
                    x1_change = 0
                elif event.key == pygame.K_DOWN:
                    y1_change = snake_block
                    x1_change = 0
        #
        # --------------------key event --------------------------------------
        #
        # Advance the head position (e.g. key left causes x1 = 300 - 10 = 290).
        x1 = x1 + x1_change
        y1 = y1 + y1_change
        dis.fill(black)
        # Build the new head segment.
        snake_Head = []
        snake_Head.append(x1)
        snake_Head.append(y1)
        snake_List.append(snake_Head)
        # Drop the oldest segment so the snake keeps its length.
        if len(snake_List) > Length_of_snake:
            del snake_List[0]
        # Draw the snake body.
        draw_our_snake(snake_block, snake_List)
        pygame.display.update()
        # Cap the frame rate; the screen is cleared and redrawn every tick.
        clock.tick(snake_speed)
    pygame.quit()
    quit()
gameLoop() |
from itertools import product
from random import randint, seed
from sse_validate_utf16le_proof import bitmask
# This is a copy from sse_validate_utf16le_proof.py with
# adjusted the mask for the 16-bit base
def mask(words):
    """Combined validity mask — 16-bit variant of the SSE UTF-16 proof mask.

    A bit is set where the word is neither surrogate half, or where it is
    part of a well-formed low/high surrogate pair.
    """
    low = bitmask(words, 'L')
    high = bitmask(words, 'H')
    neither = (~(low | high)) & 0xffff
    pair_start = low & (high >> 1)
    pair_end = pair_start << 1
    return neither | pair_start | pair_end
class Record:
    """One 16-word test chunk plus its expected validity verdict."""

    def __init__(self):
        self.words = []

    def add(self, word):
        """Append one word class ('V', 'L' or 'H') to the chunk."""
        self.words.append(word)

    @property
    def is_valid(self):
        """True when the chunk's validity mask accepts it.

        0xffff means fully valid; 0x7fff is accepted only when the chunk
        does not end with a lone surrogate half ('L' or 'H').
        """
        c = mask(self.words)
        if c == 0xffff:
            return True
        if c == 0x7fff:
            # the test harness rejects chunks that end a surrogate mid-pair
            return self.words[-1] not in ('L', 'H')
        return False

    def __str__(self):
        prefix = 'T' if self.is_valid else 'F'
        return prefix + ''.join(self.words)
def test_words():
    """Return all generated word sequences, de-duplicated and sorted."""
    return sorted({tuple(seq) for seq in test_words_aux()})
def test_words_aux():
    """Yield 16-element sequences over {'V', 'L', 'H'} covering interesting cases.

    Fix: in the scattered-three-surrogates case the original yielded one
    shared list that it kept mutating, so any consumer holding on to the
    yielded objects (e.g. list(test_words_aux())) saw only the final state.
    Every yield is now a fresh list; the set-of-tuples result is unchanged.
    """
    # 1. all valid
    yield ['V'] * 16
    # 2. only low surrogates
    yield ['L'] * 16
    # 3. only high surrogates
    yield ['H'] * 16
    # 4. sole low surrogate
    for i in range(16):
        seq = ['V'] * 16
        seq[i] = 'L'
        yield seq
    # 5. sole high surrogate
    for i in range(16):
        seq = ['V'] * 16
        seq[i] = 'H'
        yield seq
    # 6. scattered three surrogates
    for i in range(16):
        for j in range(16):
            for k in range(16):
                for a, b, c in product('LH', repeat=3):
                    seq = ['V'] * 16
                    seq[i] = a
                    seq[j] = b
                    seq[k] = c
                    yield seq
    # To cover all 16-byte inputs we would need 3**16 cases (43'046'721)
    # Instead, we cover all possible 6-element combinations (3**6 = 729)
    # and move it within 16-element input. This yields 729 * 10 cases.
    k = 6
    for combination in product('VLH', repeat=k):
        for position in range(16 - k):
            seq = ['V'] * 16
            for i, v in enumerate(combination):
                seq[i + position] = v
            yield seq
# Header line written at the top of the generated test-case file.
TXT = """# generated by scripts/sse_validate_utf16le_testcases.py
"""
def write_file(file):
    """Write the header followed by one Record line per generated sequence."""
    file.write(TXT)
    for words in test_words():
        record = Record()
        for word in words:
            record.add(word)
        file.write(str(record) + '\n')
def main():
    """Seed the RNG for reproducibility and emit the test-case file."""
    seed(0)
    with open('validate_utf16_testcases.txt', 'w') as out:
        write_file(out)


if __name__ == '__main__':
    main()
|
#?description=Use the specialized IDexUnit interface to replace all dex strings 'text/html' by 'foobar'.
#?shortcut=
from com.pnfsoftware.jeb.client.api import IScript
from com.pnfsoftware.jeb.core.units.code import ICodeUnit, ICodeItem
from com.pnfsoftware.jeb.core.units.code.android import IDexUnit
from com.pnfsoftware.jeb.core.units.code.android.dex import IDexString
"""
Sample script for JEB Decompiler.
"""
class DexManipulation(IScript):
    """JEB script: replace every DEX string starting with 'text/html' by 'foobar'."""
    def run(self, ctx):
        # ctx is the JEB client context; an open project is required.
        prj = ctx.getMainProject()
        assert prj, 'Need a project'
        # Process every DEX code unit found in the project.
        for codeUnit in prj.findUnits(IDexUnit):
            self.processDex(codeUnit)
    def processDex(self, dex):
        # replace DEX strings
        cnt = 0
        for s in dex.getStrings():
            if s.getValue().startswith('text/html'):
                s.setValue('foobar')
                cnt += 1
                print('String replaced')
        if cnt > 0:
            # Notify JEB that the unit changed so views refresh.
            dex.notifyGenericChange()
|
# David Powis-Dow CS 101:Python
# 2016-10-02 v0.1
# Chapter 2 Exercise 5: Compound Interest Calculation with Years Input


def compound_amount(principal=10000, rate=0.08, periods_per_year=12, years=1.0):
    """Return the compound-interest amount A = P * (1 + r/n) ** (n * t).

    principal        -- initial sum invested (P); default matches the exercise
    rate             -- annual interest rate as a fraction (r), 0.08 == 8%
    periods_per_year -- compounding periods per year (n); 12 == monthly
    years            -- investment duration in years (t)
    """
    return principal * (1 + rate / periods_per_year) ** (periods_per_year * years)


if __name__ == '__main__':
    # Prompting moved behind the main guard so importing this module no
    # longer blocks on stdin; the prompt and output text are unchanged.
    var_t = float(input("Please enter the number of years the money be compounded for?: "))
    print("Compounded amount is", compound_amount(years=var_t))
|
import unittest
from puzzles.day2.intcode import process_intcode, find_initial_inputs
class TestIntcode(unittest.TestCase):
    """Tests for the Advent-of-Code day-2 intcode interpreter."""
    def test_process_gravity_assist(self):
        """process_intcode returns the full memory image after the program halts."""
        self.assertEqual(process_intcode([1, 9, 10, 3, 2, 3, 11, 0, 99, 30, 40, 50]), [3500, 9, 10, 70, 2, 3, 11, 0, 99, 30, 40, 50])
        self.assertEqual(process_intcode([1, 0, 0, 0, 99]), [2, 0, 0, 0, 99])
        self.assertEqual(process_intcode([2, 3, 0, 3, 99]), [2, 3, 0, 6, 99])
        self.assertEqual(process_intcode([2, 4, 4, 5, 99, 0]), [2, 4, 4, 5, 99, 9801])
        self.assertEqual(process_intcode([1, 1, 1, 4, 99, 5, 6, 0, 99]), [30, 1, 1, 4, 2, 5, 6, 0, 99])
    def test_init_memory(self):
        """find_initial_inputs reports the noun/verb pair that yields the target."""
        self.assertEqual(find_initial_inputs('1,0,0,3,1,3,1,0,99,30,40,50', 2), 'Found!: noun 0, verb 0')
        self.assertEqual(find_initial_inputs('2,0,0,3,1,3,1,0,99,30,40,50', 4), 'Found!: noun 0, verb 0')
        self.assertEqual(find_initial_inputs('1,0,0,3,2,3,3,0,99,30,40,50', 9), 'Found!: noun 0, verb 2')
|
""" Views for the base application """
from django.shortcuts import render_to_response
from django.template import RequestContext
def home(request):
    """ Default view for the root """
    # NOTE(review): render_to_response(..., context_instance=...) was removed
    # in Django 1.10; migrate to django.shortcuts.render when upgrading.
    return render_to_response('base/home.html',
                              context_instance=RequestContext(request))
def documentation(request):
    """View for the documentation"""
    # NOTE(review): same deprecated render_to_response/context_instance
    # pattern as home(); replace with render() when upgrading Django.
    return render_to_response('base/documentation.html',
                              context_instance=RequestContext(request))
|
import math


def _to_day_number(d, m, y):
    """Map a (day, month, year) triple to a linear day count.

    Uses the original script's formula verbatim.  NOTE(review): the
    (306*m+5)//10 month term is normally used with March-based month
    numbering; with raw calendar months cross-month differences may be
    off — confirm against a known-good date library.
    """
    return (365 * y + math.floor(y / 4) - math.floor(y / 100)
            + math.floor(y / 400) + math.floor((306 * m + 5) / 10) + (d - 1))


def days_between(date_a, date_b):
    """Return the absolute number of days between two 'dd/mm/yyyy' strings."""
    def parse(text):
        # int() replaces the original eval() — never eval untrusted input.
        d, m, y = (int(part) for part in text.split('/'))
        return d, m, y
    return abs(_to_day_number(*parse(date_a)) - _to_day_number(*parse(date_b)))


if __name__ == '__main__':
    datea = input("Enter first date using format (dd/mm/yyyy):")
    dateb = input("Enter second date using format (dd/mm/yyyy):")
    print(days_between(datea, dateb), " days")
|
import functools
import time


def time_this(num_runs=10):
    """Decorator factory: run the wrapped function num_runs times and print
    the mean wall-clock duration.

    Fixes over the original: the wrapper now forwards *args/**kwargs and
    returns the wrapped function's (last) result instead of discarding it,
    and functools.wraps preserves the wrapped function's metadata.
    """
    def wrapp(func):
        @functools.wraps(func)
        def time_test(*args, **kwargs):
            print("Старт теста")
            time_compare = 0
            result = None
            for i in range(num_runs):
                t0 = time.time()
                result = func(*args, **kwargs)
                t1 = time.time()
                time_compare += (t1 - t0)
            time_compare /= num_runs
            print("Конец теста.")
            print("время меняется, запустить несколько раз")
            print("Среднее время выполнения %sсекунд" %(time_compare))
            return result
        return time_test
    return wrapp
@time_this(10)
def fobina():
    """Walk the Fibonacci sequence until it crosses a huge bound, printing it.

    NOTE(review): the recursive call's result is discarded, so `return sub()`
    always returns the initial n2 (1); the printed sum is the only useful
    output — confirm whether the return value was ever meant to matter.
    """
    def sub(n1 = 0, n2 = 1):
        sum = n1 + n2  # shadows the builtin `sum`
        # Recurse until the running Fibonacci value crosses the bound below.
        if sum < 40000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000:
            sub(n2, sum)
        else: print(sum)
        return n2
    return sub()
fobina()
print("end") |
# Generated by Django 3.1.5 on 2021-02-11 03:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the CartProduct and Cart tables (auto-generated by Django 3.1.5)."""

    dependencies = [
        ('shop', '0009_slidercontent'),
    ]

    operations = [
        migrations.CreateModel(
            name='CartProduct',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): 'quanty' looks like a typo for 'quantity';
                # renaming it later requires another migration.
                ('quanty', models.PositiveIntegerField(default=1)),
                ('price', models.PositiveIntegerField(default=0, verbose_name='Tovar narxi')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='shop.product', verbose_name='Tovar')),
            ],
        ),
        migrations.CreateModel(
            name='Cart',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('total_products', models.PositiveIntegerField(default=0)),
                ('total_price', models.PositiveIntegerField(default=0, verbose_name='Umumiy narxi')),
                # NOTE(review): null=True has no effect on ManyToManyField
                # per the Django docs; it only triggers a warning.
                ('products', models.ManyToManyField(null=True, related_name='savatcha_tovar', to='shop.CartProduct')),
            ],
        ),
    ]
|
from django.test import TestCase
from users.models import User
from .fake_transactions import transactions_data
from ..exceptions import InvalidReport
from ..reports.summary_by_account import SummaryByAccount
from ..use_cases import CreateTransactions
class SummaryByAccountTestCase(TestCase):
    """Tests for the SummaryByAccount report over a fixed fake-transaction set."""
    def setUp(self):
        # One user (pk assumed to be 1) whose transactions the report aggregates.
        User.objects.create(
            name="user1",
            email="user1@gmail.com",
            age=12
        )
        CreateTransactions(1).execute(transactions_data)
    def test_summary_without_dates(self):
        """Omitting the date range summarizes all of the user's transactions."""
        response = SummaryByAccount.execute({"user": 1})
        # Expected per-account balances derived from fake_transactions data.
        expected = [
            {
                "account": "C00099",
                "balance": "1738.87",
                "total_inflow": "2500.72",
                "total_outflow": "-761.85"
            },
            {
                "account": "S00012",
                "balance": "150.72",
                "total_inflow": "150.72",
                "total_outflow": "0.00"
            }
        ]
        self.assertEqual(response, expected)
    def test_summary_with_dates(self):
        """A from/to date range restricts the summary to transactions inside it."""
        params = {
            "user": 1,
            "from_date": "2020-01-01",
            "to_date": "2020-01-04"
        }
        response = SummaryByAccount.execute(params)
        expected = [
            {
                "account": "C00099",
                "balance": "-51.13",
                "total_inflow": "0.00",
                "total_outflow": "-51.13"
            }
        ]
        self.assertEqual(response, expected)
    def test_summary_with_wrong_dates(self):
        """A from_date without a matching to_date is rejected as invalid."""
        params = {
            "user": 1,
            "from_date": "2020-01-01"
        }
        with self.assertRaises(InvalidReport):
            SummaryByAccount.execute(params)
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class ProductManager(models.Manager):
    """Manager adding a convenience filter for a user's own products."""
    def for_user(self, user):
        """Return the queryset of products whose created_by is *user*."""
        return self.filter(created_by=user)
class Product(models.Model):
    """A sellable item owned by the user who created it."""
    name = models.CharField(max_length=255)
    price = models.IntegerField()
    quantity = models.IntegerField()
    created_by = models.ForeignKey(User, on_delete=models.CASCADE)
    # Custom default manager so Product.objects.for_user(user) is available.
    objects = ProductManager()
    def __str__(self):
        # e.g. "3: Widget" — id plus human-readable name for admin listings.
        return '{}: {}'.format(self.id, self.name)
class UserProduct(models.Model):
    """Join table linking a user to a product with a per-user count."""
    user = models.ForeignKey(User, on_delete=models.CASCADE)
    # NOTE(review): related_name='products' makes Product.products resolve to
    # UserProduct rows — the name is misleading; confirm before relying on it.
    product = models.ForeignKey(Product, on_delete=models.CASCADE, related_name='products')
    count = models.IntegerField(default=0)
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'calendar.ui'
#
# Created by: PyQt5 UI code generator 5.9.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Dialog(object):
def setupUi(self, Dialog):
    """Build the calendar-options dialog widget tree (generated from calendar.ui).

    NOTE: generated by pyuic5 — manual edits are lost on regeneration; edit
    calendar.ui instead.  Texts are assigned in retranslateUi.
    """
    Dialog.setObjectName("Dialog")
    Dialog.resize(801, 407)
    # Read-only text area spanning the whole dialog background.
    self.textBrowser = QtWidgets.QTextBrowser(Dialog)
    self.textBrowser.setGeometry(QtCore.QRect(0, -10, 821, 431))
    self.textBrowser.setLineWidth(0)
    self.textBrowser.setObjectName("textBrowser")
    # "Preview" group holding the calendar widget.
    self.groupBox = QtWidgets.QGroupBox(Dialog)
    self.groupBox.setGeometry(QtCore.QRect(10, 10, 351, 231))
    font = QtGui.QFont()
    font.setPointSize(10)
    self.groupBox.setFont(font)
    self.groupBox.setObjectName("groupBox")
    self.calendarWidget = QtWidgets.QCalendarWidget(self.groupBox)
    self.calendarWidget.setGeometry(QtCore.QRect(10, 20, 331, 201))
    font = QtGui.QFont()
    font.setBold(False)
    font.setWeight(50)
    self.calendarWidget.setFont(font)
    self.calendarWidget.setSelectedDate(QtCore.QDate(2017, 5, 30))
    self.calendarWidget.setGridVisible(True)
    self.calendarWidget.setObjectName("calendarWidget")
    # "General Options" group: selection mode, grid and navigation toggles.
    self.groupBox_2 = QtWidgets.QGroupBox(Dialog)
    self.groupBox_2.setGeometry(QtCore.QRect(370, 10, 431, 231))
    font = QtGui.QFont()
    font.setPointSize(10)
    self.groupBox_2.setFont(font)
    self.groupBox_2.setObjectName("groupBox_2")
    self.comboBox_4 = QtWidgets.QComboBox(self.groupBox_2)
    self.comboBox_4.setGeometry(QtCore.QRect(130, 180, 271, 21))
    self.comboBox_4.setObjectName("comboBox_4")
    self.comboBox_4.addItem("")
    self.comboBox_4.addItem("")
    self.checkBox = QtWidgets.QCheckBox(self.groupBox_2)
    self.checkBox.setGeometry(QtCore.QRect(20, 120, 51, 17))
    font = QtGui.QFont()
    font.setPointSize(9)
    self.checkBox.setFont(font)
    self.checkBox.setObjectName("checkBox")
    self.checkBox_2 = QtWidgets.QCheckBox(self.groupBox_2)
    self.checkBox_2.setGeometry(QtCore.QRect(300, 120, 101, 21))
    font = QtGui.QFont()
    font.setPointSize(9)
    self.checkBox_2.setFont(font)
    self.checkBox_2.setObjectName("checkBox_2")
    # Form with locale / week-start / selection-mode line-edit+combo rows.
    self.layoutWidget_2 = QtWidgets.QWidget(self.groupBox_2)
    self.layoutWidget_2.setGeometry(QtCore.QRect(10, 30, 391, 81))
    self.layoutWidget_2.setObjectName("layoutWidget_2")
    self.formLayout_3 = QtWidgets.QFormLayout(self.layoutWidget_2)
    self.formLayout_3.setContentsMargins(0, 0, 0, 0)
    self.formLayout_3.setObjectName("formLayout_3")
    self.lineEdit_3 = QtWidgets.QLineEdit(self.layoutWidget_2)
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_3.setFont(font)
    self.lineEdit_3.setAcceptDrops(True)
    self.lineEdit_3.setAutoFillBackground(False)
    self.lineEdit_3.setFrame(False)
    self.lineEdit_3.setDragEnabled(False)
    self.lineEdit_3.setReadOnly(False)
    self.lineEdit_3.setClearButtonEnabled(False)
    self.lineEdit_3.setObjectName("lineEdit_3")
    self.formLayout_3.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.lineEdit_3)
    self.comboBox = QtWidgets.QComboBox(self.layoutWidget_2)
    self.comboBox.setObjectName("comboBox")
    self.comboBox.addItem("")
    self.comboBox.addItem("")
    self.comboBox.addItem("")
    self.comboBox.addItem("")
    self.formLayout_3.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.comboBox)
    self.lineEdit_4 = QtWidgets.QLineEdit(self.layoutWidget_2)
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_4.setFont(font)
    self.lineEdit_4.setAcceptDrops(True)
    self.lineEdit_4.setAutoFillBackground(False)
    self.lineEdit_4.setFrame(False)
    self.lineEdit_4.setDragEnabled(False)
    self.lineEdit_4.setReadOnly(False)
    self.lineEdit_4.setClearButtonEnabled(False)
    self.lineEdit_4.setObjectName("lineEdit_4")
    self.formLayout_3.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.lineEdit_4)
    self.comboBox_2 = QtWidgets.QComboBox(self.layoutWidget_2)
    self.comboBox_2.setObjectName("comboBox_2")
    self.comboBox_2.addItem("")
    self.comboBox_2.addItem("")
    self.comboBox_2.addItem("")
    self.comboBox_2.addItem("")
    self.formLayout_3.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.comboBox_2)
    self.lineEdit_5 = QtWidgets.QLineEdit(self.layoutWidget_2)
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_5.setFont(font)
    self.lineEdit_5.setAcceptDrops(True)
    self.lineEdit_5.setAutoFillBackground(False)
    self.lineEdit_5.setFrame(False)
    self.lineEdit_5.setDragEnabled(False)
    self.lineEdit_5.setReadOnly(False)
    self.lineEdit_5.setClearButtonEnabled(False)
    self.lineEdit_5.setObjectName("lineEdit_5")
    self.formLayout_3.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.lineEdit_5)
    self.comboBox_3 = QtWidgets.QComboBox(self.layoutWidget_2)
    self.comboBox_3.setObjectName("comboBox_3")
    self.comboBox_3.addItem("")
    self.comboBox_3.addItem("")
    self.formLayout_3.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.comboBox_3)
    # Vertical / horizontal header selectors below the form.
    self.lineEdit_9 = QtWidgets.QLineEdit(self.groupBox_2)
    self.lineEdit_9.setGeometry(QtCore.QRect(10, 180, 91, 20))
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_9.setFont(font)
    self.lineEdit_9.setAcceptDrops(True)
    self.lineEdit_9.setAutoFillBackground(False)
    self.lineEdit_9.setFrame(False)
    self.lineEdit_9.setDragEnabled(False)
    self.lineEdit_9.setReadOnly(False)
    self.lineEdit_9.setClearButtonEnabled(False)
    self.lineEdit_9.setObjectName("lineEdit_9")
    self.comboBox_5 = QtWidgets.QComboBox(self.groupBox_2)
    self.comboBox_5.setGeometry(QtCore.QRect(130, 150, 271, 21))
    self.comboBox_5.setObjectName("comboBox_5")
    self.comboBox_5.addItem("")
    self.comboBox_5.addItem("")
    self.comboBox_5.addItem("")
    self.comboBox_5.addItem("")
    self.lineEdit_8 = QtWidgets.QLineEdit(self.groupBox_2)
    self.lineEdit_8.setGeometry(QtCore.QRect(10, 150, 101, 20))
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_8.setFont(font)
    self.lineEdit_8.setAcceptDrops(True)
    self.lineEdit_8.setAutoFillBackground(False)
    self.lineEdit_8.setFrame(False)
    self.lineEdit_8.setDragEnabled(False)
    self.lineEdit_8.setReadOnly(False)
    self.lineEdit_8.setClearButtonEnabled(False)
    self.lineEdit_8.setObjectName("lineEdit_8")
    # Dates group: minimum / current / maximum date editors.
    self.groupBox_3 = QtWidgets.QGroupBox(Dialog)
    self.groupBox_3.setGeometry(QtCore.QRect(10, 250, 351, 141))
    font = QtGui.QFont()
    font.setPointSize(10)
    self.groupBox_3.setFont(font)
    self.groupBox_3.setObjectName("groupBox_3")
    self.layoutWidget = QtWidgets.QWidget(self.groupBox_3)
    self.layoutWidget.setGeometry(QtCore.QRect(10, 20, 321, 101))
    self.layoutWidget.setObjectName("layoutWidget")
    self.formLayout_2 = QtWidgets.QFormLayout(self.layoutWidget)
    self.formLayout_2.setContentsMargins(0, 0, 0, 0)
    self.formLayout_2.setObjectName("formLayout_2")
    self.dateEdit = QtWidgets.QDateEdit(self.layoutWidget)
    self.dateEdit.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
    self.dateEdit.setInputMethodHints(QtCore.Qt.ImhNone)
    self.dateEdit.setWrapping(False)
    self.dateEdit.setFrame(True)
    self.dateEdit.setAccelerated(False)
    self.dateEdit.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToPreviousValue)
    self.dateEdit.setKeyboardTracking(True)
    self.dateEdit.setProperty("showGroupSeparator", False)
    self.dateEdit.setCurrentSection(QtWidgets.QDateTimeEdit.DaySection)
    self.dateEdit.setObjectName("dateEdit")
    self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.dateEdit)
    self.line_11 = QtWidgets.QFrame(self.layoutWidget)
    self.line_11.setFrameShape(QtWidgets.QFrame.VLine)
    self.line_11.setFrameShadow(QtWidgets.QFrame.Sunken)
    self.line_11.setObjectName("line_11")
    self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.line_11)
    self.lineEdit_11 = QtWidgets.QLineEdit(self.layoutWidget)
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_11.setFont(font)
    self.lineEdit_11.setAcceptDrops(True)
    self.lineEdit_11.setAutoFillBackground(False)
    self.lineEdit_11.setFrame(False)
    self.lineEdit_11.setDragEnabled(False)
    self.lineEdit_11.setReadOnly(False)
    self.lineEdit_11.setClearButtonEnabled(False)
    self.lineEdit_11.setObjectName("lineEdit_11")
    self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.lineEdit_11)
    self.dateEdit_2 = QtWidgets.QDateEdit(self.layoutWidget)
    self.dateEdit_2.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
    self.dateEdit_2.setInputMethodHints(QtCore.Qt.ImhNone)
    self.dateEdit_2.setWrapping(False)
    self.dateEdit_2.setFrame(True)
    self.dateEdit_2.setAccelerated(False)
    self.dateEdit_2.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToPreviousValue)
    self.dateEdit_2.setKeyboardTracking(True)
    self.dateEdit_2.setProperty("showGroupSeparator", False)
    self.dateEdit_2.setCurrentSection(QtWidgets.QDateTimeEdit.MonthSection)
    self.dateEdit_2.setObjectName("dateEdit_2")
    self.formLayout_2.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.dateEdit_2)
    self.lineEdit_12 = QtWidgets.QLineEdit(self.layoutWidget)
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_12.setFont(font)
    self.lineEdit_12.setAcceptDrops(True)
    self.lineEdit_12.setAutoFillBackground(False)
    self.lineEdit_12.setFrame(False)
    self.lineEdit_12.setDragEnabled(False)
    self.lineEdit_12.setReadOnly(False)
    self.lineEdit_12.setClearButtonEnabled(False)
    self.lineEdit_12.setObjectName("lineEdit_12")
    self.formLayout_2.setWidget(4, QtWidgets.QFormLayout.LabelRole, self.lineEdit_12)
    self.dateEdit_3 = QtWidgets.QDateEdit(self.layoutWidget)
    self.dateEdit_3.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
    self.dateEdit_3.setInputMethodHints(QtCore.Qt.ImhNone)
    self.dateEdit_3.setWrapping(False)
    self.dateEdit_3.setFrame(True)
    self.dateEdit_3.setAccelerated(False)
    self.dateEdit_3.setCorrectionMode(QtWidgets.QAbstractSpinBox.CorrectToPreviousValue)
    self.dateEdit_3.setKeyboardTracking(True)
    self.dateEdit_3.setProperty("showGroupSeparator", False)
    self.dateEdit_3.setCurrentSection(QtWidgets.QDateTimeEdit.MonthSection)
    self.dateEdit_3.setObjectName("dateEdit_3")
    self.formLayout_2.setWidget(4, QtWidgets.QFormLayout.FieldRole, self.dateEdit_3)
    self.lineEdit_10 = QtWidgets.QLineEdit(self.layoutWidget)
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_10.setFont(font)
    self.lineEdit_10.setAcceptDrops(True)
    self.lineEdit_10.setAutoFillBackground(False)
    self.lineEdit_10.setFrame(False)
    self.lineEdit_10.setDragEnabled(False)
    self.lineEdit_10.setReadOnly(False)
    self.lineEdit_10.setClearButtonEnabled(False)
    self.lineEdit_10.setObjectName("lineEdit_10")
    self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.lineEdit_10)
    # Fourth group: additional combo-box options with two check boxes.
    self.groupBox_4 = QtWidgets.QGroupBox(Dialog)
    self.groupBox_4.setGeometry(QtCore.QRect(370, 250, 421, 141))
    font = QtGui.QFont()
    font.setPointSize(10)
    self.groupBox_4.setFont(font)
    self.groupBox_4.setObjectName("groupBox_4")
    self.formLayoutWidget = QtWidgets.QWidget(self.groupBox_4)
    self.formLayoutWidget.setGeometry(QtCore.QRect(10, 20, 391, 81))
    self.formLayoutWidget.setObjectName("formLayoutWidget")
    self.formLayout_6 = QtWidgets.QFormLayout(self.formLayoutWidget)
    self.formLayout_6.setContentsMargins(0, 0, 0, 0)
    self.formLayout_6.setObjectName("formLayout_6")
    self.lineEdit_13 = QtWidgets.QLineEdit(self.formLayoutWidget)
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_13.setFont(font)
    self.lineEdit_13.setAcceptDrops(True)
    self.lineEdit_13.setAutoFillBackground(False)
    self.lineEdit_13.setFrame(False)
    self.lineEdit_13.setDragEnabled(False)
    self.lineEdit_13.setReadOnly(False)
    self.lineEdit_13.setClearButtonEnabled(False)
    self.lineEdit_13.setObjectName("lineEdit_13")
    self.formLayout_6.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.lineEdit_13)
    self.comboBox_6 = QtWidgets.QComboBox(self.formLayoutWidget)
    self.comboBox_6.setObjectName("comboBox_6")
    self.comboBox_6.addItem("")
    self.comboBox_6.addItem("")
    self.comboBox_6.addItem("")
    self.comboBox_6.addItem("")
    self.formLayout_6.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.comboBox_6)
    self.lineEdit_14 = QtWidgets.QLineEdit(self.formLayoutWidget)
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_14.setFont(font)
    self.lineEdit_14.setAcceptDrops(True)
    self.lineEdit_14.setAutoFillBackground(False)
    self.lineEdit_14.setFrame(False)
    self.lineEdit_14.setDragEnabled(False)
    self.lineEdit_14.setReadOnly(False)
    self.lineEdit_14.setClearButtonEnabled(False)
    self.lineEdit_14.setObjectName("lineEdit_14")
    self.formLayout_6.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.lineEdit_14)
    self.comboBox_7 = QtWidgets.QComboBox(self.formLayoutWidget)
    self.comboBox_7.setObjectName("comboBox_7")
    self.comboBox_7.addItem("")
    self.comboBox_7.addItem("")
    self.comboBox_7.addItem("")
    self.comboBox_7.addItem("")
    self.formLayout_6.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.comboBox_7)
    self.lineEdit_15 = QtWidgets.QLineEdit(self.formLayoutWidget)
    font = QtGui.QFont()
    font.setPointSize(9)
    font.setKerning(True)
    self.lineEdit_15.setFont(font)
    self.lineEdit_15.setAcceptDrops(True)
    self.lineEdit_15.setAutoFillBackground(False)
    self.lineEdit_15.setFrame(False)
    self.lineEdit_15.setDragEnabled(False)
    self.lineEdit_15.setReadOnly(False)
    self.lineEdit_15.setClearButtonEnabled(False)
    self.lineEdit_15.setObjectName("lineEdit_15")
    self.formLayout_6.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.lineEdit_15)
    self.comboBox_8 = QtWidgets.QComboBox(self.formLayoutWidget)
    self.comboBox_8.setObjectName("comboBox_8")
    self.comboBox_8.addItem("")
    self.comboBox_8.addItem("")
    self.comboBox_8.addItem("")
    self.formLayout_6.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.comboBox_8)
    self.checkBox_3 = QtWidgets.QCheckBox(self.groupBox_4)
    self.checkBox_3.setGeometry(QtCore.QRect(10, 110, 120, 18))
    font = QtGui.QFont()
    font.setPointSize(9)
    self.checkBox_3.setFont(font)
    self.checkBox_3.setObjectName("checkBox_3")
    self.checkBox_4 = QtWidgets.QCheckBox(self.groupBox_4)
    self.checkBox_4.setGeometry(QtCore.QRect(310, 110, 250, 21))
    font = QtGui.QFont()
    font.setPointSize(9)
    self.checkBox_4.setFont(font)
    self.checkBox_4.setChecked(False)
    self.checkBox_4.setTristate(False)
    self.checkBox_4.setObjectName("checkBox_4")
    # Apply translated texts and auto-connect slots by object name.
    self.retranslateUi(Dialog)
    QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
_translate = QtCore.QCoreApplication.translate
Dialog.setWindowTitle(_translate("Dialog", "Dialog"))
self.groupBox.setTitle(_translate("Dialog", "Preview"))
self.groupBox_2.setTitle(_translate("Dialog", "General Options"))
self.comboBox_4.setItemText(0, _translate("Dialog", "Single Selection"))
self.comboBox_4.setItemText(1, _translate("Dialog", "No Selection"))
self.checkBox.setText(_translate("Dialog", "Grid"))
self.checkBox_2.setText(_translate("Dialog", "Navigation Bar"))
self.lineEdit_3.setText(_translate("Dialog", "Locale"))
self.comboBox.setItemText(0, _translate("Dialog", "German/Germany"))
self.comboBox.setItemText(1, _translate("Dialog", "Indonesian"))
self.comboBox.setItemText(2, _translate("Dialog", "English"))
self.comboBox.setItemText(3, _translate("Dialog", "Malaysian"))
self.lineEdit_4.setText(_translate("Dialog", "Week starts on"))
self.comboBox_2.setItemText(0, _translate("Dialog", "Sunday"))
self.comboBox_2.setItemText(1, _translate("Dialog", "Monday"))
self.comboBox_2.setItemText(2, _translate("Dialog", "Saturday"))
self.comboBox_2.setItemText(3, _translate("Dialog", "None"))
self.lineEdit_5.setText(_translate("Dialog", "Selection mode"))
self.comboBox_3.setItemText(0, _translate("Dialog", "Single Selection"))
self.comboBox_3.setItemText(1, _translate("Dialog", "No Selection"))
self.lineEdit_9.setText(_translate("Dialog", "Vertical header"))
self.comboBox_5.setItemText(0, _translate("Dialog", "Short day names"))
self.comboBox_5.setItemText(1, _translate("Dialog", "Monday"))
self.comboBox_5.setItemText(2, _translate("Dialog", "Saturday"))
self.comboBox_5.setItemText(3, _translate("Dialog", "None"))
self.lineEdit_8.setText(_translate("Dialog", "Horizontal header"))
self.groupBox_3.setTitle(_translate("Dialog", "Preview"))
self.lineEdit_11.setText(_translate("Dialog", "Current Date"))
self.lineEdit_12.setText(_translate("Dialog", "Maximum Date"))
self.lineEdit_10.setText(_translate("Dialog", "Minimum Date"))
self.groupBox_4.setTitle(_translate("Dialog", "Preview"))
self.lineEdit_13.setText(_translate("Dialog", "Weekday color"))
self.comboBox_6.setItemText(0, _translate("Dialog", "Black"))
self.comboBox_6.setItemText(1, _translate("Dialog", "Red"))
self.comboBox_6.setItemText(2, _translate("Dialog", "Blue"))
self.comboBox_6.setItemText(3, _translate("Dialog", "Grey"))
self.lineEdit_14.setText(_translate("Dialog", "Weekend color"))
self.comboBox_7.setItemText(0, _translate("Dialog", "Black"))
self.comboBox_7.setItemText(1, _translate("Dialog", "Red"))
self.comboBox_7.setItemText(2, _translate("Dialog", "Blue"))
self.comboBox_7.setItemText(3, _translate("Dialog", "Grey"))
self.lineEdit_15.setText(_translate("Dialog", "Header text"))
self.comboBox_8.setItemText(0, _translate("Dialog", "Bold"))
self.comboBox_8.setItemText(1, _translate("Dialog", "Italic"))
self.comboBox_8.setItemText(2, _translate("Dialog", "Standard"))
self.checkBox_3.setText(_translate("Dialog", "First Friday in Blue"))
self.checkBox_4.setText(_translate("Dialog", "May 1 in red"))
if __name__ == "__main__":
    import sys

    # Standalone preview of the generated dialog.
    application = QtWidgets.QApplication(sys.argv)
    preview = QtWidgets.QDialog()
    ui = Ui_Dialog()
    ui.setupUi(preview)
    preview.show()
    sys.exit(application.exec_())
|
from itertools import permutations

# Shortest walk visiting all R marked towns (in any order), on an undirected
# weighted graph: all-pairs shortest paths, then brute-force the visit order.
n_towns, n_roads, n_targets = map(int, input().split())
targets = list(map(int, input().split()))

INF = float("inf")
dist = [[INF] * n_towns for _ in range(n_towns)]
for _ in range(n_roads):
    a, b, cost = map(int, input().split())
    dist[a - 1][b - 1] = cost
    dist[b - 1][a - 1] = cost

# Floyd-Warshall (non-negative weights).
for mid in range(n_towns):
    row_mid = dist[mid]
    for row in dist:
        via = row[mid]
        for j in range(n_towns):
            if via + row_mid[j] < row[j]:
                row[j] = via + row_mid[j]

best = INF
for order in permutations(targets):
    total = sum(dist[order[j] - 1][order[j + 1] - 1] for j in range(n_targets - 1))
    best = min(best, total)
print(best)
from django.shortcuts import render
from django.http import HttpResponse
from .models import Score
def index(request):
    """Render the search landing page."""
    return render(request, 'bwvsearchweb/index.html')
def search(request):
    """Render search results for a chord-sequence query.

    Reads the POST field 'chords'; an absent field now falls back to an
    empty query instead of raising MultiValueDictKeyError (HTTP 500).
    """
    chords = request.POST.get('chords', '')
    hits = Score.objects.with_pitched_chords(chords)
    return render(request, 'bwvsearchweb/search.html', {
        'chords': chords,
        'hits': hits
    })
|
# def form_new_list(new_list):
# return [str(element) for element in new_list if type(element) == int or type(element) == float]
#
# new_list = [1,2,3,4,5,6,9.8,8.2,9.9,1.3,'sumit','saurav',[1,2,'sumit']]
#
# print(form_new_list(new_list))
# Map each of 1..10 to its parity name and print the resulting dict.
print({number: 'odd' if number % 2 != 0 else 'even' for number in range(1, 11)})
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-;
# TODO: 缓存,选择性输出
import os
os.chdir(os.path.split(os.path.realpath(__file__))[0])
def match(text, name):
    """Return the value for *name* in "key:value"-per-line *text*.

    Returns None when the key is absent. Fixes of the original:
    - a line without ':' that equalled *name* raised IndexError;
    - values containing ':' (e.g. URLs) were truncated at the first colon
      (split(':') returned only the second field); partition keeps the rest.
    """
    for line in text.splitlines():
        key, sep, value = line.partition(':')
        if sep and key == name:
            return value
    return None
# Load the config from main.ini when present; otherwise run first-time setup,
# write the file, and exit so the user can re-run.
if os.path.exists("main.ini"):
    with open("main.ini", "r") as f:
        # NOTE: f is deliberately rebound from the file handle to the file's
        # text (with all spaces stripped); match() below parses this string.
        f = f.read().replace(' ', '')
    lib_list, location = match(f, 'lib_list').split(
        ','), match(f, 'location')
else:
    from lib.lib import lib_list
    lib_list = lib_list()
    print("未检测到配置,请输入")
    print("1.输入 API Key(可留空,留空则跳过获取)")
    # api: providers that got a key; text: lines to persist into main.ini.
    api, text = [], []
    for x in lib_list:
        key = input(' -请输入 {0} 的 API Key:'.format(x))
        text.append(x + ': ' + key)
        if key:
            api.append(x)
    text.append('lib_list: ' + ','.join(api))
    print("2.输入城市信息,必填 可参考 https://www.ipip.net/ip.html")
    text.append('location: ' + input(" -请输入纬度经度(示例:39.93,116.40):"))
    with open("main.ini", "w") as f:
        f.write("\n".join(text))
    exit("\n配置已存至./main.ini,请检查后重新运行。")
# Dynamically import each configured provider class (named like its module),
# instantiate it with its API key and the location, and print its report.
for x in lib_list:
    exec("from lib.{0} import {0}".format(x))
    exec("t = {0}('{1}', '{2}')".format(x, match(f, x), location))
    print(t.now())
    print(t.daily())
    print(x + "\n")
|
# -*- coding: utf-8 -*-
import pandas as pd

# Join community labels onto nodes, then attach node info to the test set via
# a concatenated-feature hash.
# NOTE(review): the directory is spelled "iterim" (not "interim"); it must
# match the on-disk layout, so it is left untouched.
nodes = pd.read_csv("data/iterim/nodes.csv")
nodes_with_communities = pd.read_csv("data/iterim/nodes_with_communities.csv")
test = pd.read_csv("data/iterim/test.csv")
# creating test feature hash: row-wise string concatenation of X1..X6.
test["hash"] = pd.Series(test.loc[:, ["X1", "X2", "X3", "X4", "X5", "X6"]].values.tolist()).map(lambda x: ''.join(map(str, x)))
# Inner join (default) on node_id adds the community columns.
nodes = nodes.merge(
    right=nodes_with_communities,
    right_on="node_id",
    left_on="node_id"
)
# Left join keeps every test row even without a matching node hash.
test = test.merge(
    right=nodes,
    right_on="hash",
    left_on="hash",
    how="left"
)
# Overwrites the input test.csv in place.
test.to_csv("data/iterim/test.csv", index=False)
|
import FWCore.ParameterSet.Config as cms

# cmsRun configuration: runs the trigger-object tag-and-probe DQM source over
# PoolSource input and saves the histograms with DQMFileSaver.
process = cms.Process("DQM")

# DQM service
process.load("DQMServices.Core.DQMStore_cfi")

# MessageLogger
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.MessageLogger.cerr.FwkReport.reportEvery = 1000
#process.MessageLogger.cerr.INFO.limit = 1000

import FWCore.ParameterSet.VarParsing as VarParsing
options = VarParsing.VarParsing ('analysis')
# Fix: the help string was a copy-paste leftover ("whether we are running on
# miniAOD or not"); dqmTag is the DQM output workflow path.
options.register('dqmTag','/HLT/TrigObjTnpSource/All',options.multiplicity.singleton,options.varType.string,"DQM output workflow path used by the DQMFileSaver")
options.parseArguments()

process.options = cms.untracked.PSet(
    wantSummary = cms.untracked.bool( True ),
    numberOfThreads = cms.untracked.uint32( 4 ),
    numberOfStreams = cms.untracked.uint32( 4 ),
)

# Source
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring(options.inputFiles)
)
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(options.maxEvents) )

process.load("DQM.HLTEvF.trigObjTnPSource_cfi")
process.load('DQMServices.Components.DQMFileSaver_cfi')
process.dqmSaver.workflow = options.dqmTag

process.endp = cms.EndPath( process.trigObjTnPSource + process.dqmSaver )
|
class Solution:
    def areSentencesSimilar(self, sentence1: List[str], sentence2: List[str], similarPairs: List[List[str]]) -> bool:
        """Return True iff the sentences have equal length and the words at
        each index are either identical or directly listed as a similar pair.

        Similarity here is symmetric but NOT transitive (LeetCode 734), so a
        simple adjacency map of direct pairs suffices — no union-find needed.

        Improvements over the original: the pair map stores sets instead of
        lists (O(1) membership instead of O(pairs)), and a dead re-check of
        firstWord != secondWord after the `continue` was removed.
        """
        if len(sentence1) != len(sentence2):
            return False

        # word -> set of directly-similar words (symmetric closure of pairs).
        similar = {}
        for a, b in similarPairs:
            similar.setdefault(a, set()).add(b)
            similar.setdefault(b, set()).add(a)

        for w1, w2 in zip(sentence1, sentence2):
            if w1 == w2:
                continue
            # Unequal words must be directly paired (map is symmetric, so one
            # direction is enough).
            if w2 not in similar.get(w1, ()):
                return False
        return True
|
from newio.sync import Event
class Future:
def __init__(self):
self._event = Event()
self._result = None
self._exception = None
async def set_result(self, result):
self._result = result
await self._event.set()
async def set_exception(self, exception):
self._exception = exception
await self._event.set()
async def _get_result(self):
await self._event.wait()
if self._exception is not None:
raise self._exception
return self._result
def __await__(self):
"""Future is awaitable
PS: I don't know how to implement __await__, but I know coroutine
implemented it, so just forward the call!
"""
return self._get_result().__await__()
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
import heapq
import operator
import numpy
import datetime
import time
def factorization(train, r, g, au, ts, bias=False, svd=True, svd_pp=False, steps=25, gamma=0.04, gamma1=0.04 , gamma2=0.04,
                  slow_rate=0.93, Lambda=0.1, k=30, k1=30, k2=30, ratio=None, seed=0, w1=1, w2=1, w3=1, pop=False):
    """Train a latent-factor recommender by SGD (Python 2 code).

    Three factor blocks are learned per interaction: item id (k dims, weight
    w1), release date (k1 dims, weight w2) and genre (k2 dims, weight w3),
    plus optional biases and an optional popularity-by-item-age term (pop).

    Args:
        train: {user: {item: rating}} training interactions.
        r: {item: release_date} as YYYYMMDD ints.
        g: {item: genre id}.
        au: {genre: count}, used to scale genre-factor updates.
        ts: {item: timestamp}, used by the popularity term.
        bias, svd, svd_pp: enable bias terms / factor model / SVD++ extras.
        steps: SGD epochs; gamma/gamma1/gamma2: per-block learning rates,
            each decayed by slow_rate every epoch; Lambda: L2 regularization.
        ratio: when set, negative-sample implicit feedback at this ratio.

    Side effects: (re)initializes the module-level model state (_bu, _bi,
    _pu*, _qi*, _movie_set, _avr, ...) that __predict and the recommend_*
    helpers read. Prints RMSE/MAE per epoch.
    """
    global release, genres, aux
    release = r
    genres = g
    aux = au
    global _user_items
    _user_items = train
    numpy.random.seed(seed)
    global _bias, _svd, _svd_pp, _k, _k1, _k2, _weight1, _weight2, _weight3, _times
    _bias = bias
    _svd = svd
    _svd_pp = svd_pp
    _k = k
    _k1 = k1
    _k2 = k2
    _weight1 = w1
    _weight2 = w2
    _weight3 = w3
    _times = ts
    global _bu, _bi, _pu, _pu1, _pu2, _pi, _qi, _qt, _qi1, _qi2, _qi3, _z, _movie_list, _movie_set, _avr, _tot
    global _bt, _bd
    _bt = {}
    _bu = {}
    _bi = {}
    _pu = {}
    _pu1 = {}
    _pu2 = {}
    _qi = {}
    _qi1 = {}
    _qi2 = {}
    _qi3 = {}
    _pi = {}
    _qt = {}
    _z = {}
    _bd = {}
    sqrt_item_len = {}
    _movie_list = []
    _avr = 0
    _tot = 0
    y = {}
    # Pre-seed popularity factors for item ages 0..36499 days (~100 years).
    for i in range(36500):
        _qt.setdefault(i, numpy.random.random((_k2, 1)) / numpy.sqrt(_k2))
    # Random initialisation of every factor vector touched by the data.
    for user, items in _user_items.iteritems():
        if _bias:
            _bu.setdefault(user, 0)
        if _svd:
            _pu.setdefault(user, numpy.random.random((_k, 1)) / numpy.sqrt(_k) * w1)
            _pu1.setdefault(user, numpy.random.random((_k1, 1)) / numpy.sqrt(_k1) * w2)
            _pu2.setdefault(user, numpy.random.random((_k2, 1)) / numpy.sqrt(_k2) * w3)
        if _svd_pp:
            sqrt_item_len.setdefault(user, numpy.sqrt(len(items)))
        for item, rating in items.iteritems():
            _movie_list.append(item)
            if _bias:
                _bi.setdefault(item, 0)
            if _svd:
                _qi.setdefault(item, numpy.random.random((_k, 1)) / numpy.sqrt(_k) * w1)
                _qi1.setdefault(release[item], numpy.random.random((_k1, 1)) / numpy.sqrt(_k1) * w2)
                genres.setdefault(item, 0)
                _qi2.setdefault(genres[item], numpy.random.random((_k2, 1)) / numpy.sqrt(_k2) * w3)
                _pi.setdefault(item, numpy.random.random((_k2, 1)) / numpy.sqrt(_k2))
                _bd.setdefault(genres[item], 0)
            if _svd_pp:
                y.setdefault(item, numpy.zeros((_k, 1)))
            _avr += rating
            _tot += 1
    _movie_set = set(_movie_list)
    # Global mean rating (true division via the __future__ import).
    _avr /= _tot
    for step in xrange(steps):
        rmse_sum = 0
        mae_sum = 0
        for user, items in _user_items.iteritems():
            # Implicit-feedback mode: mix in random negatives at `ratio`.
            samples = items if not ratio else __random_negative_sample(items, ratio)
            for item, rating in samples.iteritems():
                eui = rating - __predict(user, item)
                rmse_sum += eui ** 2
                mae_sum += abs(eui)
                if _bias:
                    _bu[user] += gamma * (eui - Lambda * _bu[user])
                    _bi[item] += gamma * (eui - Lambda * _bi[item])
                if _svd:
                    release_date = release[item]
                    f2 = genres[item]
                    timestamp = _times[item]
                    # Simultaneous SGD update of each enabled factor pair
                    # (tuple assignment so both sides use the pre-update values).
                    if _weight1 != 0:
                        _pu[user], _qi[item] = _pu[user] + gamma * (eui * _qi[item] - Lambda * _pu[user]), _qi[
                            item] + gamma * (eui * (_pu[user] + _z[user] if _svd_pp else _pu[user]) - Lambda * _qi[item])
                    if _weight2 != 0:
                        _pu1[user], _qi1[release_date] = _pu1[user] + gamma1 * (eui * _qi1[release_date] - Lambda * _pu1[user]), _qi1[
                            release_date] + gamma1 * (eui * (_pu1[user] + _z[user] if _svd_pp else _pu1[user]) - Lambda * _qi1[release_date])
                    if _weight3 != 0:
                        # Genre factors are shared by many items, so updates are
                        # scaled down by the genre count aux[f2].
                        _pu2[user], _qi2[f2] = _pu2[user] + gamma2 * (eui / aux[f2] * _qi2[f2] - Lambda * _pu2[user]), _qi2[
                            f2] + gamma2 / aux[f2] * (eui * (_pu2[user] + _z[user] if _svd_pp else _pu2[user]) - Lambda * _qi2[f2])
                        # _bd[f2] += gamma * (eui - Lambda * _bd[f2])
                    if pop:
                        # NOTE(review): compute_days returns a float while _qt is
                        # pre-seeded with int keys — confirm the pop=True path.
                        days = compute_days(timestamp, release_date)
                        _pi[item], _qt[days] = _pi[item] + gamma2 * (eui * _qt[days] - Lambda * _pi[item]), _qt[
                            days] + gamma2 * (eui * (_pi[item] + _z[user] if _svd_pp else _pi[item]) - Lambda * _qt[days])
        # Per-epoch learning-rate decay.
        gamma *= slow_rate
        gamma1 *= slow_rate
        gamma2 *= slow_rate
        print "step: %s, rmse: %s, mae: %s" % (step + 1, numpy.sqrt(rmse_sum / _tot), mae_sum / _tot)
def __random_negative_sample(items, ratio=1):
    """Keep all positives (label 1) and draw about ratio*len(items) random
    negatives (label 0) from the global movie pool; attempts are capped at
    twice the target because draws may collide with existing keys."""
    samples = dict((item, 1) for item in items.iterkeys())
    target = len(items) * ratio
    picked = 0
    for _ in xrange(target * 2):
        candidate = _movie_list[int(numpy.random.random() * _tot)]
        if candidate in samples:
            continue
        samples[candidate] = 0
        picked += 1
        if picked > target:
            break
    return samples
def __predict(user, item, printF=False):
    """Predict the rating of (user, item) from the enabled model components.

    Cold-start users/items/release-dates/genres are given zero vectors, so the
    prediction degrades gracefully to the bias/global-mean part.
    """
    rui = 0
    if _bias:
        _bu.setdefault(user, 0)
        _bi.setdefault(item, 0)
        rui += _avr + _bu[user] + _bi[item]
    if _svd:
        release_date = release[item]
        f2 = genres[item]
        aux.setdefault(f2, 1)
        timestamp = _times[item]
        _pu.setdefault(user, numpy.zeros((_k, 1)))
        _qi.setdefault(item, numpy.zeros((_k, 1)))
        _pu1.setdefault(user, numpy.zeros((_k1, 1)))
        _qi1.setdefault(release_date, numpy.zeros((_k1, 1)))
        _pu2.setdefault(user, numpy.zeros((_k2, 1)))
        # Bug fix: genre factors are k2-dimensional (see factorization), so the
        # cold-start default must be (_k2, 1); the old (_k1, 1) zeros broke the
        # element-wise product against _pu2 whenever k1 != k2.
        _qi2.setdefault(f2, numpy.zeros((_k2, 1)))
        _qi3.setdefault(f2, numpy.zeros((_k2, 1)))
        # days=compute_days(timestamp, release_date)
        # _pi.setdefault(item, numpy.zeros((_k1, 1)))
        # _qt.setdefault(days, numpy.zeros((_k1, 1)))
        s1 = 0
        s2 = 0
        s3 = 0
        s4 = 0
        if _weight1 != 0:
            s1 = numpy.sum(_pu[user] * _qi[item])
        if _weight2 != 0:
            s2 = numpy.sum(_pu1[user] * _qi1[release_date])
        if _weight3 != 0:
            s3 = numpy.sum(_pu2[user] * _qi2[f2]) / numpy.sqrt(aux[f2])
        # s4=_bd[f2]
        # s4=numpy.sum(_pi[item] * _qt[days])
        if printF:
            print(user, s1, s2, s3)
        return s1 + s2 + s3 + rui + s4
    # Robustness fix: with svd disabled the function previously fell off the
    # end and returned None, crashing the SGD loop; return the bias-only score.
    return rui
def recommend_explicit(user):
    """Score every movie the user has not rated yet; yields (item, score)."""
    seen = _user_items[user]
    scores = dict((movie, __predict(user, movie))
                  for movie in _movie_set if movie not in seen)
    return scores.iteritems()
def recommend_implicit(user, n):
    """Return the n highest-scored (item, score) pairs the user hasn't rated."""
    seen = _user_items[user]
    scores = dict((movie, __predict(user, movie))
                  for movie in _movie_set if movie not in seen)
    return heapq.nlargest(n, scores.iteritems(), key=operator.itemgetter(1))
def compute_days(timestamp, release_date):
    """Days elapsed between an epoch timestamp and a YYYYMMDD release date
    (local time, fractional), clamped at zero for pre-release timestamps."""
    released = datetime.datetime.strptime(str(release_date), "%Y%m%d")
    release_epoch = time.mktime(released.timetuple())
    elapsed_days = (timestamp - release_epoch) / 86400
    return elapsed_days if elapsed_days >= 0 else 0
|
# Demo: determinant of a matrix via numpy.linalg.
from numpy import linalg as LA
import numpy as np

# 2x2 identity matrix; its determinant is 1.
a = np.array([[1, 0], [0, 1]])
print(a)
print(LA.det(a))  # same as np.linalg.det(a)
|
"""
墙壁上挂着一个圆形的飞镖靶。现在请你蒙着眼睛向靶上投掷飞镖。
投掷到墙上的飞镖用二维平面上的点坐标数组表示。飞镖靶的半径为 r 。
请返回能够落在 任意 半径为 r 的圆形靶内或靶上的最大飞镖数。
示例 1:
输入:points = [[-2,0],[2,0],[0,2],[0,-2]], r = 2
输出:4
解释:如果圆形的飞镖靶的圆心为 (0,0) ,半径为 2 ,所有的飞镖都落在靶上,此时落在靶上的飞镖数最大,值为 4 。
示例 2:
输入:points = [[-3,0],[3,0],[2,6],[5,4],[0,9],[7,8]], r = 5
输出:5
解释:如果圆形的飞镖靶的圆心为 (0,4) ,半径为 5 ,则除了 (7,8) 之外的飞镖都落在靶上,此时落在靶上的飞镖数最大,值为 5 。
示例 3:
输入:points = [[-2,0],[2,0],[0,2],[0,-2]], r = 1
输出:1
示例 4:
输入:points = [[1,2],[3,5],[1,-1],[2,3],[4,1],[1,3]], r = 2
输出:4
提示:
1 <= points.length <= 100
points[i].length == 2
-10^4 <= points[i][0], points[i][1] <= 10^4
1 <= r <= 5000
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/maximum-number-of-darts-inside-of-a-circular-dartboard
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
"""
import copy
from math import sqrt
""" 大体思路是,两点定圆,遍历所有两点,得到所有圆心列表。用圆心列表求的所有点对应的距离得到count"""
class Solution:
    """LeetCode 1453: max darts in any radius-r circle.

    Any optimal circle can be assumed to pass through two of the points (or
    cover a single point), so enumerate the O(n^2) candidate centers from
    point pairs and count coverage for each.
    """

    # Tolerance for float comparisons: centers are computed with sqrt, so
    # points exactly on the boundary may land a hair outside r without it.
    _EPS = 1e-7

    def numPoints(self, points: list, r: int) -> int:
        """Return the maximum number of darts inside or on some radius-r circle."""
        if len(points) == 1:
            return 1
        centers = self.gen_circle_run(points, r)
        if not centers:
            # No two points fit on one radius-r circle: best is a single dart.
            return 1
        # (The stray debug print of the maximum was removed.)
        return max(self.count_in_circle(center, points, r) for center in centers)

    def gen_circle_run(self, points, r):
        """Candidate centers: for each unordered pair of points, the (up to
        two) centers of radius-r circles passing through both, deduplicated."""
        centers = []
        for i in range(len(points)):
            for j in range(i + 1, len(points)):
                for center in self.gen_circle(points[i], points[j], r):
                    if center and center not in centers:
                        centers.append(center)
        return centers

    def gen_circle(self, point_a, point_b, r):
        """Centers of the radius-r circles through both points, solved from the
        two circle equations; (None, None) when the points are more than 2r
        apart (negative discriminant under the sqrt)."""
        try:
            if point_a[0] == point_b[0]:
                # Vertical chord: both centers share the midpoint's y.
                y0 = y1 = (point_a[1] + point_b[1]) / 2
                dx = sqrt(r ** 2 - (y0 - point_a[1]) ** 2)
                x0 = point_b[0] - dx
                x1 = point_b[0] + dx
            else:
                c1 = (point_b[0] ** 2 + point_b[1] ** 2 - point_a[0] ** 2 - point_a[1] ** 2) / 2 / (point_b[0] - point_a[0])
                c2 = (point_b[1] - point_a[1]) / (point_b[0] - point_a[0])
                qa = 1 + c2 ** 2
                qb = 2 * (point_a[0] - c1) * c2 - 2 * point_a[1]
                qc = (point_a[0] - c1) ** 2 + point_a[1] ** 2 - r ** 2
                disc = qb * qb - 4 * qa * qc
                y0 = (-qb + sqrt(disc)) / (2 * qa)
                y1 = (-qb - sqrt(disc)) / (2 * qa)
                x0 = c1 - c2 * y0
                x1 = c1 - c2 * y1
        except ValueError:
            # Narrowed from a bare except: only sqrt of a negative can raise here.
            return None, None
        return [x0, y0], [x1, y1]

    def count_in_circle(self, center, points, r):
        """Count points within distance r of center (with _EPS tolerance)."""
        limit = r ** 2 + self._EPS
        return sum(1 for p in points
                   if (center[0] - p[0]) ** 2 + (center[1] - p[1]) ** 2 <= limit)
if __name__ == '__main__':
    # Example 3 from the problem statement: four axis points, radius 1.
    darts = [[-2, 0], [2, 0], [0, 2], [0, -2]]
    radius = 1
    # Other sample inputs to try:
    #   [[-3, 0], [3, 0], [2, 6], [5, 4], [0, 9], [7, 8]], radius 5
    #   [[1, 2], [3, 5], [1, -1], [2, 3], [4, 1], [1, 3]], radius 2
    Solution().numPoints(darts, radius)
|
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
import pyspark
from pyspark import SparkContext, SparkConf
from pyspark.sql import SparkSession
from pyspark.sql import functions as F
from pyspark.sql.functions import *
from pyspark.sql import *
from pyspark.sql.types import *
from pyspark.ml.feature import StringIndexer,OneHotEncoder,VectorAssembler,IndexToString
from pyspark.ml import Pipeline
from pyspark.ml.evaluation import MulticlassClassificationEvaluator,BinaryClassificationEvaluator
from pyspark.ml.classification import LogisticRegression,RandomForestClassifier
from pyspark.ml.tuning import ParamGridBuilder,CrossValidator
from sklearn.metrics import classification_report
from sklearn.metrics import roc_curve
# One SparkSession per job; Hive support enables metastore-backed tables.
spark = SparkSession.builder \
    .appName("test") \
    .enableHiveSupport() \
    .getOrCreate()
sc = spark.sparkContext
# Toy dataset. Column 0 ("labels") is the binary target; gender contains
# deliberately dirty values (None, "null", "unknow", "") exercised by the
# cleaning UDF below; children contains a dirty "yes1" variant.
sample_dataset = [
    (0, "unknow", 37, 10, "no", 3, 18, 7, 4),
    (0, "female", 27, 4, "no", 4, 14, 6, 4),
    (0, "female", 32, 15, "yes", 1, 12, 1, 4),
    (0, None, 57, 15, "yes", 5, 18, 6, 5),
    (0, "null", 22, 0.75, "no", 2, 17, 6, 3),
    (0, "female", 32, 1.5, "no", 2, 17, 5, 5),
    (0, "female", 22, 0.75, "no", 2, 12, 1, 3),
    (0, "", 57, 15, "yes", 2, 14, 4, 4),
    (0, "female", 32, 15, "yes", 4, 16, 1, 2),
    (0, "male", 22, 1.5, "no", 4, 14, 4, 5),
    (0, "male", 37, 15, "yes", 2, 20, 7, 2),
    (0, "male", 27, 4, "yes", 4, 18, 6, 4),
    (0, "male", 47, 15, "yes", 5, 17, 6, 4),
    (0, "female", 22, 1.5, "no", 2, 17, 5, 4),
    (0, "female", 27, 4, "no", 4, 14, 5, 4),
    (0, "female", 37, 15, "yes", 1, 17, 5, 5),
    (0, "female", 37, 15, "yes", 2, 18, 4, 3),
    (0, "female", 22, 0.75, "no", 3, 16, 5, 4),
    (0, "female", 22, 1.5, "no", 2, 16, 5, 5),
    (0, "female", 27, 10, "yes", 2, 14, 1, 5),
    (1, "female", 32, 15, "yes", 3, 14, 3, 2),
    (1, "female", 27, 7, "yes", 4, 16, 1, 2),
    (1, "male", 42, 15, "yes", 3, 18, 6, 2),
    (1, "female", 42, 15, "yes", 2, 14, 3, 2),
    (1, "male", 27, 7, "yes", 2, 17, 5, 4),
    (1, "male", 32, 10, "yes1", 4, 14, 4, 3),
    (1, "male", 47, 15, "yes1", 3, 16, 4, 2),
    (0, "male", 37, 4, "yes1", 2, 20, 6, 4)
]
# NOTE(review): "labels" is the target while "label" appears to be a numeric
# feature — confirm the column names against the data source.
columns = ["labels", "gender", "age", "label", "children", "religiousness", "education", "occupation", "rating"]
pdf = pd.DataFrame(sample_dataset, columns=columns)
data = spark.createDataFrame(pdf)
data.show(5)
# Missing-value counting (kept for reference):
#data.agg(*[F.sum(F.when(df[c].isin(np.nan,"null"), 1).when(F.isnull(df[c]), 1).otherwise(0)).alias(c) for c in df.columns])
# Impute dirty/missing gender markers with "male".
# NOTE(review): confirm "male" is the intended fill value; the string "None"
# is checked but Python None only via == None.
cleanStringUDF = F.udf(lambda x : "male" if x in ("null","unknow","","None") or x == None else x)
#splitCalUDF = F.udf(lambda x : float(x.split("*")[0])*float(x.split("*")[1]), returnType=StringType())
# Missing-value handling.
data = data.withColumn("gender",cleanStringUDF("gender"))
# .withColumn("religiousness",splitCalUDF("religiousness"))
# Type handling: cast numeric columns to double; index + one-hot categoricals.
feature1_list = ['age','label','religiousness','education','occupation','rating']
feature2_list = ['gender','children']
for c in feature1_list:
    data = data.withColumn(c, data[c].cast(DoubleType()))
indexers = [StringIndexer(inputCol=c, outputCol='{0}_indexed'.format(c),handleInvalid='error') for c in feature2_list]
encoders = [OneHotEncoder(dropLast=True,inputCol=indexer.getOutputCol(),
                          outputCol="{0}_encoded".format(indexer.getOutputCol())) for indexer in indexers]
assembler = VectorAssembler(inputCols=feature1_list+[encoder.getOutputCol() for encoder in encoders],outputCol="features")
feature_pipeline = Pipeline(stages=indexers + encoders + [assembler])
feature_model = feature_pipeline.fit(data)
#index y
# Train/test split.
#labelIndexer = StringIndexer(inputCol = "affairs", outputCol = "indexedLabel").fit(df)
#data = labelIndexer.transform(df)
Data = feature_model.transform(data)
print("所有的特征名称:{0}".format(Data.columns))
train_data, test_data = Data.randomSplit([0.7, 0.3],seed=1994)
print("训练样本数:%d\n测试样本数:%d"%(train_data.count(),test_data.count()))
# Random forest on the assembled features.
rf = RandomForestClassifier(numTrees=100, featuresCol='features', labelCol="labels", seed=7).fit(train_data)
Predictions = rf.transform(test_data)
#f1 = MulticlassClassificationEvaluator(predictionCol='prediction',labelCol='affairs',metricName='f1',metricLabel=1).evaluate(lrPredictions)
#accuracy = MulticlassClassificationEvaluator(predictionCol='prediction',labelCol='affairs',metricName='accuracy',metricLabel=1).evaluate(lrPredictions)
#weightedPrecision = MulticlassClassificationEvaluator(predictionCol='prediction',labelCol='affairs',metricName='weightedPrecision',metricLabel=1).evaluate(lrPredictions)
#weightedRecall = MulticlassClassificationEvaluator(predictionCol='prediction',labelCol='affairs',metricName='weightedRecall',metricLabel=1).evaluate(lrPredictions)
# Classification report, computed driver-side via pandas/sklearn.
report = Predictions.select("prediction","labels","features","probability").toPandas()
print(classification_report(y_true=report['labels'],y_pred=report['prediction']))
# Confusion-matrix counts [[TP,FN],[TN,FP]].
TP = Predictions.filter(Predictions['prediction'] == 1).filter(Predictions['labels'] == 1).count()
FN = Predictions.filter(Predictions['prediction'] == 0).filter(Predictions['labels'] == 1).count()
TN = Predictions.filter(Predictions['prediction'] == 0).filter(Predictions['labels'] == 0).count()
FP = Predictions.filter(Predictions['prediction'] == 1).filter(Predictions['labels'] == 0).count()
# Precision = TP/(TP+FP)
precision = TP/(TP+FP)
# Recall = TP/(TP+FN)
recall = TP/(TP+FN)
# F1 = 2*precision*recall/(precision+recall); the old comment wrongly showed
# the accuracy formula here.
F1 =(2 * precision * recall)/(precision + recall)
# Accuracy = (TP+TN)/(TP+TN+FP+FN)
accuracy = (TP+TN)/(TP+TN+FP+FN)
auc = BinaryClassificationEvaluator(labelCol='labels').evaluate(Predictions)
print(" f1:%1.2f\n accuracy%1.2f\n Precision:%1.2f\n Recall:%1.2f\n auc:%1.2f " % (F1, accuracy, precision, recall, auc))
########## ELASTICSEARCH CONFIGURATION
from elasticsearch import Elasticsearch, RequestsHttpConnection
from urlparse import urlparse
import os

ES_URL = os.environ.get('SEARCHBOX_URL') or 'http://127.0.0.1:9200/'

# Default the port to 80 when the URL does not specify one. Fix: insert the
# port into the network location instead of appending to the whole URL — the
# old code turned 'http://host/path/' into the invalid 'http://host/path/:80'.
parsed = urlparse(ES_URL)
if not parsed.port:
    ES_URL = parsed._replace(netloc=parsed.netloc + ':80').geturl()

ES_CLIENT = Elasticsearch([ES_URL], connection_class=RequestsHttpConnection)
########## END ELASTICSEARCH CONFIGURATION
|
# Nome: El Dorado
# ID: 1645
# Resposta: Accepted
# Linguagem: Python 3 (Python 3.4.3) [+1s]
# Tempo: 0.688s
# Tamanho: 735 Bytes
# Submissao: 05/06/16 20:03:38
# -*- coding: utf-8 -*-
import functools
def cache(func):
    """Memoize *func* on its positional arguments (which must be hashable).

    Fixes over the original: the cache was stored on the decorator function
    itself (``cache.cache``), so every decoration reset it and all decorated
    functions shared one dict (colliding on equal argument tuples). Each
    wrapper now owns a private dict, and functools.wraps preserves the
    wrapped function's metadata.
    """
    memo = {}

    @functools.wraps(func)
    def cached(*args):
        if args not in memo:
            memo[args] = func(*args)
        return memo[args]
    return cached
@cache
def consec_seq_util(arr, n, k):
    """Count strictly increasing subsequences of length k in arr[:n] that
    start with arr[0] (memoized on the argument tuple via @cache)."""
    if k == 1:
        return 1
    return sum(consec_seq_util(arr[i:], n - i, k - 1)
               for i in range(n) if arr[0] < arr[i])
def consec_seq(arr, n, k):
    """Total number of strictly increasing subsequences of length k in arr[:n],
    summed over every possible starting index."""
    return sum(consec_seq_util(arr[i:], n - i, k) for i in range(n - k + 1))
# Read "N K" then the sequence, until the N == 0 sentinel line.
while True:
    header = input().split()
    N, K = int(header[0]), int(header[1])
    if N == 0:
        break
    sequence = tuple(int(token) for token in input().split())
    print(consec_seq(sequence, N, K))
import os
import json
from datetime import datetime

from flask import Flask
from flask import render_template, url_for, request, jsonify
from flask import send_from_directory
# Fix: the flask.ext.* import namespace was removed in Flask 1.0; import the
# extension package directly.
from flask_sqlalchemy import SQLAlchemy
from werkzeug.utils import secure_filename

# Upload whitelist; compared case-insensitively by allowed_file().
ALLOWED_EXTENSIONS = ['JPEG', 'JPG', 'PNG', 'GIF']

app = Flask(__name__)

# Setup routing for static files
app.jinja_env.globals['static'] = (
    lambda filename: url_for('static', filename=filename))

# Setup folder paths for uploads and SQL
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.getcwd() + '/tmp/store.db'
app.config['UPLOAD_FOLDER'] = os.path.join(os.getcwd(), 'tmp/uploads')

# Instantiate db session
db = SQLAlchemy(app)
# Models
class Zone(db.Model):
    """A labeled region of a floor; the geometry lives in `shape` as JSON."""
    id = db.Column(db.Integer, primary_key=True)
    label = db.Column(db.String(80))
    shape = db.Column(db.Text)  # store as a json blob
    extras = db.Column(db.Text)  # store extra stuff as a blob
    floor_id = db.Column(db.Integer, db.ForeignKey('floor.id'))
    floor = db.relationship('Floor',
                            backref=db.backref('zones', lazy='dynamic'))

    def __init__(self, label, shape, floor, extras=None):
        self.shape = shape
        self.label = label
        self.floor = floor
        # Bug fix: a caller-supplied extras blob was previously discarded
        # (only the None case assigned self.extras).
        self.extras = '{}' if extras is None else extras

    def __repr__(self):
        return '<Zone %r>' % self.label

    def toObj(self):
        """JSON-serializable dict: label plus the parsed shape (extras are
        intentionally not exposed, matching the original payload)."""
        return {
            'label': self.label,
            'shape': json.loads(self.shape),
        }
class Floor(db.Model):
    """A building floor: a display label and the uploaded floorplan image."""
    id = db.Column(db.Integer, primary_key=True)
    label = db.Column(db.String(50))
    img_name = db.Column(db.String(80))
    extras = db.Column(db.Text)  # store extra stuff as a blob

    def __init__(self, label, img_name, extras=None):
        self.label = label
        self.img_name = img_name
        # Bug fix: a caller-supplied extras blob was previously discarded
        # (only the None case assigned self.extras).
        self.extras = '' if extras is None else extras

    def __repr__(self):
        return '<Floor %r>' % self.label

    def toObj(self):
        """JSON-serializable dict including all zones (via the backref)."""
        return {
            'label': self.label,
            'imgname': self.img_name,
            'zones': [zone.toObj() for zone in self.zones],
        }
# Misc
def allowed_file(filename):
    """True when filename carries an extension in ALLOWED_EXTENSIONS
    (compared case-insensitively on the part after the last dot)."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension.upper() in ALLOWED_EXTENSIONS
# Routes
@app.route('/')
def list_floors():
    """Render the index page listing every stored floor."""
    return render_template('floor_list.html', floors=Floor.query.all())
@app.route('/create', methods=['GET', 'POST'])
def create_new_floor():
    """GET: render the floor editor. POST: persist a Floor JSON payload.

    request.data should be a Floor JSON object (see static/js/models.js).
    """
    if request.method == 'GET':
        return render_template('create.html')
    try:
        data = json.loads(request.data)
        fid = process_floor_json(data)
    except (ValueError, KeyError, TypeError):
        # Narrowed from a bare except: malformed JSON or a payload missing
        # required keys. Other failures now surface instead of being hidden.
        return jsonify(success=False, msg='Invalid Floor JSON')
    return jsonify(success=True, floorId=fid)
def process_floor_json(data):
    """Persist a floor and its zones from a decoded JSON payload; return the
    new floor's id (available after commit).

    NOTE(review): the payload key is 'img' here while toObj emits 'imgname' —
    confirm the client really uses both names.
    """
    floor = Floor(data['label'], data['img'])
    db.session.add(floor)
    for zone in data['zones']:
        shape_json = json.dumps(zone['shape'])
        # .get(): 'extras' is optional in the payload; the old indexing raised
        # KeyError for zones without it.
        z = Zone(zone['label'], shape_json, floor, zone.get('extras'))
        db.session.add(z)
    db.session.commit()
    return floor.id
@app.route('/view/<floor_id>')
def get_floor_data(floor_id):
    """Render the viewer for one floor, or a not-found page."""
    try:
        floor = Floor.query.filter(Floor.id == int(floor_id)).first()
    except (ValueError, TypeError):
        # Narrowed from a bare except: non-numeric floor_id. Database errors
        # now propagate instead of masquerading as "not found".
        return render_template('not_found.html')
    if floor:
        return render_template('view.html', floor=floor)
    return render_template('not_found.html')
@app.route('/fetch/floor/<floor_id>')
def fetch_floor_data(floor_id):
    """JSON API: return one floor (with zones) serialized via Floor.toObj."""
    try:
        floor = Floor.query.filter(Floor.id == int(floor_id)).first()
    except (ValueError, TypeError):
        # Narrowed from a bare except: non-numeric floor_id. Database errors
        # now propagate instead of masquerading as "not found".
        return jsonify(success=False, msg='Could not find floor')
    if floor:
        return jsonify(success=True, floor=floor.toObj(), msg='')
    return jsonify(success=False, msg='Could not find floor')
@app.route('/up', methods=['POST'])
def upload_file():
    """Save the raw request body as an uploaded image.

    The client supplies the original name in the X-File-Name header; the
    stored name is prefixed with a timestamp to avoid collisions.
    (The old `if request.method == 'POST'` guard was removed: the route only
    accepts POST, and the guard's fall-through returned None.)
    """
    # `or ''`: a missing header previously crashed secure_filename(None).
    filename = secure_filename(request.headers.get('X-File-Name') or '')
    if not allowed_file(filename):
        return jsonify(success=False, msg='Invalid image format')
    filename = datetime.now().strftime('%Y%m%d%H%M%S%f') + '-' + filename
    try:
        # 'wb' + context manager: request.data is bytes, and the old code
        # opened in text mode and leaked the handle on a failed write.
        with open(os.path.join(app.config['UPLOAD_FOLDER'], filename), 'wb') as f:
            f.write(request.data)
    except OSError:
        return jsonify(success=False, msg='Could not save file')
    return jsonify(success=True, imgname=filename)
@app.route('/uploads/<filename>')
def uploaded_file(filename):
    """Serve a previously uploaded image from the uploads folder."""
    return send_from_directory(app.config['UPLOAD_FOLDER'], filename)
if __name__ == '__main__':
    # Development server only; use a production WSGI server for deployment.
    app.run(debug=True, port=5000)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.