code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is an exercise for practising socket programming. Learning the basic concept of
how a socket send and receive data from a client side. Below are functions that I have implementated in the
Ftp Server and Ftp Client script.
Ftp Server function:
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1) put function
2) get function
3) server_authentication
4) switch directory function
5) remove a file function
6) display the file or directory
7) Get a md5 function
8) show progress_bar
"""
import socketserver
import json
import time
import os
import hashlib
import subprocess
from s13.Day09.ftp_server import user_info
from s13.Day09.bin.context import FTP_SERVER
class Ftp_server(socketserver.BaseRequestHandler):
    """Per-connection handler: authenticates the client, then dispatches
    each received JSON task message to the matching ``stask_*`` method of
    Ftp_server_function until the client disconnects."""

    def handle(self):
        print(self.request,self.client_address,self.server)
        conn = self.request
        # First message from the client carries the credentials.
        authentication = conn.recv(1024) #{"password": "ken123", "username": "ken"}
        # obj = __import__("Ftp_server_function", fromlist=True)
        f = Ftp_server_function()
        auth_data = json.loads(authentication.decode())
        is_auth = f.server_auth(auth_data,conn)
        time.sleep(1) #TODO Instead of sleep, it can receive a started message from client side
        if is_auth:
            send_data = "Hello {}! Welcome to MyFTP Server".format(auth_data['username'].upper())
            conn.sendall(bytes(send_data, encoding='utf-8'))
        # Command loop: each message is a JSON dict whose 'action' key
        # selects a stask_<action> handler on Ftp_server_function.
        while is_auth:
            try:
                recv_data = conn.recv(1024)
                if len(recv_data) == 0: break  # empty read: client closed the connection
                print('[{}] {} says: {}'.format(time.ctime(), self.client_address, recv_data.decode()))
                task_msg = json.loads(recv_data.decode())
                task_action = task_msg.get('action')
                command = 'stask_%s' %task_action
                is_exists = hasattr(f,command)
                if is_exists:
                    func = getattr(f,command)
                    func(task_msg,conn)
                else:
                    print("task action is not supported", task_action)
            except Exception:
                # NOTE(review): swallows every handler error and silently
                # drops the connection -- consider logging the exception.
                break
class Ftp_server_function():
    """Server-side implementations of the FTP task actions.

    Methods named ``stask_<action>`` are looked up and invoked dynamically
    from Ftp_server.handle() based on the client's JSON ``action`` field.
    Every handler receives the parsed task message (dict) and the connected
    socket ``conn``.
    """

    def stask_put(self,task_msg,conn):
        # Upload: client pushes a file to the server, with resume support.
        file_name = task_msg.get('filename')
        file_size = task_msg.get('filesize')
        src_md5 = task_msg.get('md5')  # client-computed md5 used for verification
        print('Server::put::ok'.center(50,'-'))
        if os.path.isfile(file_name):
            server_response = {'status': 201} #file_exists, overrided? (Y/N)
        else:
            server_response = {'status': 200} #file does not exists
        conn.send(bytes(json.dumps(server_response), encoding='utf-8'))
        # pending client side answer to override the file
        recv_data = conn.recv(1024) #{'status': 301}
        overrided = json.loads(recv_data.decode())
        dest_name = FTP_SERVER +'/'+ file_name
        server_data_size = 0
        if overrided['status'] == 300: #300 means overrided
            file_open_status = 'wb'
            self._file_upload(file_size,dest_name,file_open_status,conn,server_data_size)
        else:
            # Resume: append to the partial file starting at its current size.
            # NOTE(review): os.stat raises if dest_name does not exist yet --
            # presumably the client only requests resume for partial uploads;
            # confirm.
            file_open_status = 'ab'
            server_data_size = os.stat(dest_name).st_size
            self._file_upload(file_size, dest_name, file_open_status, conn, server_data_size)
        # Verify the transfer and tell the client: '1000' ok, '1001' mismatch.
        dest_md5 = self._get_md5(dest_name)
        if src_md5 == dest_md5:
            print("file_transfer::%s::md5-value-matched" %(self._get_status('succeed')))
            file_verified = '1000'
        else:
            print("file_transfer::%s::md5-value-unmatched" %(self._get_status('failed')))
            file_verified = '1001'
        conn.send(bytes(file_verified, encoding='utf-8'))

    def _file_upload(self, file_size, dest_file,file_open_status, conn,server_data_size):
        # Receive file bytes until file_size is reached, writing from offset
        # server_data_size (0 for overwrite, current size for resume).
        f = open(dest_file, file_open_status)
        f.seek(server_data_size)
        ready_tag = 'Ready|%s' % server_data_size
        conn.send(bytes(ready_tag, encoding='utf-8'))
        recv_size = server_data_size
        while recv_size < file_size:
            recv_data = conn.recv(4096)
            f.write(recv_data)
            recv_size += len(recv_data)
            # print("filesize: %s recv_size: %s" % (file_size, recv_size))
        f.close()

    def stask_get(self,task_msg,conn):
        # Download: stream the requested file to the client.
        file_name = task_msg.get('filename')
        print('Server::get::ok'.center(50, '-'))
        file_size = os.stat(file_name).st_size
        dest_md5 = self._get_md5(file_name)
        ready_msg = {'file_name': file_name, 'file_size': file_size, 'message': 'Ready','md5':dest_md5}
        json.dumps(ready_msg)  # NOTE(review): result discarded; the dumps below is the one sent
        conn.sendall(bytes(json.dumps(ready_msg),encoding='utf-8'))
        feeback = conn.recv(1024) #Started
        if str(feeback,encoding='utf-8') == 'Started':
            time.sleep(0.5)  # give the client a moment before data starts flowing
            with open(file_name, 'rb') as f:
                for line in f:
                    conn.sendall(line)
            # Client reports '1000' when its md5 matched, '1001' otherwise.
            file_verified = conn.recv(1024)
            if str(file_verified,encoding='utf-8') == '1000':
                print("file_transfer::%s::md5-value-matched" % (self._get_status('succeed')))
            else:
                print("file_transfer::%s::md5-value-unmatched" % (self._get_status('failed')))

    def stask_cd(self,task_msg,conn):
        # Change directory, restricted to the user's home-directory subtree.
        send_data_msg = {} # {'message':'Permission denied','has_permission':False}
        task_action = task_msg.get('action') #{'action':'cd',"directory":directory,'username':uname}
        target_dir = task_msg.get('directory')[1]
        uname = task_msg.get('username')
        home_dir = user_info.user[uname]['home']
        # Permission: target path must start with the user's home directory.
        has_permission = target_dir.startswith(home_dir)
        if has_permission:
            print('Server::switch::ok'.center(50, '-'))
            os.chdir(target_dir)
            cwd_path = os.getcwd()
            if cwd_path:
                send_data_msg['message'] = "switch_directory:{}" .format(target_dir)
                send_data_msg['has_permission'] = True
            else:
                send_data_msg['message'] = "switch_directory:{}".format(target_dir)
                send_data_msg['has_permission'] = False
        # NOTE(review): on denial the client receives an empty dict rather
        # than the commented 'Permission denied' payload -- confirm intent.
        conn.sendall(bytes(json.dumps(send_data_msg),encoding='utf-8'))

    def stask_ls(self,task_msg,conn):
        # Directory listing: runs the client-supplied command in a shell.
        task_action = task_msg.get('action')
        directory = task_msg.get('directory')
        print('Server::list::ok'.center(50, '-'))
        if directory:
            list_command = task_action + " " + directory
        else:
            list_command = task_action
        # NOTE(review): shell=True with client-supplied text allows command
        # injection; acceptable only for a trusted learning exercise.
        cmd_data = subprocess.Popen(list_command,shell=True, stdout=subprocess.PIPE)
        if cmd_data:
            send_data = cmd_data.stdout.read()
        else:
            send_data = bytes("The command does not supported, input >> help for cmd list",encoding='utf-8')
        # Announce payload size, wait for 'Started', then send the listing.
        ready_tag = "Ready|%s" %(len(send_data)) #Ready|4096
        conn.sendall(bytes(ready_tag,encoding='utf-8'))
        started_flag=conn.recv(1024)
        started = str(started_flag,encoding='utf-8')
        if started == 'Started':
            conn.sendall(send_data)

    def stask_rm(self,task_msg, conn):
        # Remove a file by running "<action> <filename>" in a shell.
        task_action = task_msg.get('action')
        file_name = task_msg.get('filename')
        print('Server::delete::ok'.center(50, '-'))
        rm_command = task_action + " " + file_name
        # NOTE(review): same shell=True injection caveat as stask_ls; Popen
        # never returns a falsy object, so the else branch is unreachable.
        cmd_data = subprocess.Popen(rm_command,shell=True, stdout=subprocess.PIPE)
        if cmd_data:
            data = "file_removed::%s" %(self._get_status('succeed'))
            send_data = bytes(data,encoding='utf-8')
        else:
            send_data = bytes("The command does not supported, input >> help for cmd list",encoding='utf-8')
        conn.sendall(send_data)

    @staticmethod
    def server_auth(task_msg,conn):
        # Validate credentials against user_info.user; reply with the auth
        # verdict and the user's home directory, and return the verdict.
        # {"username": "ken", "password": "ken123"}
        username = task_msg.get('username')
        password = task_msg.get('password')
        con1 = username in user_info.user.keys()
        # NOTE(review): the two lookups below raise KeyError for an unknown
        # username before con1 is ever consulted -- needs a guard.
        con2 = password == user_info.user[username]['password']
        home_dir = user_info.user[username]['home']
        if con1 and con2:
            is_auth = True
        else:
            is_auth = False
        # NOTE(review): chdir runs even when authentication failed.
        os.chdir(home_dir)
        home_dir = os.getcwd()
        authentication = {"is_auth":is_auth,"home_dir":home_dir}
        conn.sendall(bytes(json.dumps(authentication), encoding='utf-8'))
        return is_auth

    @classmethod
    def _get_status(cls,status):
        # Wrap a status word in ANSI color codes (red=failed, green=succeed);
        # returns None for any other input.
        ret = None
        if status == 'failed':
            ret = '\033[31;1m%s\033[0m' % ('failed')
        elif status == 'succeed':
            ret = '\033[32;1m%s\033[0m' % ('succeed')
        return ret

    def _get_md5(self,file):
        # md5 hex digest of a file's contents, or None if the file is missing.
        if os.path.exists(file) == False:
            return None
        md5 = hashlib.md5()
        with open(file, 'rb') as f:
            for line in f:
                md5.update(line)
        return md5.hexdigest()
|
jcchoiling/learningPython
|
s13/Day09/ftp_server/ftp_server.py
|
Python
|
gpl-3.0
| 8,848
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2016-08-10 13:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add an optional ``manufacturer`` foreign key to the Module model.

    Auto-generated by Django 1.9.8. PROTECT prevents deleting a
    Manufacturer that still has modules attached.
    """

    dependencies = [
        ('dcim', '0015_rack_add_u_height_validator'),
    ]

    operations = [
        migrations.AddField(
            model_name='module',
            name='manufacturer',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, related_name='modules', to='dcim.Manufacturer'),
        ),
    ]
|
Alphalink/netbox
|
netbox/dcim/migrations/0016_module_add_manufacturer.py
|
Python
|
apache-2.0
| 595
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# get-chromium - A Python script to retrieve the latest Chromium snapshots.
# git repository: https://github.com/iceTwy/get-chromium.git
# website: https://github.com/iceTwy/get-chromium
#
# author: iceTwy <icetwy@icetwy.re> (icetwy.re)
#
# LICENSE
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# For more information, please refer to <http://unlicense.org/>
#
import urllib
import urllib2
import sys
import platform
import os
def BuildURLFromOS():
    # Map the local OS/architecture to the Chromium snapshot directory name
    # and the archive filename to download. Returns (osStringAppend, dlfile).
    # (Python 2 code: print statements and raw_input below.)
    osString = ['Mac', 'Win', 'Linux', 'Linux_x64', 'Linux_ARM_Cross-Compile', 'Android']
    userOS = platform.system()
    userArch = platform.machine()
    if userOS == 'Darwin':
        osStringAppend = osString[0]
        dlfile = 'chrome-mac.zip'
    elif userOS == 'Windows':
        osStringAppend = osString[1]
        WinSnapshotChoice = raw_input("Would you like to download an installable revision or portable revision (no installation required)? (install/portable) ")
        if WinSnapshotChoice != 'install' and WinSnapshotChoice != 'portable':
            sys.exit()
        elif WinSnapshotChoice == 'install':
            dlfile = 'mini_installer.exe'
        else:
            dlfile = 'chrome-win32.zip'
    elif userOS == 'Linux':
        try:
            # An importable 'android' module (SL4A/QPython) marks an Android
            # environment; otherwise fall through to desktop Linux.
            import android
            osStringAppend = osString[5]
            dlfile = 'chrome-android.zip'
        except ImportError:
            dlfile = 'chrome-linux.zip'
            if userArch == 'x86_64':
                osStringAppend = osString[3]
            elif 'arm' in userArch:
                osStringAppend = osString[4]
            else:
                osStringAppend = osString[2]
    else:
        print 'Platform not supported'
        sys.exit()
    return osStringAppend, dlfile
def FindSnapshotRevision(osStringAppend):
    # Fetch the latest available snapshot revision (the LAST_CHANGE file)
    # for the given platform directory; exits on network failure.
    try:
        getSnapshotRev = urllib2.urlopen('https://commondatastorage.googleapis.com/chromium-browser-snapshots/' + osStringAppend + '/LAST_CHANGE', timeout=30)
    except urllib2.URLError:
        print "Couldn't reach the requested URL.\nCheck your Internet connection settings.\nExiting..."
        sys.exit()
    SnapshotRev = getSnapshotRev.read()
    print "Available Chromium snapshot revision: %s" % SnapshotRev
    return SnapshotRev
def CheckDirsFiles(osStringAppend):
SettingsDir = os.path.expanduser("~/.get-chromium") # default directory to save get-chromium settings
DLDirFile = os.path.expanduser("~/.get-chromium/DOWNLOAD_DIR") # file containing userDLDir
RevisionFile = os.path.expanduser("~/.get-chromium/LAST_INSTALL") # file containing SnapshotRev
#SettingsDir check
check_SettingsDir = os.path.exists(SettingsDir)
if check_SettingsDir == False:
os.mkdir(SettingsDir)
else:
pass
#RevisionFile check
check_RevisionFile = os.path.exists(RevisionFile)
if check_RevisionFile == False:
print "\nWould you like to save the revision number of the last downloaded snapshot to a file?\nThis will avoid redownloading Chromium if you already have the latest revision."
createNewFile = raw_input("Remember the last downloaded revision? (y/n) ")
if createNewFile != 'y' and createNewFile != 'n':
sys.exit()
elif createNewFile == 'y':
touchRevisionFile = open(RevisionFile, 'w+') # simply creating the file, it's updated with logNewInstall()
touchRevisionFile.close()
print "Revision file successfully created!"
else:
print "Revision file not created. Revision number will not be tracked."
else:
pass
#DLDirFile check
check_DLDirFile = os.path.exists(DLDirFile)
if check_DLDirFile == False:
create_DLDirFile = raw_input("\nYou haven't defined a default download directory. Would you like to select one? (y/n) ")
if create_DLDirFile != 'y' and create_DLDirFile != 'n':
sys.exit()
if create_DLDirFile == 'y':
choose_DLDir = raw_input("Choose a default download directory (e.g. ~/get-chromium): ")
if not choose_DLDir:
userDLDir = os.getcwd()
print "Snapshot will be downloaded to the current directory."
else:
touch_DLDirFile = open(DLDirFile, 'w+')
touch_DLDirFile.write(choose_DLDir)
userDLDir = touch_DLDirFile.read()
touch_DLDirFile.close()
print "Default download directory successfully chosen!"
else:
userDLDir = os.getcwd()
print "Snapshot will be downloaded to the current directory."
else:
open_DLDirFile = open(DLDirFile, 'r')
userDLDir = open_DLDirFile.read()
if not userDLDir:
choose_DLDir = raw_input("Choose a default download directory (e.g. ~/get-chromium): ")
if not choose_DLDir:
userDLDir = os.getcwd()
open_DLDirFile.close()
print "Snapshot will be downloaded to the current directory."
else:
open_DLDirFile.close()
touch_DLDirFile = open(DLDirFile, 'w')
touch_DLDirFile.write(choose_DLDir)
touch_DLDirFile.close()
else:
open_DLDirFile.close()
#Correctly format the path
if osStringAppend == 'Win':
if userDLDir[-1:] != u"\u005C": # backslash
userDLDir = userDLDir + u"\u005C"
else:
pass
else:
if userDLDir[-1:] != u"\u002F": # slash
userDLDir = userDLDir + u"\u002F"
else:
pass
return RevisionFile, userDLDir
def CheckPriorRevision(RevisionFile, SnapshotRev, SnapshotRev_unused=None, dlfile=None):
    # (signature documented below; see original parameter list)
    pass
def GetSnapshot(osStringAppend, SnapshotRev, dlfile, userDLDir):
    # Download the snapshot archive for (platform, revision) into userDLDir,
    # reporting progress through dlProgress; exits on network failure.
    SnapshotURL = 'https://commondatastorage.googleapis.com/chromium-browser-snapshots/' + osStringAppend + '/' + SnapshotRev + '/' + dlfile
    print "\nDownload URL: %s" % SnapshotURL
    try:
        reachSnapshotFile = urllib2.urlopen(SnapshotURL, timeout=30) # probe reachability before retrieving
        os.chdir(userDLDir)
        downloadSnapshot = urllib.urlretrieve(SnapshotURL, dlfile, reporthook=dlProgress)
        reachSnapshotFile.close()
        print "\nSaved under %s" % userDLDir + dlfile
    except urllib2.URLError:
        print "Couldn't retrieve the requested snapshot. Exiting..."
        sys.exit()
def dlProgress(count, blockSize, totalSize):
    # urlretrieve reporthook: rewrite the current console line with the
    # percentage downloaded.
    # NOTE(review): reads the module-global SnapshotRev set in __main__.
    percent = int(count*blockSize*100/totalSize)
    sys.stdout.write("\r" + "Downloading Chromium r%s... " % SnapshotRev + " %d%%" % percent)
    sys.stdout.flush()
def LogNewInstall(RevisionFile, SnapshotRev):
    """Persist the just-downloaded snapshot revision to the revision file."""
    with open(RevisionFile, 'w+') as revision_file:
        revision_file.write(SnapshotRev)
if __name__ == '__main__':
    # Full run: detect platform, look up the latest revision, prepare the
    # settings/download directories, decide whether a download is needed,
    # fetch the snapshot, then record the new revision.
    osStringAppend, dlfile = BuildURLFromOS()
    SnapshotRev = FindSnapshotRevision(osStringAppend)
    RevisionFile, userDLDir = CheckDirsFiles(osStringAppend)
    CheckPriorRevision(RevisionFile, SnapshotRev, userDLDir, dlfile)
    GetSnapshot(osStringAppend, SnapshotRev, dlfile, userDLDir)
    LogNewInstall(RevisionFile, SnapshotRev)
    print "Done. Exiting...\r"
|
iceTwy/get-chromium
|
get-chromium.py
|
Python
|
unlicense
| 10,015
|
# vi: ts=8 sts=4 sw=4 et
#
# uri.py: various URI related utilties
#
# This file is part of Draco2. Draco2 is free software and is made available
# under the MIT license. Consult the file "LICENSE" that is distributed
# together with this file for the exact licensing terms.
#
# Draco2 is copyright (c) 1999-2007 by the Draco2 authors. See the file
# "AUTHORS" for a complete overview.
#
# $Revision: 1187 $
import os
import os.path
import re
import stat
# URL/Form encoding
safe_chars = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
              'abcdefghijklmnopqrstuvwxyz'
              '0123456789' '_.-')

def quote_hex(s, safe=''):
    """
    Return `s' with every character outside the safe set replaced by its
    %XX hexadecimal escape. Extra characters to leave untouched may be
    passed in `safe'.
    """
    allowed = safe + safe_chars
    escaped = [ch if ch in allowed else '%%%02X' % ord(ch) for ch in s]
    return ''.join(escaped)
def unquote_hex(s):
    """
    Replace %XX escapes in `s' with their character value; malformed
    escapes are passed through verbatim. Inverse of quote_hex().
    """
    pieces = s.split('%')
    decoded = [pieces[0]]
    for fragment in pieces[1:]:
        restored = None
        if len(fragment) >= 2:
            try:
                restored = chr(int(fragment[:2], 16)) + fragment[2:]
            except ValueError:
                pass
        if restored is None:
            restored = '%' + fragment
        decoded.append(restored)
    return ''.join(decoded)
def quote_url(s):
    """URL encode a string."""
    # Python 2 code: `unicode' does not exist on Python 3.
    if isinstance(s, unicode):
        s = s.encode('utf-8')
    return quote_hex(s, '/')  # keep '/' literal so path separators survive
def unquote_url(s):
    """Decode an URL encoded string."""
    s = unquote_hex(s)
    s = s.decode('utf-8')  # Python 2: byte string -> unicode
    return s
def quote_form(s):
    """Form encode a string."""
    if isinstance(s, unicode):  # Python 2 only
        s = s.encode('utf-8')
    s = quote_hex(s, ' ')
    s = s.replace(' ', '+')  # form encoding represents spaces as '+'
    return s
def unquote_form(s):
    """Decode a form encoded string."""
    s = s.replace('+', ' ')  # undo the '+'-for-space convention first
    s = unquote_hex(s)
    s = s.decode('utf-8')  # Python 2: byte string -> unicode
    return s
# URI parsing
re_uri = re.compile('(?:([^:/?]*):)?(?://([^?/]*))?(?:/?([^?]*))(?:\?(.*))?')

def parse_uri(uri):
    """Parse an URI into its components.

    The result is a 4-tuple (scheme, host, path, query); absent
    components come back as empty strings.

    Note: This function only supports the "hier_part" URL format as
    defined in RFC2396 section 3. The "opaque_part" format is not
    supported.
    """
    mobj = re_uri.match(uri)
    assert mobj
    return tuple('' if group is None else group for group in mobj.groups())
def create_uri(scheme=None, host=None, path=None, query=None):
    """Create an URI from its components (inverse of parse_uri)."""
    pieces = []
    if scheme:
        pieces.append('%s:' % scheme)
    if host:
        pieces.append('//%s' % host)
    if path:
        pieces.append('/%s' % path)
    if query:
        pieces.append('?%s' % query)
    return ''.join(pieces)
def parse_path(path):
    """Parse the "path" component of an URI.

    The result is a list of decoded path components; empty segments
    are dropped.
    """
    components = []
    for segment in path.split('/'):
        if segment:
            components.append(unquote_url(segment))
    return components
def create_path(parts):
    """Create a "path" component of an URI.

    This function is the reverse of parse_path().
    """
    return '/'.join(quote_url(component) for component in parts)
def parse_query(query):
    """Parse the "query" component of an URI.

    The result is a dictionary that maps a string key to a list with
    one or more string values. Fragments without exactly one '=' are
    skipped.
    """
    args = {}
    for fragment in query.split('&'):
        try:
            name, value = fragment.split('=')
        except ValueError:
            continue
        decoded_name = unquote_form(name)
        decoded_value = unquote_form(value)
        args.setdefault(decoded_name, []).append(decoded_value)
    return args
def create_query(args):
    """Create the "query" component of an URI.

    This function is the reverse of parse_query().
    """
    pairs = []
    for key, value in args.items():
        pairs.append('%s=%s' % (quote_form(key), quote_form(value)))
    return '&'.join(pairs)
# URL path resolution
class ResolutionError(Exception):
    """Raised when an URI path cannot be resolved against the document root."""
    pass
def resolve_path_uri(path, docroot):
    """Resolves the path part of an URI.

    The URI is resolved to the 3-tuple: directory, filename, pathinfo.
    The filename component is either empty or a single path component,
    and may or may not exist as a physical file. The pathinfo component
    consists of zero or more path components.
    """
    # (Python 2 `raise Exc, 'msg'` syntax below.)
    try:
        st = os.stat(docroot)
    except OSError:
        st = None
    if st is None or not stat.S_ISDIR(st.st_mode):
        raise ResolutionError, 'Document root does not exist.'
    directory = []
    subdir = docroot
    parts = [ unquote_url(part) for part in path.split('/') if part ]
    for i in range(len(parts)):
        part = parts[i]
        if part in ('.', '..'):
            # Reject traversal components outright.
            raise ResolutionError, \
                'Current or parent directory not allowed in URI.'
        subdir = os.path.join(subdir, part)
        try:
            st = os.stat(subdir)
        except OSError:
            st = None
        if st is None or not stat.S_ISDIR(st.st_mode):
            # First component that is not an existing directory becomes the
            # filename; everything after it is pathinfo.
            filename = parts[i]
            pathinfo = '/'.join(parts[i+1:])
            break
        directory.append(part)
    else:
        # Every component resolved to an existing directory.
        filename = ''
        pathinfo = ''
    directory = '/'.join(directory)
    return (directory, filename, pathinfo)
def create_path_uri(directory, filename, pathinfo):
    """Create a path URI from a 3-tuple (directory, filename, pathinfo)."""
    segments = []
    if directory:
        segments.append(directory)
    if filename:
        segments.append(filename)
    if pathinfo:
        for piece in pathinfo.split('/'):
            if piece:
                segments.append(piece)
    return '/'.join(quote_url(segment) for segment in segments)
|
geertj/draco2
|
draco2/util/uri.py
|
Python
|
mit
| 5,926
|
from collections import OrderedDict, namedtuple
import functools
import numpy as np
from PyQt4 import QtGui
from PyQt4.QtGui import QTreeView, QStandardItemModel, QStandardItem, \
QHeaderView, QItemDelegate
from PyQt4.QtCore import Qt, QSize
import Orange
from Orange.evaluation import *
from Orange.widgets import widget, gui, settings
from Orange.data import DiscreteVariable, Domain
# Per-learner record: the learner itself, its cached evaluation Results,
# and the tuple of computed scores shown in the results table.
Input = namedtuple("Input", ["learner", "results", "stats"])
def classification_stats(results):
    # Score a classification Results object; the order of this tuple must
    # match classification_stats.headers below.
    return (AUC(results),
            CA(results),
            F1(results),
            Precision(results),
            Recall(results))

# Column titles for the widget's classification results table.
classification_stats.headers = ["AUC", "CA", "F1", "Precision", "Recall"]
def regression_stats(results):
    # Score a regression Results object; the order of this tuple must
    # match regression_stats.headers below.
    return (MSE(results),
            RMSE(results),
            MAE(results),
            R2(results))

# Column titles for the widget's regression results table.
regression_stats.headers = ["MSE", "RMSE", "MAE", "R2"]
def is_discrete(var):
    """Return True when `var` is a discrete (categorical) variable."""
    return isinstance(var, DiscreteVariable)
class ItemDelegate(QItemDelegate):
    # Delegate that adds a little vertical padding to each results row.
    def sizeHint(self, *args):
        size = super().sizeHint(*args)
        return QSize(size.width(), size.height() + 6)
class OWTestLearners(widget.OWWidget):
name = "Test Learners"
description = ""
icon = "icons/TestLearners1.svg"
priority = 100
inputs = [("Learner", Orange.classification.Learner,
"set_learner", widget.Multiple),
("Data", Orange.data.Table, "set_train_data", widget.Default),
("Test Data", Orange.data.Table, "set_test_data")]
outputs = [("Evaluation Results", Orange.evaluation.Results)]
settingsHandler = settings.ClassValuesContextHandler()
#: Resampling/testing types
KFold, LeaveOneOut, Bootstrap, TestOnTrain, TestOnTest = 0, 1, 2, 3, 4
#: Selected resampling type
resampling = settings.Setting(0)
#: Number of folds for K-fold cross validation
k_folds = settings.Setting(10)
#: Number of repeats for bootstrap sampling
n_repeat = settings.Setting(10)
#: Bootstrap sampling p
sample_p = settings.Setting(75)
class_selection = settings.ContextSetting("(None)")
def __init__(self, parent=None):
super().__init__(parent)
self.train_data = None
self.test_data = None
#: An Ordered dictionary with current inputs and their testing
#: results.
self.learners = OrderedDict()
sbox = gui.widgetBox(self.controlArea, "Sampling")
rbox = gui.radioButtons(
sbox, self, "resampling", callback=self._param_changed
)
gui.appendRadioButton(rbox, "Cross validation")
ibox = gui.indentedBox(rbox)
gui.spin(ibox, self, "k_folds", 2, 50, label="Number of folds:",
callback=self.kfold_changed)
gui.appendRadioButton(rbox, "Leave one out")
gui.appendRadioButton(rbox, "Random sampling")
ibox = gui.indentedBox(rbox)
gui.spin(ibox, self, "n_repeat", 2, 50, label="Repeat train/test",
callback=self.bootstrap_changed)
gui.widgetLabel(ibox, "Relative training set size:")
gui.hSlider(ibox, self, "sample_p", minValue=1, maxValue=100,
ticks=20, vertical=False, labelFormat="%d %%",
callback=self.bootstrap_changed)
gui.appendRadioButton(rbox, "Test on train data")
gui.appendRadioButton(rbox, "Test on test data")
rbox.layout().addSpacing(5)
gui.button(rbox, self, "Apply", callback=self.apply)
self.cbox = gui.widgetBox(self.controlArea, "Target class")
self.class_selection_combo = gui.comboBox(self.cbox, self, "class_selection",
items=[],
callback=self._select_class,
sendSelectedValue=True, valueType=str)
gui.rubber(self.controlArea)
self.view = QTreeView(
rootIsDecorated=False,
uniformRowHeights=True,
wordWrap=True,
editTriggers=QTreeView.NoEditTriggers
)
header = self.view.header()
header.setResizeMode(QHeaderView.ResizeToContents)
header.setDefaultAlignment(Qt.AlignCenter)
header.setStretchLastSection(False)
self.result_model = QStandardItemModel()
self.view.setModel(self.result_model)
self.view.setItemDelegate(ItemDelegate())
self._update_header()
box = gui.widgetBox(self.mainArea, "Evaluation Results")
box.layout().addWidget(self.view)
def set_learner(self, learner, key):
if key in self.learners and learner is None:
del self.learners[key]
else:
self.learners[key] = Input(learner, None, ())
self._update_stats_model()
def set_train_data(self, data):
self.error(0)
if data is not None:
if data.domain.class_var is None:
self.error(0, "Train data input requires a class variable")
data = None
self.train_data = data
self.closeContext()
self.class_selection = "(None)"
self.openContext(data.domain.class_var)
self._update_class_selection()
self._update_header()
self._invalidate()
def set_test_data(self, data):
self.error(1)
if data is not None:
if data.domain.class_var is None:
self.error(1, "Test data input requires a class variable")
data = None
self.test_data = data
if self.resampling == OWTestLearners.TestOnTest:
self._invalidate()
def handleNewSignals(self):
self.update_results()
self.commit()
def kfold_changed(self):
self.resampling = OWTestLearners.KFold
self._param_changed()
def bootstrap_changed(self):
self.resampling = OWTestLearners.Bootstrap
self._param_changed()
def _param_changed(self):
self._invalidate()
def update_results(self):
self.warning([1, 2])
self.error(2)
if self.train_data is None:
return
if self.resampling == OWTestLearners.TestOnTest:
if self.test_data is None:
self.warning(2, "Missing separate test data input")
return
elif self.test_data.domain.class_var != \
self.train_data.domain.class_var:
self.error(2, ("Inconsistent class variable between test " +
"and train data sets"))
return
# items in need of an update
items = [(key, input) for key, input in self.learners.items()
if input.results is None]
learners = [input.learner for _, input in items]
self.setStatusMessage("Running")
if self.test_data is not None and \
self.resampling != OWTestLearners.TestOnTest:
self.warning(1, "Test data is present but unused. "
"Select 'Test on test data' to use it.")
# TODO: Test each learner individually
if self.resampling == OWTestLearners.KFold:
results = Orange.evaluation.CrossValidation(
self.train_data, learners, k=self.k_folds, store_data=True
)
elif self.resampling == OWTestLearners.LeaveOneOut:
results = Orange.evaluation.LeaveOneOut(
self.train_data, learners, store_data=True
)
elif self.resampling == OWTestLearners.Bootstrap:
p = self.sample_p / 100.0
results = Orange.evaluation.Bootstrap(
self.train_data, learners, n_resamples=self.n_repeat, p=p,
store_data=True
)
elif self.resampling == OWTestLearners.TestOnTrain:
results = Orange.evaluation.TestOnTrainingData(
self.train_data, learners, store_data=True
)
elif self.resampling == OWTestLearners.TestOnTest:
if self.test_data is None:
return
results = Orange.evaluation.TestOnTestData(
self.train_data, self.test_data, learners, store_data=True
)
else:
assert False
self.results = results
results = list(split_by_model(results))
class_var = self.train_data.domain.class_var
if is_discrete(class_var):
stats = [classification_stats(self.one_vs_rest(res)) for res in results]
else:
stats = [regression_stats(res) for res in results]
self._update_header()
for (key, input), res, stat in zip(items, results, stats):
self.learners[key] = input._replace(results=res, stats=stat)
self.setStatusMessage("")
self._update_stats_model()
def _update_header(self):
headers = ["Method"]
if self.train_data is not None:
if is_discrete(self.train_data.domain.class_var):
headers.extend(classification_stats.headers)
else:
headers.extend(regression_stats.headers)
for i in reversed(range(len(headers),
self.result_model.columnCount())):
self.result_model.takeColumn(i)
self.result_model.setHorizontalHeaderLabels(headers)
def _update_stats_model(self):
model = self.view.model()
for r in reversed(range(model.rowCount())):
model.takeRow(r)
for input in self.learners.values():
name = learner_name(input.learner)
row = []
head = QStandardItem()
head.setData(name, Qt.DisplayRole)
row.append(head)
for stat in input.stats:
item = QStandardItem()
item.setData(" {:.3f} ".format(stat[0]), Qt.DisplayRole)
row.append(item)
model.appendRow(row)
def _update_class_selection(self):
if is_discrete(self.train_data.domain.class_var):
self.cbox.setVisible(True)
values = self.train_data.domain.class_var.values
self.class_selection_combo.clear()
self.class_selection_combo.addItem("(None)")
self.class_selection_combo.addItems(values)
class_index = 0
if self.class_selection != '(None)' and self.class_selection != 0:
class_index = self.train_data.domain.class_var.values.index(self.class_selection)+1
self.class_selection_combo.setCurrentIndex(class_index)
self.previous_class_selection = "(None)"
else:
self.cbox.setVisible(False)
def one_vs_rest(self, res):
if self.class_selection != '(None)' and self.class_selection != 0:
class_ = self.train_data.domain.class_var.values.index(self.class_selection)
actual = res.actual == class_
predicted = res.predicted == class_
return Results(
nmethods=1, domain=self.train_data.domain,
actual=actual, predicted=predicted)
else:
return res
def _select_class(self):
if self.previous_class_selection == self.class_selection:
return
results = list(split_by_model(self.results))
items = [(key, input) for key, input in self.learners.items()]
learners = [input.learner for _, input in items]
class_var = self.train_data.domain.class_var
if is_discrete(class_var):
stats = [classification_stats(self.one_vs_rest(res)) for res in results]
else:
stats = [regression_stats(res) for res in results]
for (key, input), res, stat in zip(items, results, stats):
self.learners[key] = input._replace(results=res, stats=stat)
self.setStatusMessage("")
self._update_stats_model()
self.previous_class_selection = self.class_selection
def _invalidate(self, which=None):
if which is None:
which = self.learners.keys()
all_keys = list(self.learners.keys())
model = self.view.model()
for key in which:
self.learners[key] = \
self.learners[key]._replace(results=None, stats=None)
if key in self.learners:
row = all_keys.index(key)
for c in range(1, model.columnCount()):
item = model.item(row, c)
if item is not None:
item.setData(None, Qt.DisplayRole)
def apply(self):
self.update_results()
self.commit()
def commit(self):
    """Merge all per-learner results that are available and emit them on
    the "Evaluation Results" channel (None when nothing is available)."""
    results = [val.results for val in self.learners.values()
               if val.results is not None]
    if results:
        combined = results_merge(results)
        # Attach display names in the same order as the merged methods.
        combined.learner_names = [learner_name(val.learner)
                                  for val in self.learners.values()]
    else:
        combined = None
    self.send("Evaluation Results", combined)
def learner_name(learner):
    """Return the learner's display name, falling back to its class name."""
    try:
        return learner.name
    except AttributeError:
        return type(learner).__name__
def split_by_model(results):
    """
    Split evaluation results by models.

    Yields one Results object per evaluated method.  Each yielded object
    shares the original data/domain/row_indices/actual arrays and carries
    a single row of `predicted` (and `probabilities` when present).
    """
    data = results.data
    nmethods = len(results.predicted)
    for i in range(nmethods):
        res = Orange.evaluation.Results()
        res.data = data
        res.domain = results.domain
        res.row_indices = results.row_indices
        res.actual = results.actual
        # (i,) indexing keeps a leading methods axis of length 1.
        res.predicted = results.predicted[(i,), :]
        if getattr(results, "probabilities", None) is not None:
            res.probabilities = results.probabilities[(i,), :, :]
        if results.models:
            # One model per fold, for method i only.
            res.models = [mf[i] for mf in results.models]
        if results.folds:
            res.folds = results.folds
        yield res
def results_add_by_model(x, y):
    """Combine two Results objects by stacking their per-method predictions.

    This is the reduction step used by `results_merge`; an "empty" operand
    (no models and no row_indices) acts as the identity element.  Both
    operands must cover the same rows in the same order.

    Bug fix: `getattr(y, "probabilities")` was missing its default, so a
    `y` without a `probabilities` attribute raised AttributeError instead
    of simply skipping the probability merge (the `x` check already used
    a default of None).
    """
    def is_empty(res):
        return (getattr(res, "models", None) is None
                and getattr(res, "row_indices", None) is None)

    if is_empty(x):
        return y
    elif is_empty(y):
        return x

    assert (x.row_indices == y.row_indices).all()
    assert (x.actual == y.actual).all()

    res = Orange.evaluation.Results()
    res.data = x.data
    res.domain = x.domain
    res.row_indices = x.row_indices
    res.folds = x.folds
    res.actual = x.actual
    res.predicted = np.vstack((x.predicted, y.predicted))
    if getattr(x, "probabilities", None) is not None \
            and getattr(y, "probabilities", None) is not None:
        res.probabilities = np.vstack((x.probabilities, y.probabilities))
    if x.models is not None:
        res.models = [xm + ym for xm, ym in zip(x.models, y.models)]
    return res
def results_merge(results):
    """Merge an iterable of per-model Results into one combined Results."""
    merged = Orange.evaluation.Results()
    for res in results:
        merged = results_add_by_model(merged, res)
    return merged
def main():
    """Ad-hoc GUI smoke test: evaluate two learners on the iris data."""
    app = QtGui.QApplication([])
    data = Orange.data.Table("iris")
    w = OWTestLearners()
    w.show()
    w.set_train_data(data)
    w.set_test_data(data)
    w.set_learner(Orange.classification.LogisticRegressionLearner(), 1)
    w.set_learner(Orange.classification.MajorityLearner(), 2)
    w.handleNewSignals()
    return app.exec_()

if __name__ == "__main__":
    import sys
    sys.exit(main())
|
qusp/orange3
|
Orange/widgets/evaluate/owtestlearners.py
|
Python
|
bsd-2-clause
| 15,291
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2021, Shuup Commerce Inc. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from shuup.apps.provides import override_provides
from shuup.core.models import get_person_contact
from shuup.front.basket import get_basket
from shuup.testing import factories
from shuup.testing.utils import apply_request_middleware
from shuup.utils.django_compat import reverse
from shuup_tests.utils import SmartClient
@pytest.mark.django_db
def test_basket_line_descriptor(rf):
    """Line properties supplied by an overridden descriptor provider are
    rendered in the basket template: the property name in a <strong> and
    its value in a <span>.

    NOTE(review): the `rf` fixture, `user`, and `response` are unused --
    candidates for cleanup.
    """
    shop = factories.get_default_shop()
    user = factories.create_random_user()
    supplier = factories.get_default_supplier()
    product = factories.create_product("product", shop, supplier, 10)
    client = SmartClient()
    # Put one unit of the product into the session basket.
    response = client.post(
        path=reverse("shuup:basket"), data={"command": "add", "product_id": product.pk, "quantity": 1}
    )
    with override_provides(
        "front_line_properties_descriptor", ["shuup.testing.line_properties_descriptor.TestLinePropertiesDescriptor"]
    ):
        soup = client.soup(reverse("shuup:basket"))
        basket_line_property = soup.find("p", {"class": "basket-line-property"})
        assert basket_line_property.find("strong", {"class": "property-name"}).text.strip() == "Type:"
        assert basket_line_property.find("span", {"class": "property-value"}).text.strip() == "product"
|
shoopio/shoop
|
shuup_tests/front/test_basket_line_descriptor.py
|
Python
|
agpl-3.0
| 1,512
|
from jinja2 import Environment, StrictUndefined
from pkg_resources import resource_string
def load_template(name):
    """Load the bundled Jinja2 template ``templates/<name>.j2`` from the
    'cob' package and compile it with strict-undefined semantics."""
    raw = resource_string('cob', f'templates/{name}.j2')
    env = Environment(undefined=StrictUndefined)
    return env.from_string(raw.decode('utf-8'))
|
getweber/weber-cli
|
cob/utils/templates.py
|
Python
|
bsd-3-clause
| 281
|
import os
import sys
import unittest
import settestpath
import logging
import yum.logginglevels as logginglevels
new_behavior = "NEW_BEHAVIOR" in os.environ.keys()
from yum import YumBase
from yum import transactioninfo
from yum import packages
from yum import packageSack
from yum.constants import TS_INSTALL_STATES, TS_REMOVE_STATES
from cli import YumBaseCli
from yum.rpmsack import RPMDBPackageSack as _rpmdbsack
import inspect
from rpmUtils import arch
from rpmUtils.transaction import initReadOnlyTransaction
import rpmUtils.miscutils
#############################################################
### Helper classes ##########################################
#############################################################
# Dummy translation wrapper
def _(msg):
return msg
# dummy save_ts to avoid lots of errors
def save_ts(*args, **kwargs):
    """No-op replacement for YumBase.save_ts; swallows all arguments."""
    return None
class FakeConf(object):
    """Stand-in for YumBase.conf: carries the configuration attributes the
    depsolver reads, initialised to values suitable for unit tests."""
    def __init__(self):
        # Package-selection policy
        self.installonlypkgs = ['kernel']
        self.exclude = []
        self.debuglevel = 8
        self.obsoletes = True
        self.exactarch = False
        self.exactarchlist = []
        self.installroot = '/'
        self.tsflags = []
        self.installonly_limit = 0
        self.skip_broken = False
        self.skip_missing_names_on_install = True
        self.skip_missing_names_on_update = True
        self.disable_excludes = []
        self.multilib_policy = 'best'
        # Deliberately invalid path so nothing is persisted by accident.
        self.persistdir = '/should-not-exist-bad-test!'
        self.showdupesfromrepos = False
        self.uid = 0
        self.groupremove_leaf_only = False
        self.protected_packages = []
        self.protected_multilib = False
        self.clean_requirements_on_remove = True
        self.upgrade_requirements_on_install = False
        self.recheck_installed_requires = False
        self.group_command = 'compat'
        self.repopkgsremove_leaf_only = False
        self.remove_leaf_only = False
        self.config_file_path = '/dev/null'
        self.config_file_age = 0
        self.yumvar = {}
        self.reposdir = '/tmp/XXXX'
        self.diskspacecheck = True
        self.depsolve_loop_limit = 10
        self.override_install_langs = ''
        self.requires_policy = "weak"
        self.autosavets = True
class FakeSack:
    """Minimal PackageSack stand-in used by FakeRepo."""

    def __init__(self):
        # Purely a placeholder -- nothing to initialise.
        return

    def have_fastReturnFileEntries(self):
        """Pretend fast file-entry lookups are available."""
        return True
class FakeRepo(object):
    """Lightweight repository stub carrying just the attributes that
    depsolving reads (id, sack, cost, provider priority)."""

    __fake_sack = FakeSack()  # shared default sack for all instances

    def __init__(self, id=None, sack=None):
        self.id = id
        # Fall back to the class-wide fake sack when none is supplied.
        self.sack = self.__fake_sack if sack is None else sack
        self.cost = 1000
        self.compare_providers_priority = 80

    def __cmp__(self, other):
        """Sort repos alphanumerically by id (Python 2 cmp protocol);
        mirrors YumRepository.__cmp__."""
        return (self.id > other.id) - (self.id < other.id)
class FakeYumDBInfo(object):
    """Simulate some functionality of RPMAdditionalDataPackage.

    All attribute access is backed by a plain dict (``self.db``); the
    attributes listed in ``_auto_hardlink_attrs`` start out as empty
    strings.
    """
    _auto_hardlink_attrs = set(['checksum_type', 'reason',
                                'installed_by', 'changed_by',
                                'from_repo', 'from_repo_revision',
                                'from_repo_timestamp', 'releasever',
                                'command_line'])

    def __init__(self, conf=None, pkgdir=None, yumdb_cache=None):
        # Signature mirrors RPMAdditionalDataPackage; arguments are unused.
        self.db = {}
        for attr in self._auto_hardlink_attrs:
            self.db[attr] = ''

    def __getattr__(self, attr):
        # Called only for attributes not found the normal way; a missing
        # key deliberately surfaces as KeyError.
        return self.db[attr]

    def __setattr__(self, attr, value):
        # 'db' itself (anything starting with "db") is stored as a real
        # instance attribute; everything else goes into the backing dict.
        if not attr.startswith("db"):
            self.db[attr] = value
        else:
            object.__setattr__(self, attr, value)

    def __iter__(self, show_hidden=False):
        for item in self.db:
            yield item

    def get(self, attr, default=None):
        """Dict-style lookup returning `default` when `attr` is unset.

        Bug fix: ``self.db[attr]`` raises KeyError for a missing key, but
        the original caught AttributeError -- so the default was never
        returned and the KeyError escaped to the caller.
        """
        try:
            res = self.db[attr]
        except KeyError:
            return default
        return res
class FakePackage(packages.YumAvailablePackage):
    """In-memory package object for tests.  NOTE: this file is Python 2
    (print statement below); keep it that way."""
    def __init__(self, name, version='1.0', release='1', epoch='0', arch='noarch', repo=None):
        if repo is None:
            repo = FakeRepo()
            print "creating empty repo for %s-%s:%s-%s.%s " % (name, epoch,
                                                               version, release,
                                                               arch)
        packages.YumAvailablePackage.__init__(self, repo)
        self.name = name
        self.version = version
        self.ver = version
        self.release = release
        self.rel = release
        self.epoch = epoch
        self.arch = arch
        self.pkgtup = (self.name, self.arch, self.epoch, self.version, self.release)
        self.yumdb_info = FakeYumDBInfo()
        # Every package implicitly provides its own exact NEVR.
        self.prco['provides'].append((name, 'EQ', (epoch, version, release)))
        self.prco['strong_requires'] = []
        # Just a unique integer
        self.id = self.__hash__()
        self.pkgKey = self.__hash__()
        self.required_pkgs = []
        self.requiring_pkgs = []
    def addProvides(self, name, flag=None, evr=(None, None, None)):
        self.prco['provides'].append((name, flag, evr))
    def addRequires(self, name, flag=None, evr=(None, None, None)):
        # Requires are recorded both as plain and strong requirements.
        self.prco['requires'].append((name, flag, evr))
        self.prco['strong_requires'].append((name, flag, evr))
    def addRequiresPkg(self, pkg):
        self.required_pkgs.append(pkg)
    def addRequiringPkg(self, pkg):
        self.requiring_pkgs.append(pkg)
    def addConflicts(self, name, flag=None, evr=(None, None, None)):
        self.prco['conflicts'].append((name, flag, evr))
    def addObsoletes(self, name, flag=None, evr=(None, None, None)):
        self.prco['obsoletes'].append((name, flag, evr))
    def addFile(self, name, ftype='file'):
        self.files[ftype].append(name)
    def required_packages(self):
        return self.required_pkgs
    def requiring_packages(self):
        return self.requiring_pkgs
class _Container(object):
pass
class DepSolveProgressCallBack:
    """Text-output callbacks for the dependency solver; logs progress to
    the "yum.verbose.cli" logger and counts resolution loops."""
    def __init__(self):
        """requires yum-cli log and errorlog functions as arguments"""
        self.verbose_logger = logging.getLogger("yum.verbose.cli")
        self.loops = 0
    def pkgAdded(self, pkgtup, mode):
        # Map the one/two-letter transaction mode code to a human phrase.
        modedict = { 'i': _('installed'),
                     'u': _('an update'),
                     'e': _('erased'),
                     'r': _('reinstalled'),
                     'd': _('a downgrade'),
                     'o': _('obsoleting'),
                     'ud': _('updated'),
                     'od': _('obsoleted'),}
        (n, a, e, v, r) = pkgtup
        modeterm = modedict[mode]
        self.verbose_logger.log(logginglevels.INFO_2,
            _('---> Package %s.%s %s:%s-%s will be %s'), n, a, e, v, r,
            modeterm)
    def start(self):
        self.loops += 1
    def tscheck(self):
        self.verbose_logger.log(logginglevels.INFO_2, _('--> Running transaction check'))
    def restartLoop(self):
        self.loops += 1
        self.verbose_logger.log(logginglevels.INFO_2,
            _('--> Restarting Dependency Resolution with new changes.'))
        self.verbose_logger.debug('---> Loop Number: %d', self.loops)
    def end(self):
        self.verbose_logger.log(logginglevels.INFO_2,
            _('--> Finished Dependency Resolution'))
    def procReq(self, name, formatted_req):
        self.verbose_logger.log(logginglevels.INFO_2,
            _('--> Processing Dependency: %s for package: %s'), formatted_req,
            name)
    def unresolved(self, msg):
        self.verbose_logger.log(logginglevels.INFO_2, _('--> Unresolved Dependency: %s'),
            msg)
    def procConflict(self, name, confname):
        self.verbose_logger.log(logginglevels.INFO_2,
            _('--> Processing Conflict: %s conflicts %s'), name, confname)
    def transactionPopulation(self):
        self.verbose_logger.log(logginglevels.INFO_2, _('--> Populating transaction set '
            'with selected packages. Please wait.'))
    def downloadHeader(self, name):
        self.verbose_logger.log(logginglevels.INFO_2, _('---> Downloading header for %s '
            'to pack into transaction set.'), name)
#######################################################################
### Abstract super class for test cases ###############################
#######################################################################
class _DepsolveTestsBase(unittest.TestCase):
    """Abstract base for depsolver tests: builds shared packages once per
    test instance and provides the assertResult() verification helper."""
    # Maps resolveDeps() return codes to symbolic names.
    res = {0 : 'empty', 2 : 'ok', 1 : 'err'}
    def __init__(self, methodName='runTest'):
        unittest.TestCase.__init__(self, methodName)
        self.pkgs = _Container()
        self.buildPkgs(self.pkgs)
    def setUp(self):
        pass
    def tearDown(self):
        pass
    @staticmethod
    def buildPkgs(pkgs, *args):
        """Overload this staticmethod to create pkpgs that are used in several
        test cases. It gets called from __init__ with self.pkgs as first parameter.
        It is a staticmethod so you can call .buildPkgs() from other Tests to share
        buildPkg code (inheritance doesn't work here, because we don't want to
        inherit the test cases, too).
        """
        pass
    def assertResult(self, pkgs, optional_pkgs=[]):
        """Check if "system" contains the given pkgs. pkgs must be present,
        optional_pkgs may be. Any other pkgs result in an error. Pkgs are
        present if they are in the rpmdb and are not REMOVEd or they
        are INSTALLed.
        """
        errors = ["Unexpected result after depsolving: \n\n"]
        pkgs = set(pkgs)
        optional_pkgs = set(optional_pkgs)
        installed = set()
        for pkg in self.rpmdb:
            # got removed
            if self.tsInfo.getMembersWithState(pkg.pkgtup, TS_REMOVE_STATES):
                if pkg in pkgs:
                    errors.append("Package %s was removed!\n" % pkg)
            else: # still installed
                if pkg not in pkgs and pkg not in optional_pkgs:
                    errors.append("Package %s was not removed!\n" % pkg)
                installed.add(pkg)
        for txmbr in self.tsInfo.getMembersWithState(output_states=TS_INSTALL_STATES):
            installed.add(txmbr.po)
            if txmbr.po not in pkgs and txmbr.po not in optional_pkgs:
                errors.append("Package %s was installed!\n" % txmbr.po)
        for pkg in pkgs - installed:
            errors.append("Package %s was not installed!\n" % pkg)
        if len(errors) > 1:
            # Include the failing test's source to make diagnosis easier.
            errors.append("\nTest case was:\n\n")
            errors.extend(inspect.getsource(inspect.stack()[1][0].f_code))
            errors.append("\n")
            self.fail("".join(errors))
class FakeRpmDb(packageSack.PackageSack):
    '''
    We use a PackageSack as a fake rpmdb instead of the normal
    RPMDBPackageSack; getProvides works a little differently on
    unversioned requirements, so we have to overload it and add some
    extra check code.
    '''
    def __init__(self):
        packageSack.PackageSack.__init__(self)
    # Need to mock out rpmdb caching... copy&paste. Gack.
    def returnConflictPackages(self):
        # All packages declaring at least one Conflicts.
        ret = []
        for pkg in self.returnPackages():
            if len(pkg.conflicts):
                ret.append(pkg)
        return ret
    def returnObsoletePackages(self):
        # All packages declaring at least one Obsoletes.
        ret = []
        for pkg in self.returnPackages():
            if len(pkg.obsoletes):
                ret.append(pkg)
        return ret
    def fileRequiresData(self):
        """Collect file-based requirements: which packages require which
        file paths, which of those are unresolved, and which installed
        packages provide each required file."""
        installedFileRequires = {}
        installedUnresolvedFileRequires = set()
        resolved = set()
        for pkg in self.returnPackages():
            for name, flag, evr in pkg.requires:
                if not name.startswith('/'):
                    continue
                installedFileRequires.setdefault(pkg.pkgtup, []).append(name)
                if name not in resolved:
                    dep = self.getProvides(name, flag, evr)
                    resolved.add(name)
                    if not dep:
                        installedUnresolvedFileRequires.add(name)
        fileRequires = set()
        for fnames in installedFileRequires.itervalues():
            fileRequires.update(fnames)
        installedFileProviders = {}
        for fname in fileRequires:
            pkgtups = [pkg.pkgtup for pkg in self.getProvides(fname)]
            installedFileProviders[fname] = pkgtups
        ret = (installedFileRequires, installedUnresolvedFileRequires,
               installedFileProviders)
        return ret
    # The transactionCache*/transactionResult*/transactionReset methods
    # below are deliberate no-ops: the fake rpmdb has no cache to manage.
    def transactionCacheFileRequires(self, installedFileRequires,
                                     installedUnresolvedFileRequires,
                                     installedFileProvides,
                                     problems):
        return
    def transactionCacheConflictPackages(self, pkgs):
        return
    def transactionCacheObsoletePackages(self, pkgs):
        return
    def transactionResultVersion(self, rpmdbv):
        return
    def transactionReset(self):
        return
    def readOnlyTS(self):
        # Should probably be able to "fake" this, so we can provide different
        # get_running_kernel_pkgtup(). Bah.
        return initReadOnlyTransaction("/")
    def getProvides(self, name, flags=None, version=(None, None, None)):
        """return dict { packages -> list of matching provides }"""
        self._checkIndexes(failure='build')
        result = { }
        # convert flags & version for unversioned requirements
        if not version:
            version=(None, None, None)
        if type(version) in (str, type(None), unicode):
            version = rpmUtils.miscutils.stringToVersion(version)
        if flags == '0':
            flags=None
        for po in self.provides.get(name, []):
            hits = po.matchingPrcos('provides', (name, flags, version))
            if hits:
                result[po] = hits
        # File requirements (paths) are also satisfiable via the file lists.
        if name[0] == '/':
            hit = (name, None, (None, None, None))
            for po in self.searchFiles(name):
                result.setdefault(po, []).append(hit)
        return result
#######################################################################
### Derive Tests from these classes or unittest.TestCase ##############
#######################################################################
class DepsolveTests(_DepsolveTestsBase):
    """Run depsolver on a manually set up transaction.
    You can add pkgs to self.rpmdb or self.tsInfo. See
    yum/transactioninfo.py for details.
    A typical test case looks like:
        def testInstallPackageRequireInstalled(self):
            po = FakePackage('zsh', '1', '1', None, 'i386')
            po.addRequires('zip', 'EQ', (None, '1.3', '2'))
            self.tsInfo.addInstall(po)
            ipo = FakePackage('zip', '1.3', '2', None, 'i386')
            self.rpmdb.addPackage(ipo)
            result, msg = self.resolveCode()
            self.assertEquals('ok', result, msg)
            self.assertResult((po, ipo))
    """
    def setUp(self):
        """ Called at the start of each test. """
        _DepsolveTestsBase.setUp(self)
        self.tsInfo = transactioninfo.TransactionData()
        self.tsInfo.debug = 1
        self.rpmdb = FakeRpmDb()
        self.xsack = packageSack.PackageSack()
        self.repo = FakeRepo("installed")
        # XXX this side-affect is hacky:
        self.tsInfo.setDatabases(self.rpmdb, self.xsack)
    def resetTsInfo(self):
        # Start over with an empty transaction.
        self.tsInfo = transactioninfo.TransactionData()
    def resolveCode(self):
        """Wire a YumBase up to the fake databases, run resolveDeps() and
        return (symbolic result, messages)."""
        solver = YumBase()
        solver.save_ts = save_ts
        solver.conf = FakeConf()
        solver.arch.setup_arch('x86_64')
        solver.tsInfo = solver._tsInfo = self.tsInfo
        solver.rpmdb = self.rpmdb
        solver.pkgSack = self.xsack
        # Tag every package with a repo id so output code does not choke.
        for po in self.rpmdb:
            po.repoid = po.repo.id = "installed"
        for po in self.xsack:
            if po.repo.id is None:
                po.repo.id = "TestRepository"
            po.repoid = po.repo.id
        for txmbr in self.tsInfo:
            if txmbr.ts_state in ('u', 'i'):
                if txmbr.po.repo.id is None:
                    txmbr.po.repo.id = "TestRepository"
                txmbr.po.repoid = txmbr.po.repo.id
            else:
                txmbr.po.repoid = txmbr.po.repo.id = "installed"
        result, msg = solver.resolveDeps()
        return (self.res[result], msg)
class OperationsTests(_DepsolveTestsBase):
    """Run a yum command (install, update, remove, ...) in a given set of installed
    and available pkgs. Typical test case looks like:
        def testUpdate(self):
            p = self.pkgs
            res, msg = self.runOperation(['update'], [p.installed], [p.update])
            self.assert_(res=='ok', msg)
            self.assertResult((p.update,))
    To avoid creating the same pkgs over and over again overload the staticmethod
    buildPkgs. It gets called from __init__ with self.pkgs as first parameter.
    As it is a static method you can call .buildPkgs() from other Tests to share
    buildPkg code.
    """
    def runOperation(self, args, installed=[], available=[],
                     confs={}, multi_cmds=False):
        """Sets up and runs the depsolver. args[0] must be a valid yum command
        ("install", "update", ...). It might be followed by pkg names as on the
        yum command line. The pkg objects in installed are added to self.rpmdb and
        those in available to self.xsack which is the repository to resolve
        requirements from.

        When multi_cmds is true, args is a list of such command lists and
        they are executed in order, stopping at the first non-continuable
        result.
        """
        depsolver = YumBaseCli()
        depsolver.save_ts = save_ts
        depsolver.arch.setup_arch('x86_64')
        self.rpmdb = depsolver.rpmdb = FakeRpmDb()
        self.xsack = depsolver._pkgSack = packageSack.PackageSack()
        self.repo = depsolver.repo = FakeRepo("installed")
        depsolver.conf = FakeConf()
        for conf in confs:
            setattr(depsolver.conf, conf, confs[conf])
        # We are running nosetest, so we want to see some yum output
        # if a testcase is failing
        depsolver.doLoggingSetup(9,9)
        self.depsolver = depsolver
        for po in installed:
            po.repoid = po.repo.id = "installed"
            self.depsolver.rpmdb.addPackage(po)
        for po in available:
            if po.repo.id is None:
                po.repo.id = "TestRepository"
            po.repoid = po.repo.id
            self.depsolver._pkgSack.addPackage(po)
        if not multi_cmds:
            self.depsolver.basecmd = args[0]
            self.depsolver.extcmds = args[1:]
            res, msg = self.depsolver.doCommands()
        else:
            for nargs in args:
                self.depsolver.basecmd = nargs[0]
                self.depsolver.extcmds = nargs[1:]
                res, msg = self.depsolver.doCommands()
                if res != 2:
                    return res, msg
        self.tsInfo = depsolver.tsInfo
        # Result code 2 means "continue to buildTransaction".
        if res!=2:
            return res, msg
        res, msg = self.depsolver.buildTransaction()
        return self.res[res], msg
|
rpm-software-management/yum
|
test/testbase.py
|
Python
|
gpl-2.0
| 19,405
|
#!/usr/bin/env python
"""
books.py
reads a list of books from an input file and returns them filtered and sorted
features
- iterates through records without holding the entire dataset in memory, allowing for large datasets
- uses SQLite for storage and retrieval
"""
import os
import argparse
import sqlite3
from book_list.book_list_file_reader import BookListFileReader
from book_list.book_list import BookList
# Config
# Directory containing this script; used as the root for all paths below.
# (The redundant, never-used 'dir_path' alias was dropped.)
curdir = os.path.dirname(os.path.realpath(__file__))
SQLITE3_DB_FILE = curdir + '/db/booklist.sqlite3'
# Source files to import, keyed by their field-delimiter style.
file_import_list = {
    'csv': curdir + '/code-test-source-files/csv',
    'pipe': curdir + '/code-test-source-files/pipe',
    'slash': curdir + '/code-test-source-files/slash',
}
# Command line parsing
# Bug fix: this sentence is a description of the program, not its name;
# passing it as 'prog' replaced the program name in the usage/help line.
parser = argparse.ArgumentParser(
    description='Read multiple formats of book data and display them filtered and sorted.'
)
parser.add_argument('--filter', action='store', default=None,
                    help='show a subset of books, looks for the argument as a substring of any of the fields')
parser.add_argument('--year', action='store_true', default=False,
                    help="sort the books by year, ascending instead of default sort")
parser.add_argument('--reverse', action='store_true', default=False,
                    help='reverse sort')
args = parser.parse_args()
# Read files and populate book list
# Fixes: use the documented sqlite3.connect() factory (not the Connection
# class directly), drop a stray semicolon, and replace the Python-2-only
# dict.iteritems() with items(), which works on both Python 2 and 3.
sqlite3_connection = sqlite3.connect(SQLITE3_DB_FILE)
book_list = BookList(sqlite3_connection)
for parse_type, file_path in file_import_list.items():
    reader = BookListFileReader(file_path, parse_type)
    # Stream records one at a time so large files never sit in memory.
    while True:
        row = reader.get_result()
        if row is None:
            break
        book_list.insert_record(row)
# Make query based on command line arguments
book_list.query_book_list(filter=args.filter, year=args.year, reverse=args.reverse)
# Output
# Idiom fix: compare to None with 'is', matching the check in the ingest
# loop above (PEP 8; '== None' invokes __eq__ and can misfire).
while True:
    row = book_list.get_record()
    if row is None:
        break
    print("{}, {}, {}, {}".format(*row))
|
danieltalsky/gp-code-test
|
books.py
|
Python
|
unlicense
| 1,974
|
# Copyright 2016 Timothy M. Shead
#
# This file is part of Pipecat.
#
# Pipecat is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pipecat is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pipecat. If not, see <http://www.gnu.org/licenses/>.
"""Functions for working with JSON data.
"""
from __future__ import absolute_import, division, print_function
import json
import logging
import pipecat.record
log = logging.getLogger(__name__)
def parse(source, key="string", keyout="json"):
    """Parse JSON data from records.

    For each incoming record, decode the value stored under `key` as JSON
    and append it to the record under `keyout`.  Records that fail to
    parse (or lack `key`) are logged and dropped.

    Parameters
    ----------
    source: :ref:`Record generator <record-generators>`, required
    key: :ref:`Record key <record-keys>`, optional
        The key in incoming records to parse as JSON.
    keyout: :ref:`Record key <record-keys>`, optional
        The key in outgoing records where the parsed JSON will be stored.

    Yields
    ------
    record: dict
        Input records with an additional `keyout` field containing
        JSON-compatible data.
    """
    for item in source:
        try:
            value = json.loads(item[key])
            pipecat.record.add_field(item, keyout, value)
            yield item
        except Exception as error:
            # Best effort: log and skip the record rather than aborting
            # the whole stream.
            log.error(error)
|
shead-custom-design/pipecat
|
pipecat/json.py
|
Python
|
gpl-3.0
| 1,738
|
from __future__ import absolute_import
class Difference(object):
    """One difference found while comparing two trees: a path into the
    structure plus a human-readable message."""

    def __init__(self, path, message):
        self.path = path
        self.message = message

    def __unicode__(self):
        return u"%s: %s" % (self.path_string, self.message)

    def __str__(self):
        # Python 2: route through __unicode__ for consistent formatting.
        return str(unicode(self))

    def __repr__(self):
        return "Difference(%s: %r)" % (self.path_string, self.message)

    @property
    def __diff_implementation__(self):
        """Diff implementation used when comparing Difference objects
        themselves; diffs by path and message as child fields."""
        from .implementations import ImplementationBase, ChildDiffingMixing

        class DiffImplementation(ChildDiffingMixing, ImplementationBase):
            def path_and_child(self, diffable):
                yield ".path", diffable.path_string
                yield ".message", diffable.message

        return DiffImplementation

    @property
    def path_string(self):
        """The path segments joined into a single display string."""
        return ''.join(self.path)
|
lomereiter/treecompare
|
treecompare/difference.py
|
Python
|
bsd-2-clause
| 855
|
from setuptools import setup, find_packages # Always prefer setuptools over distutils
from codecs import open # To use a consistent encoding
from os import path
here = path.abspath(path.dirname(__file__))

# Get the long description from the README next to this setup.py.
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='ccdsum',

    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # http://packaging.python.org/en/latest/tutorial.html#version
    version='0.1.0',

    description='A utility to summarize an XML element across multiple CCDs',
    long_description=long_description,

    # The project's main homepage.
    url='https://github.com/joshhanna/ccd-sum',

    # Author details
    author='Josh Hanna',
    author_email='josh@hanna.io',

    # License (must match the classifier below).
    license='MIT',

    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',

        # Indicate who your project is intended for
        'Intended Audience :: Science/Research',
        'Intended Audience :: Developers',
        'Topic :: Scientific/Engineering :: Artificial Intelligence',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
        'Topic :: Scientific/Engineering :: Information Analysis',
        'Topic :: Scientific/Engineering :: Mathematics',

        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: MIT License',

        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],

    # What does your project relate to?
    keywords='ccd xml',

    # Automatically discover packages, excluding non-code directories.
    packages=find_packages(exclude=['contrib', 'docs', 'tests*']),

    # List run-time dependencies here. These will be installed by pip when your
    # project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files
    install_requires=['lxml'],

    # If there are data files included in your packages that need to be
    # installed, specify them here. If using Python 2.6 or less, then these
    # have to be included in MANIFEST.in as well.
    # package_data={
    #     'sample': ['package_data.dat'],
    # },

    # Although 'package_data' is the preferred approach, in some case you may
    # need to place data files outside of your packages.
    # see http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files
    # In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
    # data_files=[('my_data', ['data/data_file'])],

    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and allow
    # pip to create the appropriate form of executable for the target platform.
    # entry_points={
    #     'console_scripts': [
    #         'sample=sample:main',
    #     ],
    # },
)
|
joshhanna/ccd-sum
|
setup.py
|
Python
|
mit
| 3,734
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import fields, models, api
class Confirm(models.TransientModel):
    """Wizard for confirming the disbursement (loan payout) of an
    expense.account record selected in the context."""
    _name = 'confirm'
    _description = u'确认放款'

    def _get_confirm_id(self):
        # Default to the expense.account record the wizard was opened from.
        return self.env['expense.account'].browse(self._context.get('active_id', []))

    @api.onchange('confirm_id')
    def _get_should_pay(self):
        # Pre-fill both amounts from the expense total when a record is set.
        if self.confirm_id:
            self.should_pay = self.confirm_id.expenses_sum or 0.0
            self.actual_pay = self.confirm_id.expenses_sum or 0.0

    confirm_id = fields.Many2one('expense.account', string=u'确认放款', default=_get_confirm_id,
                                 readonly=True)
    advance_way = fields.Selection([('cash', u'现金'), ('transfer', u'转账')], string=u'放款方式', default='cash',
                                   required=True)  # disbursement method: cash or bank transfer
    should_pay = fields.Float(string=u'应放款金额', readonly=True)
    actual_pay = fields.Float(string=u'实放款金额', required=True)

    def confirm(self):
        # Mark the expense record as disbursed and record the chosen method.
        self.confirm_id.state = 'advanced'
        self.confirm_id.advance_way = self.advance_way
|
xhair/TopOdoo_Addons
|
ToproERP_Expense/wizard/expense_wizard.py
|
Python
|
agpl-3.0
| 1,180
|
"""
Test of timeseries ingestion
See http://predix01.cloud.answerhub.com/questions/21920/time-series-3.html?childToView=21931#answer-21931
and https://www.predix.io/resources/tutorials/tutorial-details.html?tutorial_id=1549&tag=1613&journey=Exploring%20Security%20services&resources=1594,1593,2105,1544,1549,2255,1951
"""
import logging
import time
import asyncio
import unittest
from thingflow.base import InputThing, Scheduler
from utils import make_test_output_thing_from_vallist
# Predix credentials/endpoints come from an uncommitted local config file;
# fall back to None so the skipUnless decorators below disable the tests.
try:
    from config_for_tests import PREDIX_TOKEN, PREDIX_ZONE_ID, \
        PREDIX_INGEST_URL,PREDIX_QUERY_URL
except ImportError:
    PREDIX_TOKEN=None
    PREDIX_ZONE_ID=None
    PREDIX_INGEST_URL=None
    PREDIX_QUERY_URL=None
# The Predix adapter needs websocket/requests; skip the tests when absent.
try:
    import websocket
    import requests
    from thingflow.adapters.predix import *
    PREREQS_AVAILABLE = True
except ImportError:
    PREREQS_AVAILABLE = False
# Verbose logging so failures against the live service are diagnosable.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler())
logging.basicConfig(level=logging.DEBUG)
# The event payloads each simulated sensor emits.
VALUE_STREAM = [1, 2, 3, 4, 5]
# NOTE(review): skipUnless decorators on a plain (non-TestCase) class have
# no effect on test collection -- presumably copied from TestPredix; verify.
@unittest.skipUnless(PREREQS_AVAILABLE,
                     "Predix prequisites not available")
@unittest.skipUnless(PREDIX_TOKEN is not None and PREDIX_ZONE_ID is not None and\
                     PREDIX_INGEST_URL is not None and PREDIX_QUERY_URL is not None,
                     "Predix not configured in config_for_tests.py")
class TestInput(InputThing):
    """Accept events from the predix reader and print them.
    After the specified number, disconnect.
    """
    def __init__(self, prev_in_chain, name):
        self.events = []
        self.name = name
        # connect() returns a callable that severs this subscription.
        self.disconnect = prev_in_chain.connect(self)
        self.values = []
    def on_next(self, x):
        print(x)
        self.values.append(x.val)
        # Stop listening once we have seen every value we wrote.
        if len(self.values)==len(VALUE_STREAM):
            self.disconnect()
            print("TestInput %s disconnected" % self.name)
    def on_completed(self):
        print("Reader %s received %d events" % (self.name, len(self.events)))
# Sensor ids used for the round-trip tests below.
TEST_SENSOR1 = 'test-sensor-1'
TEST_SENSOR2 = 'test-sensor-2'

@unittest.skipUnless(PREREQS_AVAILABLE,
                     "Predix prequisites not available")
@unittest.skipUnless(PREDIX_TOKEN is not None and PREDIX_ZONE_ID is not None and\
                     PREDIX_INGEST_URL is not None and PREDIX_QUERY_URL is not None,
                     "Predix not configured in config_for_tests.py")
class TestPredix(unittest.TestCase):
    """Round-trip tests against a live Predix time-series service: write
    events from two simulated sensors, then read them back and compare."""
    def test_batching(self):
        """We write out a set of event from two simulated sensors using an odd batch size (3).
        We then read them back and verify that we got all the events.
        """
        sensor1 = make_test_output_thing_from_vallist(TEST_SENSOR1, VALUE_STREAM)
        sensor2 = make_test_output_thing_from_vallist(TEST_SENSOR2, VALUE_STREAM)
        writer = PredixWriter(PREDIX_INGEST_URL, PREDIX_ZONE_ID, PREDIX_TOKEN,
                              extractor=EventExtractor(attributes={'test':True}),
                              batch_size=3)
        sensor1.connect(writer)
        sensor2.connect(writer)
        scheduler = Scheduler(asyncio.get_event_loop())
        scheduler.schedule_periodic(sensor1, 0.5)
        scheduler.schedule_periodic(sensor2, 0.5)
        start_time = time.time()
        scheduler.run_forever()
        # Now we read the events back
        reader1 = PredixReader(PREDIX_QUERY_URL, PREDIX_ZONE_ID, PREDIX_TOKEN, TEST_SENSOR1,
                               start_time=start_time,
                               one_shot=False)
        reader2 = PredixReader(PREDIX_QUERY_URL, PREDIX_ZONE_ID, PREDIX_TOKEN, TEST_SENSOR2,
                               start_time=start_time,
                               one_shot=False)
        ti1 = TestInput(reader1, 'sensor-1')
        ti2 = TestInput(reader2, 'sensor-2')
        scheduler.schedule_periodic(reader1, 2)
        scheduler.schedule_periodic(reader2, 2)
        scheduler.run_forever()
        self.assertListEqual(VALUE_STREAM, ti1.values)
        self.assertListEqual(VALUE_STREAM, ti2.values)
    def test_individual(self):
        """We write out a set of event from two simulated sensors using a batch size of 1.
        We then read them back and verify that we got all the events.
        """
        sensor1 = make_test_output_thing_from_vallist(TEST_SENSOR1, VALUE_STREAM)
        sensor2 = make_test_output_thing_from_vallist(TEST_SENSOR2, VALUE_STREAM)
        writer = PredixWriter(PREDIX_INGEST_URL, PREDIX_ZONE_ID, PREDIX_TOKEN,
                              extractor=EventExtractor(attributes={'test':True}),
                              batch_size=1)
        sensor1.connect(writer)
        sensor2.connect(writer)
        scheduler = Scheduler(asyncio.get_event_loop())
        scheduler.schedule_periodic(sensor1, 0.5)
        scheduler.schedule_periodic(sensor2, 0.5)
        start_time = time.time()
        scheduler.run_forever()
        # Now we read the events back
        reader1 = PredixReader(PREDIX_QUERY_URL, PREDIX_ZONE_ID, PREDIX_TOKEN, TEST_SENSOR1,
                               start_time=start_time,
                               one_shot=False)
        reader2 = PredixReader(PREDIX_QUERY_URL, PREDIX_ZONE_ID, PREDIX_TOKEN, TEST_SENSOR2,
                               start_time=start_time,
                               one_shot=False)
        ti1 = TestInput(reader1, 'sensor-1')
        ti2 = TestInput(reader2, 'sensor-2')
        scheduler.schedule_periodic(reader1, 2)
        scheduler.schedule_periodic(reader2, 2)
        scheduler.run_forever()
        self.assertListEqual(VALUE_STREAM, ti1.values)
        self.assertListEqual(VALUE_STREAM, ti2.values)
if __name__ == '__main__':
    # Configure root logging once.  The original code both attached a
    # StreamHandler manually and then called basicConfig(), which adds a
    # second handler and duplicates every log line; basicConfig() alone
    # sets the level and attaches a single stderr handler.
    logging.basicConfig(level=logging.DEBUG)
    unittest.main()
|
mpi-sws-rse/thingflow-python
|
tests/test_predix.py
|
Python
|
apache-2.0
| 5,967
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-04-06 11:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated Django migration (1.10.5).  Adds BusRoute.bus_on_route
    # and loosens BusDetails.bus_company; both are nullable FKs with CASCADE
    # delete.  NOTE(review): related_name 'bus_compamy_name' is misspelled,
    # but this is applied history -- never edit a historical migration.

    dependencies = [
        ('bus', '0016_auto_20170406_1113'),
    ]

    operations = [
        migrations.AddField(
            model_name='busroute',
            name='bus_on_route',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='bus_on_tis_route', to='bus.BusDetails'),
        ),
        migrations.AlterField(
            model_name='busdetails',
            name='bus_company',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='bus_compamy_name', to='bus.BusCompany'),
        ),
    ]
|
warlock57/bus_reservation
|
bus/migrations/0017_auto_20170406_1121.py
|
Python
|
mit
| 840
|
#!/usr/bin/env python
# This file is part of nexdatas - Tango Server for NeXus data writer
#
# Copyright (C) 2012-2014 DESY, Jan Kotanski <jkotan@mail.desy.de>
#
# nexdatas is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# nexdatas is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with nexdatas. If not, see <http://www.gnu.org/licenses/>.
# \package test nexdatas
# \file ExDSItemTest.py
# unittests for field Tags running Tango Server
#
import unittest
import sys
import struct
from nxsrecconfig.Describer import DSItem, ExDSItem
# True if running on a 64-bit machine
IS64BIT = (struct.calcsize("P") == 8)
# test fixture
class ExDSItemTest(unittest.TestCase):
    """Unit tests for the DSItem/ExDSItem datasource description classes."""

    # constructor
    # \param methodName name of the test method
    def __init__(self, methodName):
        unittest.TestCase.__init__(self, methodName)
        self._tfname = "field"
        # NOTE(review): this overwrites the assignment above; the second
        # line was presumably meant to set a differently named attribute
        # (e.g. _tgname).  Neither value is used by the tests below.
        self._tfname = "group"
        self._fattrs = {"short_name": "test", "units": "m"}
        # native numeric type names depend on the platform word size
        self._bint = "int64" if IS64BIT else "int32"
        self._buint = "uint64" if IS64BIT else "uint32"
        self._bfloat = "float64" if IS64BIT else "float32"

    # test starter
    # \brief Common set up
    def setUp(self):
        print("\nsetting up...")

    # test closer
    # \brief Common tear down
    def tearDown(self):
        print("tearing down ...")

    # constructor test
    # \brief It tests default settings
    def test_constructor(self):
        fun = sys._getframe().f_code.co_name
        print("Run: %s.%s() " % (self.__class__.__name__, fun))
        # default-constructed ExDSItem: every attribute is None
        el = ExDSItem()
        self.assertEqual(el.name, None)
        self.assertEqual(el.dstype, None)
        self.assertEqual(el.record, None)
        self.assertEqual(el.mode, None)
        self.assertEqual(el.nxtype, None)
        self.assertEqual(el.shape, None)
        # mode/nxtype/shape are set even when no base DSItem is given
        el = ExDSItem(None, "mymode", "mynxtype", [23, 4, 5])
        self.assertEqual(el.name, None)
        self.assertEqual(el.dstype, None)
        self.assertEqual(el.record, None)
        self.assertEqual(el.mode, "mymode")
        self.assertEqual(el.nxtype, "mynxtype")
        self.assertEqual(el.shape, [23, 4, 5])
        # a base DSItem's name/dstype/record are copied into the ExDSItem
        el = DSItem("myname3", "mytype3", "myrecord3")
        self.assertEqual(el.name, "myname3")
        self.assertEqual(el.dstype, "mytype3")
        self.assertEqual(el.record, "myrecord3")
        el2 = ExDSItem(el, "mymode2", "mynxtype2", [4, 5])
        self.assertEqual(el2.name, "myname3")
        self.assertEqual(el2.dstype, "mytype3")
        self.assertEqual(el2.record, "myrecord3")
        self.assertEqual(el2.mode, "mymode2")
        self.assertEqual(el2.nxtype, "mynxtype2")
        self.assertEqual(el2.shape, [4, 5])
if __name__ == '__main__':
    # allow running this test module directly
    unittest.main()
|
nexdatas/recselector
|
test/ExDSItem_test.py
|
Python
|
gpl-3.0
| 3,177
|
# -*- coding: utf-8 -*-
import re
from app.common.http_methods import get_request
from app.common.string_methods import everything_between, get_int_from_regex, get_values_from_regex
from app.common.target_parse_strings import PM_NB_REGEX, PM_BOX_PM_IDS_REGEX, PM_BEGIN_MESSAGE_PATTERN, \
PM_END_MESSAGE_PATTERN
from app.common.target_urls import PM_BOX_URL, PM_OPEN_URL
from django.utils.html import strip_tags
def __get_mp_nb(html_page):
    """Return the private-message count parsed out of *html_page*."""
    return get_int_from_regex(PM_NB_REGEX, html_page)
def get_pm_ids(pm_box_page):
    """Return the list of private-message ids found in the PM box page."""
    return get_values_from_regex(PM_BOX_PM_IDS_REGEX, pm_box_page)
# Matches the message body up to the closing "<br /><br /></td>" marker.
# Raw string fixes the invalid-escape DeprecationWarning of the original
# literal; compiled once instead of on every loop iteration.
# NOTE(review): inside the class, '+' is a literal character, so the class
# effectively matches any character -- behavior kept as-is.
_PM_CONTENT_RE = re.compile(r'(.[\S+\n\r\s]+)<br /><br /></td>')


def read_mp():
    """Fetch every private message and return its text plus the count.

    Returns a dict with:
      - 'mp_nb':  number of private messages found in the PM box
      - 'result': the concatenated, tag-stripped message bodies
    """
    result = ''
    pm_box_page = get_request(PM_BOX_URL)
    pm_ids = get_pm_ids(pm_box_page)
    mp_nb = len(pm_ids)
    for pm_id in pm_ids:
        page = get_request(PM_OPEN_URL.format(pm_id=pm_id))
        # cut the page down to the message area, then strip the markup
        begin = PM_BEGIN_MESSAGE_PATTERN
        end = PM_END_MESSAGE_PATTERN
        mp_content = everything_between(page, begin, end)
        mp_content = strip_tags(_PM_CONTENT_RE.findall(mp_content)[0])
        result += mp_content + '\n\n\n'
    return {
        'mp_nb': mp_nb,
        'result': result
    }
|
egenerat/gae-django
|
app/pm/pm_parser.py
|
Python
|
mit
| 1,179
|
# -*- coding: utf-8 -*-
"""
Component management module for lpm
A component definition consists of (at least):
- an ID (part number)
- a descriptive name
- a description
- a category
- a list of suppliers
- a list of manufacturers
- a list of revisions consisting of:
- a description
- a list of files
- a 'released' flag
- an 'obsolete' flag
- a history
Additional data may be specified as standard Python dictionary entries.
The latest revision is always the active one while earlier revisions are kept for archiving purposes.
Once a component is released it is immutable. Only admins can change the released flag or mark a component as obsolete.
The rules of access are as follows:
- anyone may view component definitions
- component_edit users may additionally:
- create new components
- modify components that are not currently released.
- create a new revision for components that are released.
- component_admin users may additionally:
- release / un-release components
- obsolete components
Valid categories can be defined with the LPM_COMPONENT_CATEGORIES configuration entry.
Note: There is no lock mechanism available, i.e. multiple users may edit the same component simultaneously.
:copyright: (c) 2016 Hannes Friederich.
:license: BSD, see LICENSE for more details.
"""
import re
import os
from datetime import datetime
from werkzeug import secure_filename
from flask import Blueprint, current_app, render_template, flash, abort, redirect, url_for, request, send_from_directory
from flask.ext.login import login_required, current_user
from pymongo import ReturnDocument
from pymongo.errors import DuplicateKeyError
from flask_wtf import Form
from wtforms import TextAreaField, StringField, SubmitField, FileField, SelectField
from wtforms.validators import InputRequired
from lpm.login import role_required
from lpm.utils import extract_errors
bp = Blueprint('components', __name__)
class ComponentForm(Form):
    """Create/edit form for a component.

    Declaration order determines rendering order.  Suppliers and
    manufacturers are limited to two fixed slots each.
    """
    name = StringField(label='Name', validators=[InputRequired()])
    description = TextAreaField(label='Description')
    # choices are filled in by the view from _get_categories()
    category = SelectField(label='Category', validators=[InputRequired()])
    comment = TextAreaField(label='Revision Comment')
    supplier1 = StringField(label='Supplier 1')
    supplier1part = StringField('Supplier 1 Part Number')
    supplier2 = StringField(label='Supplier 2')
    supplier2part = StringField('Supplier 2 Part Number')
    manufacturer1 = StringField(label='Manufacturer 1')
    manufacturer1part = StringField('Manufacturer 1 Part Number')
    manufacturer2 = StringField(label='Manufacturer 2')
    manufacturer2part = StringField('Manufacturer 2 Part Number')
class UploadForm(Form):
    """Single-file upload form (the file itself is read from request.files)."""
    file = FileField(label='File')
class RevisionForm(Form):
    """Form used to create a new revision of a released component."""
    comment = TextAreaField(label='Revision Comment')
class ReleaseForm(Form):
    """Confirmation form for releasing a component."""
    action = SubmitField(label='Release')
class UnReleaseForm(Form):
    """Confirmation form for un-releasing a component."""
    action = SubmitField(label='Un-Release')
class ObsoleteForm(Form):
    """Confirmation form for marking a component as obsolete."""
    action = SubmitField(label='Mark as Obsolete')
@bp.route('/')
@login_required
def overview():
    """Show the overview page containing all components.

    Obsolete parts are hidden unless ?show_obsolete is set.
    """
    show_obsolete = request.args.get('show_obsolete')
    query = None if show_obsolete else {'obsolete': False}
    data = current_app.mongo.db.components.find(
        filter=query,
        projection=['name', 'category', 'obsolete', 'released'])
    return render_template('components/overview.html', data=data,
                           show_obsolete=show_obsolete)
@bp.route('/<partno>')
@login_required
def details(partno):
    """Show the details page for the given component.

    Only component_edit users may look at specific revisions; all other
    users are redirected to the latest revision.
    """
    try:
        pn = PartNumber(partno)
    except ValueError:
        abort(404)
    # users without edit rights may not view outdated revisions
    if pn.revision is not None and not current_user.has_role('component_edit'):
        return redirect(url_for('components.details', partno=pn.base_number))
    # 404 unless the component and the requested revision both exist
    obj = current_app.mongo.db.components.find_one_or_404(pn.base_number)
    num_revisions = len(obj.get('revisions', list()))
    if pn.revision_number is not None and pn.revision_number >= num_revisions:
        abort(404)
    pn.set_num_revisions(num_revisions)
    attachments = _get_files(pn.id)
    # the first file named 'preview.*' (the list is sorted) is the preview
    preview_file = next((name for name in attachments
                         if name.startswith('preview.')), None)
    return render_template('components/details.html', data=obj,
                           partno=pn, files=attachments, preview_file=preview_file)
@bp.route('/<partno>/<file>')
@login_required
def file(partno, file):
    """Send the requested attachment after performing access checks.

    Only component_edit users may fetch files of outdated revisions; all
    other users may only see files of the latest revision.
    """
    try:
        pn = PartNumber(partno)
    except ValueError:
        abort(404)
    # files are always tied to a concrete revision
    if pn.revision is None:
        abort(404)
    # 404 unless the component and the requested revision both exist
    obj = current_app.mongo.db.components.find_one_or_404(pn.base_number)
    num_revisions = len(obj.get('revisions', list()))
    assert pn.revision_number is not None
    if pn.revision_number >= num_revisions:
        abort(404)
    pn.set_num_revisions(num_revisions)
    if pn.is_outdated() and not current_user.has_role('component_edit'):
        abort(403)
    # let werkzeug stream the file from the component's directory
    directory = os.path.join(current_app.config['LPM_COMPONENT_FILES_DIR'], partno)
    return send_from_directory(directory, file)
@bp.route('/add', methods=['GET', 'POST'])
@role_required('component_edit')
def add():
    """Present the form to add a new component and insert it upon POST.

    A fresh part number is allocated from the unique_numbers collection;
    the new component starts with one revision, unreleased and not obsolete.
    """
    form = ComponentForm(request.form)
    form.category.choices = _get_categories()
    # form submittal handling
    if request.method == 'POST' and form.validate_on_submit():
        id = _create_new_partno()
        suppliers = _extract_suppliers(form)
        manufacturers = _extract_manufacturers(form)
        now = datetime.now()
        obj = dict(_id=id,
                   name=form.name.data,
                   description=form.description.data,
                   category=form.category.data,
                   suppliers=suppliers,
                   manufacturers=manufacturers,
                   revisions=[{'date': now, 'comment': form.comment.data}],
                   released=False,
                   obsolete=False,
                   history=[{'date': now, 'user': current_user.id, 'message': 'created'}])
        try:
            # insert_one() replaces the deprecated Collection.insert(),
            # consistent with the update_one()/find_one_*() calls used
            # elsewhere in this module.
            current_app.mongo.db.components.insert_one(obj)
            flash('component successfully created', 'success')
            return redirect(url_for('components.details', partno=id))
        except DuplicateKeyError as e:
            flash('data insertion failed (%s), please contact the administrator' % e, 'error')
    extract_errors(form)
    # NOTE(review): 'type=type' passes the *builtin* type object to the
    # template -- looks accidental; confirm whether the template uses it.
    return render_template('components/new_form.html', form=form, type=type)
@bp.route('/<partno>/edit', methods=['GET', 'POST'])
@role_required('component_edit')
def edit(partno):
    """
    Presents the form to edit an already existing component.

    GET pre-fills the form from the stored document (name, description,
    category, the latest revision's comment and up to two suppliers /
    manufacturers).  POST writes the changes back with $set, updates the
    latest revision's comment in place, and appends a history entry.
    Only unreleased components may be edited (_load_if_unreleased).
    """
    obj = _load_if_unreleased(partno)
    # prepare the form data
    revisions = obj.get('revisions')
    suppliers = obj.get('suppliers', list())
    manufacturers = obj.get('manufacturers', list())
    # index of the latest (editable) revision
    revidx = len(revisions)-1
    num_suppliers = len(suppliers)
    num_manufacturers = len(manufacturers)
    data = dict(name=obj.get('name'),
                description=obj.get('description'),
                category=obj.get('category'),
                comment=revisions[revidx].get('comment'))
    # the form has two fixed slots each for suppliers and manufacturers
    if num_suppliers > 0:
        data['supplier1'] = suppliers[0].get('name')
        data['supplier1part'] = suppliers[0].get('partno')
    if num_suppliers > 1:
        data['supplier2'] = suppliers[1].get('name')
        data['supplier2part'] = suppliers[1].get('partno')
    if num_manufacturers > 0:
        data['manufacturer1'] = manufacturers[0].get('name')
        data['manufacturer1part'] = manufacturers[0].get('partno')
    if num_manufacturers > 1:
        data['manufacturer2'] = manufacturers[1].get('name')
        data['manufacturer2part'] = manufacturers[1].get('partno')
    form = ComponentForm(request.form, data=data)
    form.category.choices = _get_categories()
    # form submittal handling
    # use $set for the updated fields, directly update the latest revision
    # add a comment in the history
    if request.method == 'POST' and form.validate_on_submit():
        suppliers = _extract_suppliers(form)
        manufacturers = _extract_manufacturers(form)
        set_data = dict(name=form.name.data,
                        description=form.description.data,
                        category=form.category.data,
                        suppliers=suppliers,
                        manufacturers=manufacturers)
        # dotted key targets the latest revision's comment only
        set_data['revisions.'+str(revidx)+'.comment'] = form.comment.data
        result = current_app.mongo.db.components.update_one(
            filter={'_id': partno},
            update={
                '$set': set_data,
                '$push': {
                    'history': {
                        'date': datetime.now(),
                        'user': current_user.id,
                        'message': 'updated',
                    }
                }
            }
        )
        if result.modified_count == 1:
            flash('data successfully updated', 'success')
        else:
            # should not happen. If the ID is wrong, the initial lookup will fail
            flash('no data modified, please contact the administrator', 'error')
        return redirect(url_for('components.details', partno=partno))
    extract_errors(form)
    return render_template('components/edit_form.html', form=form, partno=partno)
@bp.route('/<partno>/fileupload', methods=['GET', 'POST'])
@role_required('component_edit')
def fileupload(partno):
    """
    Presents the form to upload a new file for the design item.
    Stores the uploaded file in the correct location upon POST submit.

    The part number must include a revision letter, the revision must
    exist, and only the latest revision of an unreleased component may
    receive files.
    """
    # the part number must be valid
    try:
        pn = PartNumber(partno)
    except ValueError:
        abort(404)
    # the revision must be specified
    if pn.revision is None:
        abort(404)
    # check the data
    obj = _load_if_unreleased(pn.base_number)
    # ensure the desired revision exists
    num_revisions = len(obj.get('revisions', list()))
    if pn.revision_number >= num_revisions:
        abort(404)
    pn.set_num_revisions(num_revisions)
    if pn.is_outdated():
        flash('cannot upload files to outdated revisions', 'error')
        return redirect(url_for('components.details', partno=partno))
    form = UploadForm(request.form)
    # WTF is NOT used for the file handling, since the file upload handling seems broken.
    file = request.files.get('file')
    if request.method == 'POST' and form.validate_on_submit() and file:
        try:
            # sanitize the client-supplied name before touching the filesystem
            filename = secure_filename(file.filename)
            dir = os.path.join(current_app.config['LPM_COMPONENT_FILES_DIR'], partno)
            if not os.path.exists(dir):
                os.makedirs(dir)
            path = os.path.join(dir, filename)
            file.save(path)
            flash('file successfully uploaded', 'success')
            return redirect(url_for('components.details', partno=partno))
        except Exception as e:
            # NOTE(review): flashes the exception *object*, not str(e) --
            # confirm the template renders it sensibly.
            flash(e, 'error')
    extract_errors(form)
    return render_template('components/upload_form.html', form=form, partno=partno)
@bp.route('/<partno>/new-revision', methods=['GET', 'POST'])
@role_required('component_edit')
def new_revision(partno):
    """Present the form to add a new revision and create it upon POST."""
    _load_if_released(partno)  # ensures the component exists and is released
    form = RevisionForm(request.form)
    if request.method == 'POST' and form.validate_on_submit():
        now = datetime.now()
        change = {
            '$set': {
                'released': False  # a new revision is not already released
            },
            '$push': {
                'revisions': {
                    'date': now,
                    'comment': form.comment.data
                },
                'history': {
                    'date': now,
                    'user': current_user.id,
                    'message': 'new revision created'
                }
            }
        }
        result = current_app.mongo.db.components.update_one(
            filter={'_id': partno}, update=change)
        if result.modified_count == 1:
            flash('new revision created', 'success')
        else:
            # should not happen: the lookup above already validated the id
            flash('no data modified, please contact the administrator', 'error')
        return redirect(url_for('components.details', partno=partno))
    extract_errors(form)
    return render_template('components/revision_form.html', form=form, partno=partno)
@bp.route('/<partno>/release', methods=['GET', 'POST'])
@role_required('component_admin')
def release(partno):
    """Release the component when the confirmation form is submitted."""
    obj = _load_if_unreleased(partno)
    form = ReleaseForm(request.form)
    if request.method == 'POST' and form.validate_on_submit():
        change = {
            '$set': {
                'released': True
            },
            '$push': {
                'history': {
                    'date': datetime.now(),
                    'user': current_user.id,
                    'message': 'released'
                }
            }
        }
        result = current_app.mongo.db.components.update_one(
            filter={'_id': partno}, update=change)
        if result.modified_count == 1:
            flash('component released', 'success')
        else:
            # should not happen: _load_if_unreleased validated the id
            flash('no data modified, please contact the administrator', 'error')
        return redirect(url_for('components.details', partno=partno))
    extract_errors(form)
    return render_template('components/release_form.html', data=obj, form=form)
@bp.route('/<partno>/unrelease', methods=['GET', 'POST'])
@role_required('component_admin')
def unrelease(partno):
    """Un-release the component when the confirmation form is submitted."""
    obj = _load_if_released(partno)
    form = UnReleaseForm(request.form)
    if request.method == 'POST' and form.validate_on_submit():
        change = {
            '$set': {
                'released': False
            },
            '$push': {
                'history': {
                    'date': datetime.now(),
                    'user': current_user.id,
                    'message': 'un-released'
                }
            }
        }
        result = current_app.mongo.db.components.update_one(
            filter={'_id': partno}, update=change)
        if result.modified_count == 1:
            flash('component un-released', 'success')
        else:
            # should not happen: _load_if_released validated the id
            flash('no data modified, please contact the administrator', 'error')
        return redirect(url_for('components.details', partno=partno))
    extract_errors(form)
    return render_template('components/unrelease_form.html', data=obj, form=form)
@bp.route('/<partno>/make-obsolete', methods=['GET', 'POST'])
@role_required('component_admin')
def make_obsolete(partno):
    """Mark the given component as obsolete.

    Precondition: the user has the admin role and the item is not
    already obsolete.
    """
    obj = _load_if_active(partno)
    form = ObsoleteForm(request.form)
    if request.method == 'POST' and form.validate_on_submit():
        change = {
            '$set': {
                'obsolete': True
            },
            '$push': {
                'history': {
                    'date': datetime.now(),
                    'user': current_user.id,
                    'message': 'component obsoleted'
                }
            }
        }
        result = current_app.mongo.db.components.update_one(
            filter={'_id': partno}, update=change)
        if result.modified_count == 1:
            flash('component obsoleted', 'success')
        else:
            # should not happen: _load_if_active validated the id
            flash('no data modified, please contact the administrator', 'error')
        return redirect(url_for('components.details', partno=partno))
    extract_errors(form)
    return render_template('components/obsolete_form.html', data=obj, form=form)
def ensure_exists(partno):
    """
    Ensures that the given part number does exist in the database and raises
    ValueError if the item does not exist.
    """
    if not current_app.mongo.db.components.find_one(partno):
        raise ValueError('unknown part number %s' % partno)
def _create_new_partno():
    """
    Creates and returns a new part number (component ID).
    The new number is retrieved from the database and prefixed with the configured prefix
    """
    # atomically increment the 'partno' counter document; AFTER returns
    # the post-increment value, so numbers are never reused
    data = current_app.mongo.db.unique_numbers.find_one_and_update(
        {'_id': 'partno'},
        {'$inc': {'seq': 1}},
        upsert=True,  # creates the item if needed
        return_document=ReturnDocument.AFTER
    )
    prefix = current_app.config.get('LPM_PARTNO_PREFIX', '')
    # zero-padded to four digits, e.g. 'LPM0042'
    return '%s%04d' % (prefix, data['seq'])
def _get_files(partno):
    """
    Returns a sorted list of files belonging to the given part number.

    Returns an empty list when the component has no files directory yet
    (or the files dir is not configured).
    """
    try:
        dir = os.path.join(current_app.config['LPM_COMPONENT_FILES_DIR'], partno)
        return sorted(os.listdir(dir))
    except (KeyError, OSError):
        # unconfigured LPM_COMPONENT_FILES_DIR or a missing directory just
        # means "no files yet"; narrowed from a bare except, which also
        # swallowed genuine programming errors
        return list()
def _load_if_active(partno):
    """
    Loads the component with given ID from the database and returns it.
    Aborts with 404 if the component is not found.
    Flashes an error message and redirects to the details page if the component is obsolete
    """
    obj = current_app.mongo.db.components.find_one_or_404(partno)
    # NOTE(review): the default is True, so a document *missing* the
    # 'obsolete' flag is treated as obsolete (fail-safe) -- confirm intended.
    if obj.get('obsolete', True):
        flash('Invalid operation for obsolete components', 'error')
        # abort() with a redirect response short-circuits the request
        abort(redirect(url_for('components.details', partno=partno)))
    return obj
def _load_if_released(partno):
    """
    Loads the component with given ID from the database and returns it.
    Aborts with 404 if the component is not found.
    Flashes an error message and redirects to the details page if the component is not released
    """
    obj = _load_if_active(partno)
    # a missing 'released' flag defaults to False: unknown state is
    # treated as not released
    if not obj.get('released', False):
        flash('Invalid operation for non-released components', 'error')
        abort(redirect(url_for('components.details', partno=partno)))
    return obj
def _load_if_unreleased(partno):
    """
    Loads the component with given ID from the database and returns it.
    Aborts with 404 if the component is not found.
    Flashes an error message and redirects to the details page if the component is released
    """
    obj = _load_if_active(partno)
    # a missing 'released' flag defaults to True here: only explicitly
    # unreleased components may be modified
    if obj.get('released', True):
        flash('Invalid operation for released components', 'error')
        abort(redirect(url_for('components.details', partno=partno)))
    return obj
def _extract_suppliers(form):
    """Return the list of supplier dicts entered in *form* (up to two)."""
    slots = [(form.supplier1, form.supplier1part),
             (form.supplier2, form.supplier2part)]
    return [{'name': name_field.data, 'partno': part_field.data}
            for name_field, part_field in slots if name_field.data]
def _extract_manufacturers(form):
    """Return the list of manufacturer dicts entered in *form* (up to two)."""
    slots = [(form.manufacturer1, form.manufacturer1part),
             (form.manufacturer2, form.manufacturer2part)]
    return [{'name': name_field.data, 'partno': part_field.data}
            for name_field, part_field in slots if name_field.data]
def _get_categories():
    """Return (value, label) choice tuples for the configured categories.

    Note: the config default is a set, so choice ordering is unspecified
    unless LPM_COMPONENT_CATEGORIES is an ordered sequence.
    """
    return [(c, c) for c in current_app.config.get('LPM_COMPONENT_CATEGORIES', set())]
class PartNumber:
    """
    Class that encapsulates parsing and revision handling of part numbers.

    A part number is an upper-case prefix followed by four digits (the
    base number), optionally followed by one lower-case revision letter,
    e.g. 'LPM0042' or 'LPM0042c'.  Revision 'a' is revision number 0.
    """
    # raw string avoids the invalid-escape warning for '\d'
    pattern = re.compile(r'^([A-Z]+\d{4})([a-z])?$')

    def __init__(self, partno):
        """Parse *partno*; raises ValueError for invalid part numbers."""
        match = PartNumber.pattern.match(partno)
        if not match:
            # bug fix: the message previously interpolated the builtin
            # `str` instead of the offending input value
            raise ValueError("string '%s' is not a valid part number" % partno)
        self._baseno = match.group(1)
        self._rev = match.group(2)  # None when no revision letter was given
        self._num_revisions = None

    def set_num_revisions(self, num_revisions):
        """
        Sets the number of revisions and assigns the latest revision if the revision has not been already set.
        The number of revisions must be > 0
        """
        assert num_revisions > 0
        self._num_revisions = num_revisions
        if self._rev is None:
            self._rev = PartNumber.revision_repr(num_revisions-1)

    @property
    def id(self):
        """Full identifier including the revision letter (if known)."""
        v = self._baseno
        if self._rev is not None:
            v += self._rev
        return v

    @property
    def base_number(self):
        """The part number without the revision letter."""
        return self._baseno

    @property
    def revision(self):
        """The revision letter, or None if not specified/derived yet."""
        return self._rev

    @property
    def revision_number(self):
        """Zero-based revision index ('a' -> 0), or None if unknown."""
        return None if self._rev is None else ord(self._rev) - ord('a')

    def is_outdated(self):
        """
        Returns whether the given revision is outdated. The number of revisions must have been set previously
        """
        assert self._num_revisions is not None
        assert self._rev is not None
        return self._num_revisions > self.revision_number+1

    @classmethod
    def revision_repr(cls, revision):
        """Return the letter for the zero-based revision index (0 -> 'a')."""
        return chr(revision + ord('a'))

    def revision_id(self, revision):
        """Return this part's full id at the given revision index."""
        return self._baseno + PartNumber.revision_repr(revision)

    def __repr__(self):
        return self.id
|
h-friederich/lpm
|
components.py
|
Python
|
bsd-3-clause
| 22,637
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Professor(Package):
    """Professor Monte-Carlo tuning package"""

    homepage = "https://professor.hepforge.org/"
    url = "https://professor.hepforge.org/downloads/?f=Professor-2.3.3.tar.gz"

    maintainers = ['mjk655']

    version('2.3.3', sha256='60c5ba00894c809e2c31018bccf22935a9e1f51c0184468efbdd5d27b211009f')

    depends_on('wxwidgets')
    depends_on('yoda')
    depends_on('eigen')
    depends_on('py-cython')
    depends_on('py-iminuit')
    depends_on('py-matplotlib')

    def install(self, spec, prefix):
        # Plain Makefile build: no configure step; the install prefix is
        # passed straight to the Makefile's install target.
        make()
        make('PREFIX={0}'.format(prefix), "install")
|
LLNL/spack
|
var/spack/repos/builtin/packages/professor/package.py
|
Python
|
lgpl-2.1
| 825
|
# Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.simpleapi import *
from mantid.api import *
from testhelpers import *
from numpy import *
class GetEiT0atSNSTest(unittest.TestCase):
    """Tests for the GetEiT0atSNS algorithm using SEQUOIA monitor data."""
    def testGETS(self):
        # build a workspace carrying the monitor data and the sample logs
        # the algorithm needs (chopper setting and requested energy)
        w=Load('ADARAMonitors.nxs')
        LoadInstrument(Workspace=w,InstrumentName='SEQUOIA',RewriteSpectraMap=False)
        AddSampleLog(Workspace=w,LogName='vChTrans',LogText='1',LogType='Number Series')
        AddSampleLog(Workspace=w,LogName='EnergyRequest',LogText='20',LogType='Number Series')
        res=GetEiT0atSNS(w)
        self.assertAlmostEqual(res[0],20.09,places=2)
        self.assertAlmostEqual(res[1],30.415,places=2)
        # a too-small monitor radius is expected to fail with this message.
        # NOTE(review): if no exception is raised, the assertion in the
        # except branch is silently skipped -- consider adding self.fail()
        # right after the call.
        try:
            res=GetEiT0atSNS(w,0.1)
        except Exception as e:
            s="Could not get Ei, and this is not a white beam run\nNo peak found for the monitor with spectra num: 2"
            self.assertEqual(str(e).find(s),0)
        DeleteWorkspace(w)
if __name__ == '__main__':
    # allow running this test module directly
    unittest.main()
|
mganeva/mantid
|
Framework/PythonInterface/test/python/plugins/algorithms/GetEiT0atSNSTest.py
|
Python
|
gpl-3.0
| 1,292
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2012 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit import Guess
from guessit.transfo import SingleNodeGuesser
from guessit.patterns import weak_episode_rexps
import re
import logging
log = logging.getLogger(__name__)
def guess_weak_episodes_rexps(string, node):
    """Try the 'weak' episode-number regexps against *string*.

    Returns a (guess, span) pair, or (None, None) when nothing matches or
    an episode number has already been found elsewhere in the tree.
    """
    if 'episodeNumber' in node.root.info:
        return None, None

    for rexp, span_adjust in weak_episode_rexps:
        match = re.search(rexp, string, re.IGNORECASE)
        if match:
            metadata = match.groupdict()
            # span of the match in the original string, corrected by the
            # per-pattern adjustment offsets
            span = (match.start() + span_adjust[0],
                    match.end() + span_adjust[1])

            epnum = int(metadata['episodeNumber'])
            if epnum > 100:
                # a 3-4 digit number such as 2301 is read as SSEE
                season, epnum = epnum // 100, epnum % 100
                # episodes which have a season > 25 are most likely errors
                # (Simpsons is at 23!)
                if season > 25:
                    continue
                return Guess({ 'season': season,
                               'episodeNumber': epnum },
                             confidence=0.6), span
            else:
                # NOTE(review): returns the raw groupdict, so the
                # episodeNumber value stays a string here -- confirm
                # downstream consumers handle that.
                return Guess(metadata, confidence=0.3), span

    return None, None

# tells SingleNodeGuesser to pass the node as well as the string
guess_weak_episodes_rexps.use_node = True
def process(mtree):
    """Run the weak-episode guesser over every node of *mtree*."""
    SingleNodeGuesser(guess_weak_episodes_rexps, 0.6, log).process(mtree)
|
Branlala/docker-sickbeardfr
|
sickbeard/lib/guessit/transfo/guess_weak_episodes_rexps.py
|
Python
|
mit
| 2,126
|
import _plotly_utils.basevalidators
class BordercolorValidator(_plotly_utils.basevalidators.ColorValidator):
    """Validator for scattergeo.marker.colorbar.bordercolor (auto-generated)."""
    def __init__(
        self,
        plotly_name="bordercolor",
        parent_name="scattergeo.marker.colorbar",
        **kwargs
    ):
        super(BordercolorValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            # defaults below may be overridden by the caller via kwargs
            edit_type=kwargs.pop("edit_type", "calc"),
            role=kwargs.pop("role", "style"),
            **kwargs
        )
|
plotly/python-api
|
packages/python/plotly/plotly/validators/scattergeo/marker/colorbar/_bordercolor.py
|
Python
|
mit
| 510
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2014, 2015, 2016 Adam.Dybbroe
# Author(s):
# Adam.Dybbroe <a000680@c14526.ad.smhi.se>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Reader and data handler for the Synop reports from DWD
"""
#TESTFILE = "./DataFromDwd/201403/sy_SYNOP_20140306.qc"
# Hard-coded sample file used when this module is run as a script.
TESTFILE = "/data/proj6/saf/adybbroe/satellite_synop_matchup/DataFromDwd/201403/sy_SYNOP_20140306.qc"
# NOTE(review): module-level alias appears unused below -- confirm before removing.
filename = TESTFILE
#from astropy.io import ascii
import pandas as pd
import numpy as np
from datetime import datetime
def get_data(filename):
    """Get all Synop data from one file and return it as a DataFrame."""
    def parse_date(value):
        # timestamps are written as YYYYMMDDHHMM
        return datetime.strptime(value, '%Y%m%d%H%M')

    def scaled(factor):
        # build a converter dividing the raw integer field by *factor*
        def convert(value):
            return float(value) / factor
        return convert

    dtype = [('date', object), ('station', '|S5'),
             ('lat', 'f8'), ('lon', 'f8'),
             ('msl', 'f8'), ('nix', 'i4'),
             ('pressure', 'f8'), ('temp', 'f8'),
             ('dtemp', 'f8'),
             ('total_cloud_cover', 'i4'),
             ('nh', 'i4'),
             ('cl', 'i4'),
             ('cm', 'i4'),
             ('ch', 'i4'),
             ('vvvv', 'i4'),
             ('ww', 'i4'),
             ]
    data = np.genfromtxt(filename,
                         skip_header=1,
                         skip_footer=35,
                         usecols=(
                             0, 1, 2, 3, 4, 5, 6, 7, 8, 10, 11, 13, 14, 15, 28, 29),
                         dtype=dtype,
                         unpack=True,
                         converters={0: parse_date,
                                     2: scaled(100.),
                                     3: scaled(100.),
                                     6: scaled(10.),
                                     7: scaled(10.),
                                     8: scaled(10.)})
    return pd.DataFrame(data)
if __name__ == "__main__":
synop = get_data(TESTFILE)
mystation = synop[synop['station'] == '01023']
print mystation['total_cloud_cover']
|
adybbroe/pysatsynop-matchup
|
synop_dwd.py
|
Python
|
gpl-3.0
| 2,593
|
"""
CTF API Configuration File
Note this is just a python script. It does config things.
"""
from api.common import WebException
import api
import datetime
import json
import api.app
""" FLASK """
#api.app.session_cookie_domain = "0.0.0.0"
api.app.session_cookie_path = "/"
api.app.session_cookie_name = "flask"
# KEEP THIS SECRET
api.app.secret_key = "5XVbne3AjPH35eEH8yQI"
""" SECURITY """
api.common.allowed_protocols = ["https", "http"]
api.common.allowed_ports = [8080]
""" MONGO """
api.common.mongo_db_name = "ctf"
api.common.mongo_addr = "127.0.0.1"
api.common.mongo_port = 27017
""" TESTING """
testing_mongo_db_name = "ctf_test"
testing_mongo_addr = "127.0.0.1"
testing_mongo_port = 27017
""" SETUP """
competition_name = "ctf"
competition_urls = ["http://192.168.2.2"]
# Helper class for timezones
class EST(datetime.tzinfo):
    """Fixed-offset timezone helper.

    Sign convention: ``EST(5)`` yields UTC-05:00, i.e. the constructor
    argument is the number of hours *west* of UTC.
    """

    def __init__(self, utc_offset):
        # Hours west of UTC (positive for the Americas).
        self.utc_offset = utc_offset

    def utcoffset(self, dt):
        # Negated: an argument of 5 produces a UTC-5 offset.
        return datetime.timedelta(hours=-self.utc_offset)

    def dst(self, dt):
        # Daylight saving time is not modelled.
        return datetime.timedelta(0)

    def tzname(self, dt):
        # BUG FIX: tzinfo.tzname() is abstract; without this override any
        # "%Z" formatting of an aware datetime raised NotImplementedError.
        if self.utc_offset >= 0:
            return "UTC-{:02d}:00".format(self.utc_offset)
        return "UTC+{:02d}:00".format(-self.utc_offset)
""" CTF Settings
These are the default settings that will be loaded
into the database if no settings are already loaded.
"""
default_settings = {
"enable_teachers": True,
"enable_feedback": True,
# TIME WINDOW
"start_time": datetime.datetime.utcnow(),
"end_time": datetime.datetime.utcnow(),
# EMAIL WHITELIST
"email_filter": [],
# TEAMS
"max_team_size": 1,
# ACHIEVEMENTS
"achievements": {
"enable_achievements": True,
"processor_base_path": "./achievements",
},
"username_blacklist": [
"root",
"daemon",
"bin",
"sys",
"adm",
"tty",
"disk",
"lp",
"mail",
"news",
"uucp",
"man",
"proxy",
"kmem",
"dialout",
"fax",
"voice",
"cdrom",
"floppy",
"tape",
"sudo",
"audio",
"dip",
"backup",
"operator",
"list",
"irc",
"src",
"gnats",
"shadow",
"utmp",
"video",
"sasl",
"plugdev",
"staff",
"games",
"users",
"nogroup",
"input",
"netdev",
"crontab",
"syslog",
"fuse",
"messagebus",
"uuidd",
"mlocate",
"ssh",
"landscape",
"admin",
"vagrant",
"scanner",
"colord",
"vboxsf",
"puppet",
"ubuntu",
"utempter",
"shellinabox",
"docker",
"competitors",
"hacksports",
],
# EMAIL (SMTP)
"email":{
"enable_email": False,
"email_verification": False,
"smtp_url":"",
"smtp_port": 587,
"email_username": "",
"email_password": "",
"from_addr": "",
"from_name": "",
"max_verification_emails": 3,
"smtp_security": "TLS"
},
# CAPTCHA
"captcha": {
"enable_captcha": False,
"captcha_url": "https://www.google.com/recaptcha/api/siteverify",
"reCAPTCHA_public_key": "",
"reCAPTCHA_private_key": "",
},
# LOGGING
# Will be emailed any severe internal exceptions!
# Requires email block to be setup.
"logging": {
"admin_emails": ["ben@example.com", "joe@example.com"],
"critical_error_timeout": 600
}
}
""" Helper functions to get settings. Do not change these """
def get_settings():
db = api.common.get_conn()
settings = db.settings.find_one({}, {"_id":0})
if settings is None:
db.settings.insert(default_settings)
return default_settings
return settings
def change_settings(changes):
    """Apply a (possibly nested) dict of changes to the stored settings.

    Raises WebException when a key is unknown or when a new value's type
    differs from the stored one.
    """
    db = api.common.get_conn()
    settings = db.settings.find_one({})

    def check_keys(real, changed):
        # Validate ``changed`` against the stored structure and, for nested
        # dicts, flatten them into dotted keys so one mongo $set applies all.
        keys = list(changed.keys())  # snapshot: ``changed`` is mutated below
        for key in keys:
            if key not in real:
                raise WebException("Cannot update setting for '{}'".format(key))
            elif type(real[key]) != type(changed[key]):
                raise WebException("Cannot update setting for '{}'".format(key))
            elif isinstance(real[key], dict):
                # Recurse first so deeper levels are already flattened.
                check_keys(real[key], changed[key])
                # change the key so mongo $set works correctly
                for key2 in changed[key]:
                    changed["{}.{}".format(key,key2)] = changed[key][key2]
                changed.pop(key)

    check_keys(settings, changes)
    db.settings.update({"_id":settings["_id"]}, {"$set": changes})
|
picoCTF/picoCTF-web
|
api/config.py
|
Python
|
mit
| 4,633
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2019, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from __future__ import print_function
import abc
import torch
import torch.nn as nn
import torch.nn.functional as F
from htmresearch.frameworks.pytorch.duty_cycle_metrics import (
maxEntropy, binaryEntropy
)
from htmresearch.frameworks.pytorch.functions import k_winners, k_winners2d
def getEntropy(m):
    """
    Return the current and maximum entropies of a module.

    Non-KWinners modules contribute nothing.

    :param m: any module
    :return: (currentEntropy, maxEntropy)
    """
    if not isinstance(m, KWinnersBase):
        return 0.0, 0.0
    return m.entropy(), m.maxEntropy()
def getEntropies(m):
    """
    Recursively accumulate the current and max entropies of ``m`` and all
    of its descendant modules.

    :param m: any module
    :return: (currentEntropy, maxEntropy)
    """
    entropy = 0.0
    max_entropy = 0.0
    for child in m.children():
        # BUG FIX: the original wrote ``e, m = getEntropies(module)``,
        # rebinding the outer ``m`` to a float; the getEntropy(m) call below
        # was then applied to that float instead of the module itself, so a
        # module's own entropy was silently dropped whenever it had children.
        e, me = getEntropies(child)
        entropy += e
        max_entropy += me
    e, me = getEntropy(m)
    entropy += e
    max_entropy += me
    return entropy, max_entropy
def updateBoostStrength(m):
    """
    Decay a KWinner module's boost strength after an epoch.

    Apply via :meth:`torch.nn.Module.apply`, e.g. ``m.apply(updateBoostStrength)``.
    Only modules in training mode are updated.

    :param m: KWinner module
    """
    if isinstance(m, KWinnersBase) and m.training:
        m.boostStrength = m.boostStrength * m.boostStrengthFactor
class KWinnersBase(nn.Module):
    """
    Base KWinners class
    """
    # NOTE(review): ``__metaclass__`` is the Python-2 way of declaring a
    # metaclass; under Python 3 this line is inert, the class is NOT
    # actually abstract and @abc.abstractmethod below is only advisory.
    __metaclass__ = abc.ABCMeta

    def __init__(self, n, k, kInferenceFactor=1.0, boostStrength=1.0,
                 boostStrengthFactor=1.0, dutyCyclePeriod=1000):
        """
        :param n:
          Number of units
        :type n: int

        :param k:
          The activity of the top k units will be allowed to remain, the rest are set
          to zero
        :type k: int

        :param kInferenceFactor:
          During inference (training=False) we increase k by this factor.
        :type kInferenceFactor: float

        :param boostStrength:
          boost strength (0.0 implies no boosting).
        :type boostStrength: float

        :param boostStrengthFactor:
          Boost strength factor to use [0..1]
        :type boostStrengthFactor: float

        :param dutyCyclePeriod:
          The period used to calculate duty cycles
        :type dutyCyclePeriod: int
        """
        super(KWinnersBase, self).__init__()
        assert (boostStrength >= 0.0)

        self.n = n
        self.k = k
        self.kInferenceFactor = kInferenceFactor
        # Number of training samples seen so far; subclasses use it to ramp
        # the duty-cycle averaging period up to dutyCyclePeriod.
        self.learningIterations = 0

        # Boosting related parameters
        self.boostStrength = boostStrength
        self.boostStrengthFactor = boostStrengthFactor
        self.dutyCyclePeriod = dutyCyclePeriod

    def getLearningIterations(self):
        # Accessor for the number of samples consumed during training.
        return self.learningIterations

    @abc.abstractmethod
    def updateDutyCycle(self, x):
        """
        Updates our duty cycle estimates with the new value. Duty cycles are
        updated according to the following formula:

        .. math::
            dutyCycle = \\frac{dutyCycle \\times \\left( period - batchSize \\right)
                                + newValue}{period}

        :param x:
          Current activity of each unit
        """
        raise NotImplementedError

    def updateBoostStrength(self):
        """
        Update boost strength using given strength factor during training
        """
        # Only decays while the module is in training mode.
        if self.training:
            self.boostStrength = self.boostStrength * self.boostStrengthFactor

    def entropy(self):
        """
        Returns the current total entropy of this layer
        """
        if self.k < self.n:
            # binaryEntropy returns a pair; only its second element (the
            # total entropy) is used here.  ``self.dutyCycle`` is the buffer
            # registered by subclasses.
            _, entropy = binaryEntropy(self.dutyCycle)
            return entropy
        else:
            # k >= n degenerates to plain ReLU (see subclasses' forward).
            return 0

    def maxEntropy(self):
        """
        Returns the maximum total entropy we can expect from this layer
        """
        return maxEntropy(self.n, self.k)
class KWinners(KWinnersBase):
    """
    Applies K-Winner function to the input tensor

    See :class:`htmresearch.frameworks.pytorch.functions.k_winners`
    """

    def __init__(self, n, k, kInferenceFactor=1.0, boostStrength=1.0,
                 boostStrengthFactor=1.0, dutyCyclePeriod=1000):
        """
        :param n:
          Number of units
        :type n: int

        :param k:
          The activity of the top k units will be allowed to remain, the rest are set
          to zero
        :type k: int

        :param kInferenceFactor:
          During inference (training=False) we increase k by this factor.
        :type kInferenceFactor: float

        :param boostStrength:
          boost strength (0.0 implies no boosting).
        :type boostStrength: float

        :param boostStrengthFactor:
          Boost strength factor to use [0..1]
        :type boostStrengthFactor: float

        :param dutyCyclePeriod:
          The period used to calculate duty cycles
        :type dutyCyclePeriod: int
        """
        super(KWinners, self).__init__(n=n, k=k,
                                       kInferenceFactor=kInferenceFactor,
                                       boostStrength=boostStrength,
                                       boostStrengthFactor=boostStrengthFactor,
                                       dutyCyclePeriod=dutyCyclePeriod)
        # Per-unit duty cycles; registered as a buffer so they follow the
        # module across devices and are saved in state_dict without being a
        # trainable parameter.
        self.register_buffer("dutyCycle", torch.zeros(self.n))

    def forward(self, x):
        # Apply k-winner algorithm if k < n, otherwise default to standard RELU
        if self.k >= self.n:
            return F.relu(x)

        if self.training:
            k = self.k
        else:
            # During inference more winners are allowed through (capped at n).
            k = min(int(round(self.k * self.kInferenceFactor)), self.n)

        x = k_winners.apply(x, self.dutyCycle, k, self.boostStrength)

        # Duty cycles are only tracked while training.
        if self.training:
            self.updateDutyCycle(x)

        return x

    def updateDutyCycle(self, x):
        # Moving-average update of per-unit activity following the formula
        # documented on KWinnersBase.updateDutyCycle; the averaging period
        # ramps up until dutyCyclePeriod samples have been seen.
        batchSize = x.shape[0]
        self.learningIterations += batchSize
        period = min(self.dutyCyclePeriod, self.learningIterations)
        self.dutyCycle.mul_(period - batchSize)
        # A unit is "active" when its output is > 0.
        self.dutyCycle.add_(x.gt(0).sum(dim=0, dtype=torch.float))
        self.dutyCycle.div_(period)
class KWinners2d(KWinnersBase):
    """
    Applies K-Winner function to the input tensor

    See :class:`htmresearch.frameworks.pytorch.functions.k_winners2d`
    """

    def __init__(self, n, k, channels, kInferenceFactor=1.0, boostStrength=1.0,
                 boostStrengthFactor=1.0, dutyCyclePeriod=1000):
        """
        :param n:
          Number of units. Usually the output of the max pool or whichever layer
          preceding the KWinners2d layer.
        :type n: int

        :param k:
          The activity of the top k units will be allowed to remain, the rest are set
          to zero
        :type k: int

        :param channels:
          Number of channels (filters) in the convolutional layer.
        :type channels: int

        :param kInferenceFactor:
          During inference (training=False) we increase k by this factor.
        :type kInferenceFactor: float

        :param boostStrength:
          boost strength (0.0 implies no boosting).
        :type boostStrength: float

        :param boostStrengthFactor:
          Boost strength factor to use [0..1]
        :type boostStrengthFactor: float

        :param dutyCyclePeriod:
          The period used to calculate duty cycles
        :type dutyCyclePeriod: int
        """
        super(KWinners2d, self).__init__(n=n, k=k,
                                         kInferenceFactor=kInferenceFactor,
                                         boostStrength=boostStrength,
                                         boostStrengthFactor=boostStrengthFactor,
                                         dutyCyclePeriod=dutyCyclePeriod)
        self.channels = channels
        # One duty cycle per channel, shaped (1, C, 1, 1) so it broadcasts
        # over (batch, C, H, W) activations.
        self.register_buffer("dutyCycle", torch.zeros((1, channels, 1, 1)))

    def forward(self, x):
        # Apply k-winner algorithm if k < n, otherwise default to standard RELU
        if self.k >= self.n:
            return F.relu(x)

        if self.training:
            k = self.k
        else:
            # During inference more winners are allowed through (capped at n).
            k = min(int(round(self.k * self.kInferenceFactor)), self.n)

        x = k_winners2d.apply(x, self.dutyCycle, k, self.boostStrength)

        if self.training:
            self.updateDutyCycle(x)

        return x

    def updateDutyCycle(self, x):
        batchSize = x.shape[0]
        self.learningIterations += batchSize
        # Normalize by the spatial extent so the duty cycle is the mean
        # fraction of active positions per channel.
        scaleFactor = float(x.shape[2] * x.shape[3])
        period = min(self.dutyCyclePeriod, self.learningIterations)
        self.dutyCycle.mul_(period - batchSize)
        # NOTE(review): sum over a dim tuple requires a reasonably recent
        # torch version -- confirm against the project's pinned torch.
        s = x.gt(0).sum(dim=(0, 2, 3), dtype=torch.float) / scaleFactor
        self.dutyCycle.reshape(-1).add_(s)
        self.dutyCycle.div_(period)

    def entropy(self):
        # Per-channel entropy scaled by the number of units per channel
        # (n / channels), so totals stay comparable with KWinners.
        entropy = super(KWinners2d, self).entropy()
        return entropy * self.n / self.channels
|
numenta/htmresearch
|
htmresearch/frameworks/pytorch/modules/k_winners.py
|
Python
|
agpl-3.0
| 9,177
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import pwd
import sys
import ConfigParser
from string import ascii_letters, digits
# copied from utils, avoid circular reference fun :)
def mk_boolean(value):
    """Normalize an arbitrary config value to a bool.

    ``None`` maps to False; anything else is stringified and matched
    case-insensitively against the accepted "true" spellings.
    """
    if value is None:
        return False
    return str(value).lower() in ("true", "t", "y", "1", "yes")
def get_config(p, section, key, env_var, default, boolean=False, integer=False, floating=False):
    ''' return a configuration variable with casting '''
    raw = _get_config(p, section, key, env_var, default)
    # Booleans are always coerced; numeric casts only apply to truthy values
    # so an empty/None result falls through unchanged.
    if boolean:
        return mk_boolean(raw)
    if raw and integer:
        return int(raw)
    if raw and floating:
        return float(raw)
    return raw
def _get_config(p, section, key, env_var, default):
''' helper function for get_config '''
if env_var is not None:
value = os.environ.get(env_var, None)
if value is not None:
return value
if p is not None:
try:
return p.get(section, key, raw=True)
except:
return default
return default
def load_config_file():
    ''' Load Config File order(first found is used): ENV, CWD, HOME, /etc/ansible '''
    parser = ConfigParser.ConfigParser()
    env_path = os.getenv("ANSIBLE_CONFIG", None)
    if env_path is not None:
        env_path = os.path.expanduser(env_path)
    # Candidate locations, highest priority first; only the first existing
    # one is read.
    candidates = [
        env_path,
        os.getcwd() + "/ansible.cfg",
        os.path.expanduser("~/.ansible.cfg"),
        "/etc/ansible/ansible.cfg",
    ]
    for candidate in candidates:
        if candidate is not None and os.path.exists(candidate):
            parser.read(candidate)
            return parser
    return None
def shell_expand_path(path):
    ''' shell_expand_path is needed as os.path.expanduser does not work
    when path is None, which is the default for ANSIBLE_PRIVATE_KEY_FILE '''
    # Falsy paths (None, "") are returned untouched.
    return os.path.expanduser(path) if path else path
# Parsed config file (or None when no ansible.cfg was found anywhere).
p = load_config_file()
# Login name of the effective user; used as the default remote user.
active_user = pwd.getpwuid(os.geteuid())[0]

# Needed so the RPM can call setup.py and have modules land in the
# correct location. See #1277 for discussion
if getattr(sys, "real_prefix", None):
    # in a virtualenv
    DIST_MODULE_PATH = os.path.join(sys.prefix, 'share/ansible/')
else:
    DIST_MODULE_PATH = '/usr/share/ansible/'

# check all of these extensions when looking for yaml files for things like
# group variables
YAML_FILENAME_EXTENSIONS = [ "", ".yml", ".yaml" ]

# sections in config file
DEFAULTS='defaults'

# configurable things
DEFAULT_HOST_LIST = shell_expand_path(get_config(p, DEFAULTS, 'hostfile', 'ANSIBLE_HOSTS', '/etc/ansible/hosts'))
DEFAULT_MODULE_PATH = get_config(p, DEFAULTS, 'library', 'ANSIBLE_LIBRARY', DIST_MODULE_PATH)
DEFAULT_ROLES_PATH = get_config(p, DEFAULTS, 'roles_path', 'ANSIBLE_ROLES_PATH', '/etc/ansible/roles')
DEFAULT_REMOTE_TMP = shell_expand_path(get_config(p, DEFAULTS, 'remote_tmp', 'ANSIBLE_REMOTE_TEMP', '$HOME/.ansible/tmp'))
DEFAULT_MODULE_NAME = get_config(p, DEFAULTS, 'module_name', None, 'command')
DEFAULT_PATTERN = get_config(p, DEFAULTS, 'pattern', None, '*')
DEFAULT_FORKS = get_config(p, DEFAULTS, 'forks', 'ANSIBLE_FORKS', 5, integer=True)
DEFAULT_MODULE_ARGS = get_config(p, DEFAULTS, 'module_args', 'ANSIBLE_MODULE_ARGS', '')
DEFAULT_MODULE_LANG = get_config(p, DEFAULTS, 'module_lang', 'ANSIBLE_MODULE_LANG', 'C')
DEFAULT_TIMEOUT = get_config(p, DEFAULTS, 'timeout', 'ANSIBLE_TIMEOUT', 10, integer=True)
DEFAULT_POLL_INTERVAL = get_config(p, DEFAULTS, 'poll_interval', 'ANSIBLE_POLL_INTERVAL', 15, integer=True)
DEFAULT_REMOTE_USER = get_config(p, DEFAULTS, 'remote_user', 'ANSIBLE_REMOTE_USER', active_user)
DEFAULT_ASK_PASS = get_config(p, DEFAULTS, 'ask_pass', 'ANSIBLE_ASK_PASS', False, boolean=True)
DEFAULT_PRIVATE_KEY_FILE = shell_expand_path(get_config(p, DEFAULTS, 'private_key_file', 'ANSIBLE_PRIVATE_KEY_FILE', None))
DEFAULT_SUDO_USER = get_config(p, DEFAULTS, 'sudo_user', 'ANSIBLE_SUDO_USER', 'root')
DEFAULT_ASK_SUDO_PASS = get_config(p, DEFAULTS, 'ask_sudo_pass', 'ANSIBLE_ASK_SUDO_PASS', False, boolean=True)
DEFAULT_REMOTE_PORT = get_config(p, DEFAULTS, 'remote_port', 'ANSIBLE_REMOTE_PORT', None, integer=True)
DEFAULT_ASK_VAULT_PASS = get_config(p, DEFAULTS, 'ask_vault_pass', 'ANSIBLE_ASK_VAULT_PASS', False, boolean=True)
DEFAULT_TRANSPORT = get_config(p, DEFAULTS, 'transport', 'ANSIBLE_TRANSPORT', 'smart')
DEFAULT_SCP_IF_SSH = get_config(p, 'ssh_connection', 'scp_if_ssh', 'ANSIBLE_SCP_IF_SSH', False, boolean=True)
DEFAULT_MANAGED_STR = get_config(p, DEFAULTS, 'ansible_managed', None, 'Ansible managed: {file} modified on %Y-%m-%d %H:%M:%S by {uid} on {host}')
DEFAULT_SYSLOG_FACILITY = get_config(p, DEFAULTS, 'syslog_facility', 'ANSIBLE_SYSLOG_FACILITY', 'LOG_USER')
DEFAULT_KEEP_REMOTE_FILES = get_config(p, DEFAULTS, 'keep_remote_files', 'ANSIBLE_KEEP_REMOTE_FILES', False, boolean=True)
DEFAULT_SUDO = get_config(p, DEFAULTS, 'sudo', 'ANSIBLE_SUDO', False, boolean=True)
DEFAULT_SUDO_EXE = get_config(p, DEFAULTS, 'sudo_exe', 'ANSIBLE_SUDO_EXE', 'sudo')
DEFAULT_SUDO_FLAGS = get_config(p, DEFAULTS, 'sudo_flags', 'ANSIBLE_SUDO_FLAGS', '-H')
DEFAULT_HASH_BEHAVIOUR = get_config(p, DEFAULTS, 'hash_behaviour', 'ANSIBLE_HASH_BEHAVIOUR', 'replace')
DEFAULT_LEGACY_PLAYBOOK_VARIABLES = get_config(p, DEFAULTS, 'legacy_playbook_variables', 'ANSIBLE_LEGACY_PLAYBOOK_VARIABLES', True, boolean=True)
DEFAULT_JINJA2_EXTENSIONS = get_config(p, DEFAULTS, 'jinja2_extensions', 'ANSIBLE_JINJA2_EXTENSIONS', None)
DEFAULT_EXECUTABLE = get_config(p, DEFAULTS, 'executable', 'ANSIBLE_EXECUTABLE', '/bin/sh')
DEFAULT_SU_EXE = get_config(p, DEFAULTS, 'su_exe', 'ANSIBLE_SU_EXE', 'su')
DEFAULT_SU = get_config(p, DEFAULTS, 'su', 'ANSIBLE_SU', False, boolean=True)
DEFAULT_SU_FLAGS = get_config(p, DEFAULTS, 'su_flags', 'ANSIBLE_SU_FLAGS', '')
DEFAULT_SU_USER = get_config(p, DEFAULTS, 'su_user', 'ANSIBLE_SU_USER', 'root')
DEFAULT_ASK_SU_PASS = get_config(p, DEFAULTS, 'ask_su_pass', 'ANSIBLE_ASK_SU_PASS', False, boolean=True)
DEFAULT_ACTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'action_plugins', 'ANSIBLE_ACTION_PLUGINS', '/usr/share/ansible_plugins/action_plugins')
DEFAULT_CALLBACK_PLUGIN_PATH = get_config(p, DEFAULTS, 'callback_plugins', 'ANSIBLE_CALLBACK_PLUGINS', '/usr/share/ansible_plugins/callback_plugins')
DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', 'ANSIBLE_CONNECTION_PLUGINS', '/usr/share/ansible_plugins/connection_plugins')
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '/usr/share/ansible_plugins/lookup_plugins')
DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', 'ANSIBLE_VARS_PLUGINS', '/usr/share/ansible_plugins/vars_plugins')
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '/usr/share/ansible_plugins/filter_plugins')
DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))

# URL Arguments for generic module urllib2 use
DEFAULT_HTTP_USER_AGENT = get_config(p, DEFAULTS, 'http_user_agent', 'ANSIBLE_HTTP_USER_AGENT', 'ansible-agent')
DEFAULT_CA_FILE_PATH = shell_expand_path(get_config(p, DEFAULTS, 'ca_file_path', 'ANSIBLE_CA_FILE_PATH', ''))

ANSIBLE_NOCOLOR = get_config(p, DEFAULTS, 'nocolor', 'ANSIBLE_NOCOLOR', None, boolean=True)
ANSIBLE_NOCOWS = get_config(p, DEFAULTS, 'nocows', 'ANSIBLE_NOCOWS', None, boolean=True)
DISPLAY_SKIPPED_HOSTS = get_config(p, DEFAULTS, 'display_skipped_hosts', 'DISPLAY_SKIPPED_HOSTS', True, boolean=True)
DEFAULT_UNDEFINED_VAR_BEHAVIOR = get_config(p, DEFAULTS, 'error_on_undefined_vars', 'ANSIBLE_ERROR_ON_UNDEFINED_VARS', True, boolean=True)
HOST_KEY_CHECKING = get_config(p, DEFAULTS, 'host_key_checking', 'ANSIBLE_HOST_KEY_CHECKING', True, boolean=True)
DEPRECATION_WARNINGS = get_config(p, DEFAULTS, 'deprecation_warnings', 'ANSIBLE_DEPRECATION_WARNINGS', True, boolean=True)

# CONNECTION RELATED
ANSIBLE_SSH_ARGS = get_config(p, 'ssh_connection', 'ssh_args', 'ANSIBLE_SSH_ARGS', None)
ANSIBLE_SSH_CONTROL_PATH = get_config(p, 'ssh_connection', 'control_path', 'ANSIBLE_SSH_CONTROL_PATH', "%(directory)s/ansible-ssh-%%h-%%p-%%r")
ANSIBLE_SSH_PIPELINING = get_config(p, 'ssh_connection', 'pipelining', 'ANSIBLE_SSH_PIPELINING', False, boolean=True)
PARAMIKO_RECORD_HOST_KEYS = get_config(p, 'paramiko_connection', 'record_host_keys', 'ANSIBLE_PARAMIKO_RECORD_HOST_KEYS', True, boolean=True)
# obsolete -- will be formally removed in 1.6
ZEROMQ_PORT = get_config(p, 'fireball_connection', 'zeromq_port', 'ANSIBLE_ZEROMQ_PORT', 5099, integer=True)
ACCELERATE_PORT = get_config(p, 'accelerate', 'accelerate_port', 'ACCELERATE_PORT', 5099, integer=True)
ACCELERATE_TIMEOUT = get_config(p, 'accelerate', 'accelerate_timeout', 'ACCELERATE_TIMEOUT', 30, integer=True)
ACCELERATE_CONNECT_TIMEOUT = get_config(p, 'accelerate', 'accelerate_connect_timeout', 'ACCELERATE_CONNECT_TIMEOUT', 1.0, floating=True)
ACCELERATE_KEYS_DIR = get_config(p, 'accelerate', 'accelerate_keys_dir', 'ACCELERATE_KEYS_DIR', '~/.fireball.keys')
ACCELERATE_KEYS_DIR_PERMS = get_config(p, 'accelerate', 'accelerate_keys_dir_perms', 'ACCELERATE_KEYS_DIR_PERMS', '700')
ACCELERATE_KEYS_FILE_PERMS = get_config(p, 'accelerate', 'accelerate_keys_file_perms', 'ACCELERATE_KEYS_FILE_PERMS', '600')
PARAMIKO_PTY = get_config(p, 'paramiko_connection', 'pty', 'ANSIBLE_PARAMIKO_PTY', True, boolean=True)

# characters included in auto-generated passwords
DEFAULT_PASSWORD_CHARS = ascii_letters + digits + ".,:-_"

# non-configurable things
DEFAULT_SUDO_PASS = None
DEFAULT_REMOTE_PASS = None
DEFAULT_SUBSET = None
DEFAULT_SU_PASS = None
VAULT_VERSION_MIN = 1.0
VAULT_VERSION_MAX = 1.0
|
mitodl/ansible
|
lib/ansible/constants.py
|
Python
|
gpl-3.0
| 11,103
|
# encoding: utf-8
from functools import reduce
import grid_image_provider
from ice.logs import logger
class CombinedProvider(grid_image_provider.GridImageProvider):

    def __init__(self, *args):
        """
        Creates a CombinedProvider out of the providers that were passed in `args`

        ORDER MATTERS. `image_for_rom` will return the first non-None result from
        a provider. So if you want to check the users filesystem but check
        ConsoleGrid if nothing is found then you would do

        CombinedProvider(LocalProvider(), ConsoleGridProvider())

        But if you wanted to, say, use ConsoleGrid but show a placeholder image in
        the case of an error you would do

        CombinedProvider(ConsoleGridProvider(), PlaceholderProvider())
        """
        self.providers = args

    def _enabled_providers(self):
        # Materialized as a list: under Python 3 ``filter`` returns a lazy
        # iterator and ``is_enabled`` below needs len() of the result.
        return [provider for provider in self.providers if provider.is_enabled()]

    def is_enabled(self):
        """
        Returns True if any child provider is enabled
        """
        # BUG FIX: the original called len() on the ``filter`` object from
        # _enabled_providers(), which raises TypeError on Python 3 (and this
        # file already uses the Python-3 style ``from functools import reduce``).
        return len(self._enabled_providers()) > 0

    def image_for_rom(self, rom):
        """
        Returns the first image found
        """
        return reduce(
            lambda image, provider: image if image else provider.image_for_rom(rom),
            self._enabled_providers(),
            None)
|
scottrice/Ice
|
ice/gridproviders/combined_provider.py
|
Python
|
mit
| 1,232
|
import _plotly_utils.basevalidators
class OpacityValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``barpolar.opacity`` attribute, constrained
    to the [0, 1] range unless overridden via kwargs."""

    def __init__(self, plotly_name="opacity", parent_name="barpolar", **kwargs):
        # Pull overridable defaults out of kwargs before forwarding the rest.
        edit_type = kwargs.pop("edit_type", "style")
        maximum = kwargs.pop("max", 1)
        minimum = kwargs.pop("min", 0)
        role = kwargs.pop("role", "style")
        super(OpacityValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            max=maximum,
            min=minimum,
            role=role,
            **kwargs
        )
|
plotly/python-api
|
packages/python/plotly/plotly/validators/barpolar/_opacity.py
|
Python
|
mit
| 520
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
import os
import shutil
from lib_apk_shrink.model.DecompileConfig import DecompileConfig
__author__ = 'tiantong'


class JarDecompile(object):
    """Unzip a set of jars and decompile the extracted classes with jad."""

    def __init__(self, decompileConfig=None):
        # BUG FIX: the original used ``decompileConfig=DecompileConfig()``
        # plus class-level attribute defaults, so the same config object was
        # shared by every instance created without arguments.  A None
        # sentinel gives each instance its own fresh config.
        if decompileConfig is None:
            decompileConfig = DecompileConfig()
        self.decompile_config = decompileConfig
        # Output locations; set properly by decompile_jar().
        self.output_path_decompile = ""
        self.output_path_out = ""

    def decompile_jar(self):
        """Extract the configured jars and decompile the classes with jad."""
        jad_path = self.decompile_config.jad_path
        output_path = self.decompile_config.output_path
        # Remove any output left over from a previous run.
        self.output_path_out = os.path.join(output_path, 'out')
        self.output_path_decompile = os.path.join(output_path, 'decompile')
        self.clearOutput()
        # Make sure the extraction directory exists.
        if not os.path.exists(self.output_path_out):
            os.makedirs(self.output_path_out)
        # Path of the unneeded META-INF directory (deleted after unzipping).
        META_path = os.path.join(self.output_path_out, 'META-INF')
        # Unzip every jar into the shared extraction directory.
        # NOTE(review): paths are interpolated unquoted into a shell command;
        # paths containing spaces or shell metacharacters will break or be
        # interpreted by the shell -- consider subprocess with an arg list.
        for jar_path in self.decompile_config.extra_jar:
            command = 'unzip -o ' + jar_path + ' -d ' + self.output_path_out
            result = os.popen(command).read()
        self.delDir(META_path)
        # Decompile all extracted .class files with jad.
        command = jad_path + ' -r -o -ff -d ' + self.output_path_decompile + ' -s java ' + self.output_path_out + '/**/*.class'
        result = os.popen(command).read()

    def delDir(self, dir):
        """Delete ``dir`` recursively if it exists (no-op otherwise)."""
        if os.path.isdir(dir):
            shutil.rmtree(dir)

    def clearOutput(self):
        """Remove both the decompile and extraction output directories."""
        self.delDir(self.output_path_decompile)
        self.delDir(self.output_path_out)
|
wtttc/apkshrink
|
lib_apk_shrink/instrument/JarDecompile.py
|
Python
|
mit
| 1,608
|
#!/usr/bin/python
# -*- coding: utf-8; tab-width: 4; indent-tabs-mode: t -*-
"""
Role
====
Defines plugin managers that can handle configuration files similar to
the ini files manipulated by Python's ConfigParser module.
API
===
"""
from yapsy.IPlugin import IPlugin
from yapsy.PluginManagerDecorator import PluginManagerDecorator
from yapsy.PluginManager import PLUGIN_NAME_FORBIDEN_STRING
class ConfigurablePluginManager(PluginManagerDecorator):
"""
A plugin manager that also manages a configuration file.
The configuration file will be accessed through a ``ConfigParser``
derivated object. The file can be used for other purpose by the
application using this plugin manager as it will only add a new
specific section ``[Plugin Management]`` for itself and also new
sections for some plugins that will start with ``[Plugin:...]``
(only the plugins that explicitly requires to save configuration
options will have this kind of section).
.. warning:: when giving/building the list of plugins to activate by
default, there must not be any space in the list (neither in the
names nor in between)
"""
CONFIG_SECTION_NAME = "Plugin Management"
def __init__(self,
             configparser_instance=None,
             config_change_trigger=lambda x: True,
             decorated_manager=None,
             # The following args will only be used if we need to
             # create a default PluginManager
             categories_filter=None,
             directories_list=None,
             plugin_info_ext="yapsy-plugin"):
    """
    Create the plugin manager and record the ConfigParser instance
    that will be used afterwards.

    The ``config_change_trigger`` argument can be used to set a
    specific method to call when the configuration is
    altered. This will let the client application manage the way
    they want the configuration to be updated (e.g. write on file
    at each change or at precise time intervalls or whatever....)
    """
    # BUG FIX: the original default ``categories_filter={"Default":IPlugin}``
    # was a mutable dict created once at definition time and shared across
    # every instance; a None sentinel builds a fresh dict per call.
    if categories_filter is None:
        categories_filter = {"Default": IPlugin}
    # Create the base decorator class
    PluginManagerDecorator.__init__(self, decorated_manager,
                                    categories_filter,
                                    directories_list,
                                    plugin_info_ext)
    self.setConfigParser(configparser_instance, config_change_trigger)
def setConfigParser(self, configparser_instance, config_change_trigger):
    """
    Record the ConfigParser instance to read/write settings with.
    """
    self.config_parser = configparser_instance
    # Optional callback invoked whenever the configuration is modified.
    self.config_has_changed = config_change_trigger
def __getCategoryPluginsListFromConfig(self, plugin_list_str):
    """
    Split the stored plugin-list string back into the individual
    plugin names.
    """
    # The separator is guaranteed never to occur inside a plugin name.
    return plugin_list_str.strip(" ").split(PLUGIN_NAME_FORBIDEN_STRING)
def __getCategoryPluginsConfigFromList(self, plugin_list):
    """
    Serialize the list of plugins to activate into a single string.
    """
    separator = PLUGIN_NAME_FORBIDEN_STRING
    return separator.join(plugin_list)
def __getCategoryOptionsName(self, category_name):
    """
    Return the appropriately formatted name of the config option that
    lists the plugins to load for the given category.
    """
    # Spaces are not valid in option names; replace them.
    return "{}_plugins_to_load".format(category_name.replace(" ", "_"))
def __addPluginToConfig(self, category_name, plugin_name):
    """
    Utility function to record a plugin in the list of plugins to be
    activated for its category.
    """
    section = self.CONFIG_SECTION_NAME
    # Make sure our bookkeeping section exists.
    if not self.config_parser.has_section(section):
        self.config_parser.add_section(section)
    option_name = self.__getCategoryOptionsName(category_name)
    if not self.config_parser.has_option(section, option_name):
        # First plugin for this category: start a fresh list.
        self.config_parser.set(section, option_name, plugin_name)
        return self.config_has_changed()
    # Append to the existing list, avoiding duplicates; the option is
    # rewritten (and the change callback fired) either way.
    current = self.__getCategoryPluginsListFromConfig(
        self.config_parser.get(section, option_name))
    if plugin_name not in current:
        current.append(plugin_name)
    self.config_parser.set(section, option_name,
                           self.__getCategoryPluginsConfigFromList(current))
    return self.config_has_changed()
def __removePluginFromConfig(self, category_name, plugin_name):
    """
    Utility function to remove a plugin from the list of plugins to
    be activated.

    (DOC FIX: the original docstring, copy-pasted from
    __addPluginToConfig, wrongly said "add a plugin".)
    """
    section = self.CONFIG_SECTION_NAME
    if not self.config_parser.has_section(section):
        # Nothing recorded, nothing to remove :)
        return
    option_name = self.__getCategoryOptionsName(category_name)
    if not self.config_parser.has_option(section, option_name):
        # No list for this category: still nothing to do.
        return
    current = self.__getCategoryPluginsListFromConfig(
        self.config_parser.get(section, option_name))
    if plugin_name in current:
        current.remove(plugin_name)
        self.config_parser.set(section, option_name,
                               self.__getCategoryPluginsConfigFromList(current))
        self.config_has_changed()
def registerOptionFromPlugin(self,
                             category_name, plugin_name,
                             option_name, option_value):
    """
    To be called from a plugin object, register a given option in
    the name of a given plugin.
    """
    section_name = "%s Plugin: %s" % (category_name, plugin_name)
    # Lazily create the per-plugin section on first use.
    if not self.config_parser.has_section(section_name):
        self.config_parser.add_section(section_name)
    self.config_parser.set(section_name, option_name, option_value)
    self.config_has_changed()
def hasOptionFromPlugin(self,
                        category_name, plugin_name, option_name):
    """
    To be called from a plugin object, return True if the option
    has already been registered.
    """
    section_name = "%s Plugin: %s" % (category_name, plugin_name)
    if not self.config_parser.has_section(section_name):
        return False
    return self.config_parser.has_option(section_name, option_name)
def readOptionFromPlugin(self,
                         category_name, plugin_name, option_name):
    """
    To be called from a plugin object, read a given option in
    the name of a given plugin.
    """
    return self.config_parser.get(
        "%s Plugin: %s" % (category_name, plugin_name), option_name)
def __decoratePluginObject(self, category_name, plugin_name, plugin_object):
    """
    Attach option accessors to the plugin object so it can use this
    manager's API to store, read and test its own configuration options.
    """
    def setConfigOption(option_name, option_value):
        return self.registerOptionFromPlugin(category_name, plugin_name,
                                             option_name, option_value)
    setConfigOption.__doc__ = self.registerOptionFromPlugin.__doc__
    plugin_object.setConfigOption = setConfigOption

    def getConfigOption(option_name):
        return self.readOptionFromPlugin(category_name, plugin_name,
                                         option_name)
    getConfigOption.__doc__ = self.readOptionFromPlugin.__doc__
    plugin_object.getConfigOption = getConfigOption

    def hasConfigOption(option_name):
        return self.hasOptionFromPlugin(category_name, plugin_name,
                                        option_name)
    hasConfigOption.__doc__ = self.hasOptionFromPlugin.__doc__
    plugin_object.hasConfigOption = hasConfigOption
def activatePluginByName(self, plugin_name, category_name="Default", save_state=True):
"""
Activate a plugin, , and remember it (in the config file).
If you want the plugin to benefit from the configuration
utility defined by this manager, it is crucial to use this
method to activate a plugin and not call the plugin object's
``activate`` method. In fact, this method will also "decorate"
the plugin object so that it can use this class's methods to
register its own options.
By default, the plugin's activation is registered in the
config file but if you d'ont want this set the 'save_state'
argument to False.
"""
# first decorate the plugin
pta = self._component.getPluginByName(plugin_name,category_name)
if pta is None:
return None
self.__decoratePluginObject(category_name,plugin_name,pta.plugin_object)
# activate the plugin
plugin_object = self._component.activatePluginByName(plugin_name,category_name)
# check the activation and then optionally set the config option
if plugin_object.is_activated:
if save_state:
self.__addPluginToConfig(category_name,plugin_name)
return plugin_object
return None
def deactivatePluginByName(self, plugin_name, category_name="Default", save_state=True):
"""
Deactivate a plugin, and remember it (in the config file).
By default, the plugin's deactivation is registered in the
config file but if you d'ont want this set the ``save_state``
argument to False.
"""
# activate the plugin
plugin_object = self._component.deactivatePluginByName(plugin_name,category_name)
if plugin_object is None:
return None
# check the deactivation and then optionnally set the config option
if not plugin_object.is_activated:
if save_state:
self.__removePluginFromConfig(category_name,plugin_name)
return plugin_object
return None
def loadPlugins(self,callback=None):
"""
Walk through the plugins' places and look for plugins. Then
for each plugin candidate look for its category, load it and
stores it in the appropriate slot of the ``category_mapping``.
"""
self._component.loadPlugins()
# now load the plugins according to the recorded configuration
if self.config_parser.has_section(self.CONFIG_SECTION_NAME):
# browse all the categories
for category_name in self._component.category_mapping.keys():
# get the list of plugins to be activated for this
# category
option_name = "%s_plugins_to_load"%category_name
if self.config_parser.has_option(self.CONFIG_SECTION_NAME,
option_name):
plugin_list_str = self.config_parser.get(self.CONFIG_SECTION_NAME,
option_name)
plugin_list = self.__getCategoryPluginsListFromConfig(plugin_list_str)
# activate all the plugins that should be
# activated
for plugin_name in plugin_list:
self.activatePluginByName(plugin_name,category_name)
|
GeoDaCenter/CAST
|
yapsy/ConfigurablePluginManager.py
|
Python
|
gpl-3.0
| 10,403
|
def main():
    """Interactive greeter: asks the user's name and age on stdin.

    Ported from Python 2 (``print`` statements / ``raw_input``) to
    Python 3; the user-facing Spanish strings are preserved verbatim.
    """
    print("hola")
    # NOTE(review): "llmas" looks like a typo for "llamas" — kept as-is
    # since it is a runtime string; confirm before changing.
    print("Como te llmas?")
    nombre = input()
    print("Buenos dias", nombre)
    print("Que edad tienes?")
    edad = input()
    print("que bien te conservas para tener", edad)


if __name__ == "__main__":
    # Guarded so importing this module does not block on stdin.
    main()
|
Djhacker18/Python
|
Practica1Conver.py
|
Python
|
gpl-2.0
| 202
|
import unittest
from unittest import mock
from flumine.markets.middleware import (
Middleware,
SimulatedMiddleware,
RunnerAnalytics,
OrderStatus,
OrderTypes,
WIN_MINIMUM_ADJUSTMENT_FACTOR,
PLACE_MINIMUM_ADJUSTMENT_FACTOR,
LIVE_STATUS,
)
from flumine.order.ordertype import MarketOnCloseOrder
class MiddlewareTest(unittest.TestCase):
    """Unit tests for the abstract Middleware base class."""
    def setUp(self) -> None:
        self.middleware = Middleware()
    def test_call(self):
        # Only checks that calling the base middleware does not raise.
        self.middleware(None)
    def test_add_market(self):
        mock_market = mock.Mock()
        self.assertIsNone(self.middleware.add_market(mock_market))
    def test_remove_market(self):
        mock_market = mock.Mock()
        self.assertIsNone(self.middleware.remove_market(mock_market))
class SimulatedMiddlewareTest(unittest.TestCase):
    """Unit tests for SimulatedMiddleware: runner analytics bookkeeping,
    non-runner (removal) handling and simulated order processing."""
    def setUp(self) -> None:
        self.middleware = SimulatedMiddleware()
    def test_init(self):
        self.assertEqual(self.middleware.markets, {})
        self.assertEqual(self.middleware._runner_removals, [])
        self.assertEqual(WIN_MINIMUM_ADJUSTMENT_FACTOR, 2.5)
        self.assertEqual(PLACE_MINIMUM_ADJUSTMENT_FACTOR, 0)
        self.assertEqual(
            LIVE_STATUS,
            [
                OrderStatus.EXECUTABLE,
                OrderStatus.CANCELLING,
                OrderStatus.UPDATING,
                OrderStatus.REPLACING,
            ],
        )
    @mock.patch(
        "flumine.markets.middleware.SimulatedMiddleware._process_simulated_orders"
    )
    @mock.patch("flumine.markets.middleware.SimulatedMiddleware._process_runner")
    def test_call(self, mock__process_runner, mock__process_simulated_orders):
        mock_market = mock.Mock(context={})
        mock_market_book = mock.Mock()
        mock_runner = mock.Mock(status="ACTIVE")
        mock_market_book.runners = [mock_runner]
        mock_market.market_book = mock_market_book
        self.middleware(mock_market)
        mock__process_runner.assert_called_with({}, mock_runner, True)
        self.assertEqual(mock_market.context, {"simulated": {}})
        mock__process_simulated_orders.assert_called_with(mock_market, {})
    @mock.patch(
        "flumine.markets.middleware.SimulatedMiddleware._process_simulated_orders"
    )
    @mock.patch(
        "flumine.markets.middleware.SimulatedMiddleware._process_runner_removal"
    )
    def test_call_non_runner(
        self, mock__process_runner_removal, mock__process_simulated_orders
    ):
        # A REMOVED runner must be recorded and trigger removal processing.
        mock_market = mock.Mock(context={})
        mock_market_book = mock.Mock()
        mock_runner = mock.Mock(status="REMOVED")
        mock_market_book.runners = [mock_runner]
        mock_market.market_book = mock_market_book
        self.middleware(mock_market)
        self.assertEqual(
            self.middleware._runner_removals,
            [
                (
                    mock_runner.selection_id,
                    mock_runner.handicap,
                    mock_runner.adjustment_factor,
                )
            ],
        )
        mock__process_runner_removal.assert_called_with(
            mock_market,
            mock_runner.selection_id,
            mock_runner.handicap,
            mock_runner.adjustment_factor,
        )
    def test_remove_market(self):
        # Second removal of the same market must be a no-op, not an error.
        mock_market = mock.Mock(market_id="1.23")
        self.middleware.markets = {mock_market.market_id: []}
        self.middleware.remove_market(mock_market)
        self.middleware.remove_market(mock_market)
        self.assertEqual(self.middleware.markets, {})
    def test__process_runner_removal(self):
        mock_simulated = mock.MagicMock(matched=[[123, 8.6, 10]])
        mock_simulated.__bool__.return_value = True
        mock_order = mock.Mock(simulated=mock_simulated, info={})
        mock_simulated_two = mock.MagicMock(matched=[[123, 8.6, 10]])
        mock_simulated_two.__bool__.return_value = False
        mock_order_two = mock.Mock(simulated=mock_simulated_two, info={})
        mock_market = mock.Mock(blotter=[mock_order, mock_order_two])
        self.middleware._process_runner_removal(mock_market, 12345, 0, 16.2)
        self.assertEqual(mock_order.simulated.matched, [[123, 7.21, 10]])
        self.assertEqual(mock_order.simulated.average_price_matched, 7.21)
        self.assertEqual(mock_order_two.simulated.matched, [[123, 8.6, 10]])
    def test__process_runner_removal_under_limit(self):
        # Adjustment factor below the minimum threshold: prices untouched.
        mock_simulated = mock.MagicMock(matched=[[123, 8.6, 10]])
        mock_simulated.__bool__.return_value = True
        mock_order = mock.Mock(simulated=mock_simulated)
        mock_market = mock.Mock(blotter=[mock_order])
        self.middleware._process_runner_removal(mock_market, 12345, 0, 2.4)
        self.assertEqual(mock_order.simulated.matched, [[123, 8.6, 10]])
    def test__process_runner_removal_void(self):
        # Orders on the removed runner itself are voided entirely.
        mock_simulated = mock.MagicMock(matched=[[123, 8.6, 10]])
        mock_simulated.__bool__.return_value = True
        mock_order = mock.Mock(
            lookup=("1.23", 12345, 0), simulated=mock_simulated, info={}
        )
        mock_order.order_type.size = 10
        mock_order.order_type.ORDER_TYPE = OrderTypes.LIMIT
        mock_market = mock.Mock(market_id="1.23", blotter=[mock_order])
        self.middleware._process_runner_removal(mock_market, 12345, 0, 16.2)
        self.assertEqual(mock_order.simulated.size_matched, 0)
        self.assertEqual(mock_order.simulated.average_price_matched, 0)
        self.assertEqual(mock_order.simulated.matched, [])
        self.assertEqual(mock_order.simulated.size_voided, 10)
    def test__process_runner_removal_none(self):
        # No adjustment factor available: nothing to adjust.
        mock_simulated = mock.MagicMock(matched=[[123, 8.6, 10]])
        mock_simulated.__bool__.return_value = True
        mock_order = mock.Mock(simulated=mock_simulated)
        mock_market = mock.Mock(blotter=[mock_order])
        self.middleware._process_runner_removal(mock_market, 12345, 0, None)
        self.assertEqual(mock_order.simulated.matched, [[123, 8.6, 10]])
    def test__process_runner_removal_sp_win(self):
        order_type = MarketOnCloseOrder(liability=200)
        mock_order = mock.Mock(
            selection_id=1234,
            handicap=0,
            order_type=order_type,
            info={},
            side="LAY",
            current_order=mock.Mock(size_matched=0),
            average_price_matched=None,
        )
        mock_market_book = mock.Mock()
        mock_market_book.runners = [
            mock.Mock(selection_id=1234, handicap=0, adjustment_factor=20)
        ]
        mock_market = mock.Mock(
            market_type="WIN", blotter=[mock_order], market_book=mock_market_book
        )
        self.middleware._process_runner_removal(mock_market, 12345, 0, 50)
        # The liability of £200 is adjusted by the multiplier of 37.5%, which is
        # defined in the example here: https://github.com/liampauling/flumine/issues/454
        self.assertEqual(mock_order.order_type.liability, 75)
    def test__process_runner_removal_sp_win_inplay(self):
        order_type = MarketOnCloseOrder(liability=200)
        mock_order = mock.Mock(
            selection_id=1234,
            handicap=0,
            order_type=order_type,
            info={},
            side="LAY",
            current_order=mock.Mock(size_matched=0),
            average_price_matched=10,
        )
        mock_market_book = mock.Mock()
        mock_market_book.runners = [
            mock.Mock(selection_id=1234, handicap=0, adjustment_factor=20)
        ]
        mock_market = mock.Mock(
            market_type="WIN", blotter=[mock_order], market_book=mock_market_book
        )
        self.middleware._process_runner_removal(mock_market, 12345, 0, 50)
        # The liability of £200 is adjusted by the multiplier of 37.5%, which is
        # defined in the example here: https://github.com/liampauling/flumine/issues/454
        self.assertEqual(mock_order.order_type.liability, 75)
        # Size matched should be 75 / (10.0-1.0) \approx 8.33
        self.assertEqual(8.33, mock_order.current_order.size_matched)
    def test__process_runner_removal_sp_place(self):
        order_type = MarketOnCloseOrder(liability=200)
        mock_order = mock.Mock(
            selection_id=1234,
            handicap=0,
            order_type=order_type,
            info={},
            side="LAY",
            current_order=mock.Mock(size_matched=0),
            average_price_matched=None,
        )
        mock_market_book = mock.Mock()
        mock_market_book.runners = [
            mock.Mock(selection_id=1234, handicap=0, adjustment_factor=20)
        ]
        mock_market = mock.Mock(
            market_type="PLACE", blotter=[mock_order], market_book=mock_market_book
        )
        self.middleware._process_runner_removal(mock_market, 12345, 0, 50)
        # The liability of £200 is reduced by the non runner's adjustment factor of 50%
        self.assertEqual(mock_order.order_type.liability, 100)
    def test__process_runner_removal_sp_place_inplay(self):
        order_type = MarketOnCloseOrder(liability=200)
        mock_order = mock.Mock(
            selection_id=1234,
            handicap=0,
            order_type=order_type,
            info={},
            side="LAY",
            current_order=mock.Mock(size_matched=0),
            average_price_matched=10.0,
        )
        mock_market_book = mock.Mock()
        mock_market_book.runners = [
            mock.Mock(selection_id=1234, handicap=0, adjustment_factor=20)
        ]
        mock_market = mock.Mock(
            market_type="PLACE", blotter=[mock_order], market_book=mock_market_book
        )
        self.middleware._process_runner_removal(mock_market, 12345, 0, 50)
        # The liability of £200 is reduced by the non runner's adjustment factor of 50%
        self.assertEqual(mock_order.order_type.liability, 100)
        # Size matched should be 100 / (10.0-1.0) \approx 11.11
        self.assertEqual(11.11, mock_order.current_order.size_matched)
    def test__process_streaming_update(self):
        # On an image or marketDefinition update, all runners are returned;
        # otherwise only the runner ids present in the "rc" delta.
        mock_market_book = mock.Mock(
            streaming_update={"img": True, "rc": [{"id": 3}, {"id": 4}]},
            runners=[mock.Mock(selection_id=1), mock.Mock(selection_id=2)],
        )
        self.assertEqual(
            self.middleware._process_streaming_update(mock_market_book), [1, 2]
        )
        mock_market_book = mock.Mock(
            streaming_update={"marketDefinition": {1: 2}, "rc": [{"id": 3}, {"id": 4}]},
            runners=[mock.Mock(selection_id=1), mock.Mock(selection_id=2)],
        )
        self.assertEqual(
            self.middleware._process_streaming_update(mock_market_book), [1, 2]
        )
        mock_market_book = mock.Mock(
            streaming_update={"rc": [{"id": 3}, {"id": 4}]},
            runners=[mock.Mock(selection_id=1), mock.Mock(selection_id=2)],
        )
        self.assertEqual(
            self.middleware._process_streaming_update(mock_market_book), [3, 4]
        )
    def test__calculate_reduction_factor(self):
        self.assertEqual(self.middleware._calculate_reduction_factor(10, 10), 9)
        self.assertEqual(self.middleware._calculate_reduction_factor(1000, 0), 1000)
        self.assertEqual(self.middleware._calculate_reduction_factor(1000, 5), 950)
        self.assertEqual(self.middleware._calculate_reduction_factor(3.2, 75.18), 1.01)
        self.assertEqual(self.middleware._calculate_reduction_factor(10, 75.18), 2.48)
        self.assertEqual(self.middleware._calculate_reduction_factor(1.01, 75.18), 1.01)
    @mock.patch("flumine.markets.middleware.config")
    def test__process_simulated_orders_strategy_isolation(self, mock_config):
        mock_config.simulated_strategy_isolation = True
        mock_market_book = mock.Mock()
        mock_market = mock.Mock()
        mock_order = mock.Mock(
            selection_id=123, handicap=1, status=OrderStatus.EXECUTABLE, side="LAY"
        )
        mock_order.order_type.price = 1.02
        mock_order.order_type.ORDER_TYPE = OrderTypes.LIMIT
        mock_order_two = mock.Mock(
            selection_id=123, handicap=1, status=OrderStatus.PENDING
        )
        mock_order_three = mock.Mock(
            selection_id=123, handicap=1, status=OrderStatus.EXECUTABLE, simulated=False
        )
        mock_market.blotter._strategy_orders = {
            "test": [mock_order, mock_order_two, mock_order_three]
        }
        mock_market_analytics = {
            (mock_order.selection_id, mock_order.handicap): mock.Mock(traded={1: 2})
        }
        mock_market.market_book = mock_market_book
        self.middleware._process_simulated_orders(mock_market, mock_market_analytics)
        mock_order.simulated.assert_called_with(mock_market_book, {1: 2})
        mock_order_two.simulated.assert_not_called()
    @mock.patch("flumine.markets.middleware.config")
    def test__process_simulated_orders(self, mock_config):
        mock_config.simulated_strategy_isolation = False
        mock_market_book = mock.Mock()
        mock_market = mock.Mock()
        mock_order = mock.Mock(
            selection_id=123, handicap=1, status=OrderStatus.EXECUTABLE, side="LAY"
        )
        mock_order.order_type.price = 1.02
        mock_order.order_type.ORDER_TYPE = OrderTypes.LIMIT
        mock_order_two = mock.Mock(
            selection_id=123, handicap=1, status=OrderStatus.PENDING
        )
        mock_order_three = mock.Mock(
            selection_id=123, handicap=1, status=OrderStatus.EXECUTABLE, simulated=False
        )
        mock_market.blotter.live_orders = [
            mock_order,
            mock_order_two,
            mock_order_three,
        ]
        mock_market_analytics = {
            (mock_order.selection_id, mock_order.handicap): mock.Mock(traded={1: 2})
        }
        mock_market.market_book = mock_market_book
        self.middleware._process_simulated_orders(mock_market, mock_market_analytics)
        mock_order.simulated.assert_called_with(mock_market_book, {1: 2})
        mock_order_two.simulated.assert_not_called()
    def test__sort_orders(self):
        # Expected order: LAY by price descending, then BACK by price
        # ascending, with non-LIMIT orders last.
        order_one = mock.Mock(side="LAY", bet_id=1)
        order_one.order_type.price = 1.01
        order_two = mock.Mock(side="LAY", bet_id=2)
        order_two.order_type.price = 1.02
        order_three = mock.Mock(side="LAY", bet_id=3)
        order_three.order_type.price = 1.01
        order_four = mock.Mock(side="BACK", bet_id=4)
        order_four.order_type.price = 1.2
        order_five = mock.Mock(side="BACK", bet_id=5)
        order_five.order_type.price = 1.2
        order_six = mock.Mock(side="BACK", bet_id=6)
        order_six.order_type.price = 1.19
        # NOTE(review): bet_id 6 duplicates order_six's — possibly meant
        # to be 7; confirm against the fixture's intent.
        order_seven = mock.Mock(side="BACK", bet_id=6)
        order_seven.order_type.price = "ERROR"
        order_seven.order_type.ORDER_TYPE = OrderTypes.MARKET_ON_CLOSE
        orders = [
            order_one,
            order_two,
            order_three,
            order_four,
            order_five,
            order_six,
            order_seven,
        ]
        self.assertEqual(
            self.middleware._sort_orders(orders),
            [
                order_two,
                order_one,
                order_three,
                order_six,
                order_four,
                order_five,
                order_seven,
            ],
        )
    @mock.patch("flumine.markets.middleware.RunnerAnalytics")
    def test__process_runner(self, mock_runner_analytics):
        # Second call with the same runner reuses the cached analytics object.
        market_analytics = {}
        mock_runner = mock.Mock()
        self.middleware._process_runner(market_analytics, mock_runner, True)
        self.assertEqual(len(market_analytics), 1)
        self.middleware._process_runner(market_analytics, mock_runner, False)
        self.assertEqual(len(market_analytics), 1)
        mock_runner_analytics.assert_called_with(mock_runner)
        mock_runner_analytics().assert_called_with(mock_runner, False)
class RunnerAnalyticsTest(unittest.TestCase):
    """Unit tests for RunnerAnalytics: traded-volume deltas, middle price
    and matched-amount calculations for a single runner."""
    def setUp(self) -> None:
        self.mock_runner = mock.Mock()
        self.mock_runner.ex.traded_volume = [{"price": 1.01, "size": 2}]
        self.runner_analytics = RunnerAnalytics(self.mock_runner)
    def test_init(self):
        self.assertEqual(self.runner_analytics._runner, self.mock_runner)
        self.assertEqual(
            self.runner_analytics._traded_volume, self.mock_runner.ex.traded_volume
        )
        self.assertEqual(self.runner_analytics.traded, {})
        self.assertEqual(self.runner_analytics.matched, 0)
        self.assertIsNone(self.runner_analytics.middle)
        self.assertEqual(self.runner_analytics._p_v, {1.01: 2})
    @mock.patch("flumine.markets.middleware.RunnerAnalytics._calculate_matched")
    @mock.patch("flumine.markets.middleware.RunnerAnalytics._calculate_middle")
    @mock.patch("flumine.markets.middleware.RunnerAnalytics._calculate_traded")
    def test_call(
        self, mock__calculate_traded, mock__calculate_middle, mock__calculate_matched
    ):
        mock_runner = mock.Mock()
        self.runner_analytics(mock_runner, True)
        mock__calculate_traded.assert_called_with(mock_runner.ex.traded_volume)
        # NOTE: middle is asserted against the *previous* runner snapshot.
        mock__calculate_middle.assert_called_with(self.mock_runner)
        mock__calculate_matched.assert_called_with(mock_runner)
        self.assertEqual(
            self.runner_analytics._traded_volume, mock_runner.ex.traded_volume
        )
        self.assertEqual(self.runner_analytics.middle, mock__calculate_middle())
        self.assertEqual(self.runner_analytics.matched, mock__calculate_matched())
        self.assertEqual(self.runner_analytics.traded, mock__calculate_traded())
        self.assertEqual(self.runner_analytics._runner, mock_runner)
    @mock.patch("flumine.markets.middleware.RunnerAnalytics._calculate_matched")
    @mock.patch("flumine.markets.middleware.RunnerAnalytics._calculate_middle")
    @mock.patch("flumine.markets.middleware.RunnerAnalytics._calculate_traded")
    def test_call_no_update(
        self, mock__calculate_traded, mock__calculate_middle, mock__calculate_matched
    ):
        # update=False must skip every calculation and reset the outputs.
        mock_runner = mock.Mock()
        self.runner_analytics(mock_runner, False)
        mock__calculate_traded.assert_not_called()
        mock__calculate_middle.assert_not_called()
        mock__calculate_matched.assert_not_called()
        self.assertEqual(self.runner_analytics.matched, 0)
        self.assertEqual(self.runner_analytics.traded, {})
    def test__calculate_traded_dict_empty(self):
        self.runner_analytics._traded_volume = []
        self.assertEqual(self.runner_analytics._calculate_traded([]), {})
    def test__calculate_traded_dict_same(self):
        traded_volume = [{"price": 1.01, "size": 69}]
        self.runner_analytics._traded_volume = [{"price": 1.01, "size": 69}]
        self.runner_analytics._p_v = {1.01: 69}
        self.assertEqual(self.runner_analytics._calculate_traded(traded_volume), {})
        self.assertEqual(self.runner_analytics._p_v, {1.01: 69})
    def test__calculate_traded_dict_new(self):
        traded_volume = [{"price": 1.01, "size": 69}]
        self.runner_analytics._traded_volume = []
        self.assertEqual(
            self.runner_analytics._calculate_traded(traded_volume), {1.01: 67.0}
        )
        self.assertEqual(self.runner_analytics._p_v, {1.01: 69})
    def test__calculate_traded_dict_new_multi(self):
        traded_volume = [
            {"price": 1.01, "size": 69},
            {"price": 10, "size": 32},
        ]
        self.runner_analytics._traded_volume = [{"price": 1.01, "size": 30}]
        self.runner_analytics._p_v = {1.01: 30}
        self.assertEqual(
            self.runner_analytics._calculate_traded(traded_volume),
            {1.01: 39.0, 10: 32},
        )
        self.assertEqual(self.runner_analytics._p_v, {1.01: 69, 10: 32})
    def test__calculate_middle(self):
        # With empty ladders the middle defaults to (1 + 1000) / 2 = 500.5.
        mock_runner = mock.Mock()
        mock_runner.ex.available_to_back = []
        mock_runner.ex.available_to_lay = []
        self.assertEqual(self.runner_analytics._calculate_middle(mock_runner), 500.5)
        mock_runner.ex.available_to_back = [{"price": 2.00}]
        mock_runner.ex.available_to_lay = [{"price": 2.02}]
        self.assertEqual(self.runner_analytics._calculate_middle(mock_runner), 2.01)
        mock_runner.ex.available_to_back = [{"price": 10.00}]
        mock_runner.ex.available_to_lay = [{"price": 15.5}]
        self.assertEqual(self.runner_analytics._calculate_middle(mock_runner), 12.75)
    def test__calculate_matched(self):
        self.runner_analytics._runner.total_matched = 12344
        mock_runner = mock.Mock(total_matched=12345)
        self.assertEqual(self.runner_analytics._calculate_matched(mock_runner), 1)
        self.runner_analytics._runner = mock_runner
        self.assertEqual(self.runner_analytics._calculate_matched(mock_runner), 0)
    def test__calculate_matched_runner_removal(self):
        # total_matched dropping to 0 (runner removal) must not go negative.
        self.runner_analytics._runner.total_matched = 12344
        mock_runner = mock.Mock(total_matched=0)
        self.assertEqual(self.runner_analytics._calculate_matched(mock_runner), 0)
        self.runner_analytics._runner = mock_runner
        self.assertEqual(self.runner_analytics._calculate_matched(mock_runner), 0)
|
liampauling/flumine
|
tests/test_middleware.py
|
Python
|
mit
| 21,263
|
"""
Пример:
Пусть n = 3, т. е. есть три элемента (1, 2, 3). Пусть k = 2.
Все различные сочетания из 3 элементов по 2: (1, 2), (1, 3), (2, 3).
Различных сочетаний три, поэтому C(3, 2) = 3.
Несложно понять, что C(n, 0) = 1, так как из n элементов выбрать 0 можно единственным образом, а именно, ничего не выбрать.
Также несложно понять, что если k > n, то C(n, k) = 0, так как невозможно, например, из трех элементов выбрать пять.
Для вычисления C(n, k) в других случаях используется следующая рекуррентная формула:
C(n, k) = C(n - 1, k) + C(n - 1, k - 1).
Вашей программе на вход подается строка, содержащая два целых числа n и k (1 ≤ n ≤ 10, 0 ≤ k ≤ 10).
Ваша программа должна вывести единственное число: C(n, k).
Sample Input 1:
3 2
Sample Output 1:
3
Sample Input 2:
10 5
Sample Output 2:
252
"""
def fn(n, k):
#if n not in range(1, 11):
# return 0
#if k not in range(0, 11):
# return 0
if k == 0:
return 1
if k > n:
return 0
return fn(n - 1, k) + fn(n - 1, k - 1)
n, k = map(int, input().split())
print(fn(n, k))
|
RootTeam/pytaskscollect
|
func/Stepic_1_3_Cnk.py
|
Python
|
mit
| 1,472
|
"""
The `Sitemaps protocol <http://en.wikipedia.org/wiki/Sitemaps>`_ allows a webmaster
to inform search engines about URLs on a website that are available for crawling.
Django comes with a high-level framework that makes generating sitemap XML files easy.
Install the sitemap application as per the `instructions in the django documentation
<https://docs.djangoproject.com/en/dev/ref/contrib/sitemaps/>`_, then edit your
project's ``urls.py`` and add a reference to Photologue's Sitemap classes in order to
include all the publicly-viewable Photologue pages:
.. code-block:: python
...
from photologue.sitemaps import GallerySitemap, PhotoSitemap
sitemaps = {...
'photologue_galleries': GallerySitemap,
'photologue_photos': PhotoSitemap,
...
}
etc...
There are 2 sitemap classes, as in some cases you may want to have gallery pages,
but no photo detail page (e.g. if all photos are displayed via a javascript
lightbox).
"""
from django.contrib.sitemaps import Sitemap
from .models import Gallery, Photo
# Note: Gallery and Photo are split, because there are use cases for having galleries
# in the sitemap, but not photos (e.g. if the photos are displayed with a lightbox).
class GallerySitemap(Sitemap):
    """Sitemap entries for the publicly-visible galleries on the current site."""
    def items(self):
        """Return the galleries to include in the sitemap."""
        # The following code is very basic and will probably cause problems with
        # large querysets.
        return Gallery.objects.on_site().is_public()
    def lastmod(self, obj):
        """Return the last-modification timestamp reported for *obj*.

        NOTE(review): this is the creation date (date_added), not a true
        update time — confirm there is no updated-at field to prefer.
        """
        return obj.date_added
class PhotoSitemap(Sitemap):
    """Sitemap entries for the publicly-visible photos on the current site."""
    def items(self):
        """Return the photos to include in the sitemap."""
        # The following code is very basic and will probably cause problems with
        # large querysets.
        return Photo.objects.on_site().is_public()
    def lastmod(self, obj):
        """Return the last-modification timestamp reported for *obj*.

        NOTE(review): this is the creation date (date_added), not a true
        update time — confirm there is no updated-at field to prefer.
        """
        return obj.date_added
|
rmaceissoft/django-photologue
|
photologue/sitemaps.py
|
Python
|
bsd-3-clause
| 1,809
|
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.from oslo.config import cfg.
import eventlet
eventlet.monkey_patch()
from oslo_config import cfg
from oslo_log import log
from oslo_service import loopingcall
from oslo_utils import timeutils
import random
import requests
from neutron import context as neutron_context
from neutron.db import agents_db
from neutron.db import common_db_mixin
from neutron import manager
from networking_vsphere._i18n import _, _LE, _LI, _LW
from networking_vsphere.common import constants as ovsvapp_const
from networking_vsphere.db import ovsvapp_db
LOG = log.getLogger(__name__)
# Base interval (seconds) between agent-monitor runs; may be scaled up
# in initialize_thread when many API workers are configured.
DEFAULT_MONITOR_INTERVAL = 10
# OVSvApp Fault Management config read from neutron.conf.
OVSVAPP_MONITOR_OPTS = [
    cfg.BoolOpt('enable_ovsvapp_monitor', default=False,
                help=_('To monitor the OVSvApp Agents.'))
]
# Register under the [OVSVAPP] section of neutron.conf.
cfg.CONF.register_opts(OVSVAPP_MONITOR_OPTS, "OVSVAPP")
class AgentMonitor(agents_db.AgentDbMixin, common_db_mixin.CommonDbMixin):
"""OVSvApp agent monitor class.
Represents agent_monitor class which maintains active and inactive
agents and reschedules its resources.
"""
active_agents = []
inactive_agents = []
agents = {}
context = None
notifier = None
plugin = None
agent_ext_support = None
_pool = None
@property
def threadpool(self):
if self._pool is None:
self._pool = eventlet.GreenPool(2)
return self._pool
    def initialize_thread(self, notifier):
        """Initialization of agent monitor thread.

        Stores the RPC *notifier* and starts a fixed-interval loop that
        runs monitor_agent_state.
        """
        try:
            self.notifier = notifier
            monitor_interval = DEFAULT_MONITOR_INTERVAL
            api_worker_count = cfg.CONF.api_workers
            # Scale the interval with the worker count — presumably to
            # reduce contention when many API workers run this loop;
            # TODO confirm the rationale.
            if api_worker_count and api_worker_count > 4:
                monitor_interval = 4 * api_worker_count
            monitor_thread = loopingcall.FixedIntervalLoopingCall(
                self.monitor_agent_state)
            monitor_thread.start(interval=monitor_interval)
            LOG.debug("Successfully initialized agent monitor "
                      "thread with loop interval: %s.", monitor_interval)
        except Exception:
            # Best-effort: monitoring failure must not break server startup.
            LOG.exception(_LE("Cannot initialize agent monitor thread.."))
    def _update_agent_admin_state(self, context, id, agt):
        """Persist agt['agent'] (admin_state_up) for the agent with *id*.

        Returns True only when the stored value actually changed, which
        lets callers treat the update as an ownership claim (see
        monitor_agent_state).
        """
        agent_data = agt['agent']
        with context.session.begin(subtransactions=True):
            agent = self._get_agent(context, id)
            # Only write on a real state transition so the return value
            # reflects whether this caller performed the change.
            if agent['admin_state_up'] != agent_data['admin_state_up']:
                agent.update(agent_data)
                return True
        return False
def update_agent_state(self, agent_id, status):
agent_state = {'agent': {'admin_state_up': status}}
return self._update_agent_admin_state(self.context,
agent_id,
agent_state)
    def _get_eligible_ovsvapp_agent(self, cluster_id, vcenter_id):
        """Pick a live OVSvApp agent serving the given vCenter cluster.

        Returns a randomly chosen agent if its heartbeat is recent;
        otherwise the first other cluster agent whose heartbeat is both
        newer than the chosen one's and itself recent.  Returns None
        (implicitly) when no live candidate exists.
        """
        cluster_agents = []
        agents = self.plugin.get_agents(
            self.context,
            filters={'agent_type': [ovsvapp_const.AGENT_TYPE_OVSVAPP]})
        for agent in agents:
            agent_cluster_id = agent['configurations'].get('cluster_id')
            agent_vcenter_id = agent['configurations'].get('vcenter_id')
            # Keep only agents serving this exact vCenter + cluster pair.
            if (cluster_id != agent_cluster_id) or (
                    vcenter_id != agent_vcenter_id):
                continue
            cluster_agents.append(agent)
        if not cluster_agents:
            return
        # Random pick spreads assignments across eligible agents.
        _agent = random.choice(cluster_agents)
        recent_time = _agent['heartbeat_timestamp']
        if not timeutils.is_older_than(recent_time,
                                       cfg.CONF.agent_down_time):
            return _agent
        cluster_agents.remove(_agent)
        for agent in cluster_agents:
            # delta > 0 means this agent's heartbeat is newer than the
            # (stale) randomly chosen agent's.
            delta = timeutils.delta_seconds(recent_time,
                                            agent['heartbeat_timestamp'])
            if delta > 0:
                if not timeutils.is_older_than(agent['heartbeat_timestamp'],
                                               cfg.CONF.agent_down_time):
                    return agent
def process_ovsvapp_agent(self, agent):
"""Inform the OVSvApp agent.
To set the other host into maintenance or shutdown mode.
"""
try:
LOG.info(_LI("Processing the OVSvApp agent to set the other host "
"into maintenance or shutdown mode %s."), agent)
device_data = {}
agent_config = agent['configurations']
source_host = agent_config.get('esx_host_name')
chosen_agent = self._get_eligible_ovsvapp_agent(
agent_config['cluster_id'], agent_config['vcenter_id'])
if chosen_agent and (chosen_agent['id'] in self.active_agents):
cluster_id = chosen_agent['configurations'].get('cluster_id')
device_data['assigned_agent_host'] = chosen_agent['host']
device_data['esx_host_name'] = source_host
device_data['ovsvapp_agent'] = '-'.join(
['ovsvapp', source_host.replace('.', '-')])
LOG.info(_LI("Invoking device_update RPC with"
"target host %s."),
chosen_agent['host'])
self.notifier.device_update(self.context,
device_data, cluster_id)
else:
ovsvapp_db.set_cluster_threshold(agent_config['vcenter_id'],
agent_config['cluster_id'])
LOG.info(_LI("No eligible OVSvApp agents found for "
"processing. Reverting DB status for the agent."))
self.update_agent_state(agent['id'], True)
except Exception:
agent_config = agent['configurations']
ovsvapp_db.set_cluster_threshold(agent_config['vcenter_id'],
agent_config['cluster_id'])
LOG.exception(_LE("Unable to inform the OVSvApp agent for "
"Host - maintenance or shutdown operation."))
    def _check_datapath_health(self, monitoring_ip):
        """Return True if the agent's monitoring endpoint reports OVS "OK".

        Polls http://<monitoring_ip>:8080/status.json with a 5s timeout.
        A missing IP, an error response, a timeout or any exception all
        count as unhealthy (False).
        """
        if monitoring_ip:
            url = 'http://%s:8080/status.json' % monitoring_ip
            try:
                response = requests.get(url, timeout=5)
                # NOTE(review): relies on Response truthiness (falsy on
                # HTTP error statuses) — confirm that is the intent.
                if response:
                    LOG.debug("HTTP response from OVSvApp agent@ %(ip)s is "
                              "%(res)s", {'res': response,
                                          'ip': monitoring_ip})
                    status = response.json()
                    # NOTE(review): the % interpolation here is eager; the
                    # lazy LOG.info(msg, args) form is preferred — confirm.
                    LOG.info(_LI("ovs status is %(st)s from agent@ %(ip)s")
                             % {'st': status, 'ip': monitoring_ip})
                    return (status.get('ovs') == "OK")
            except Exception:
                LOG.exception(_LE("Failed to get OVS status. Will continue "
                                  "with mitigation."))
        return False
def check_ovsvapp_data_path(self, agent):
agent_config = agent['configurations']
# Check if the Data path is alright.
monitoring_ip = agent_config.get('monitoring_ip')
datapath_health = self._check_datapath_health(monitoring_ip)
if datapath_health:
LOG.info(_LI("Data path looks to be OK on %s. "
"Skipping mitigation."), agent['host'])
LOG.warning(_LW("Issues encountered in receiving "
"heartbeats from OVSvApp Agent on "
"host %s."), agent['host'])
else:
LOG.warning(_LW("Data path seems to be broken already on %s."
"Will continue with mitigation."), agent['host'])
return datapath_health
def _check_plugin_ext_support(self, extension):
"""Helper Method.
To check if plugin supports Agent Management Extension.
"""
try:
if self.plugin:
return extension in self.plugin.supported_extension_aliases
except Exception:
LOG.exception(_LE("%s extension is not supported."), extension)
return False
    def get_plugin_and_initialize(self):
        """Initializes plugin and populates list of all agents.

        Obtains an admin context and the core plugin, and records
        whether the plugin supports the 'agent' extension.  Returns
        True on success, False when no plugin is available or any step
        fails.
        """
        try:
            self.context = neutron_context.get_admin_context()
            self.plugin = manager.NeutronManager.get_plugin()
            if not self.plugin:
                return False
            self.agent_ext_support = self._check_plugin_ext_support('agent')
        except Exception:
            LOG.warning(_LW("Failed initialization of agent monitor.."))
            return False
        return True
def monitor_agent_state(self):
"""Thread to monitor agent state.
Represents a thread which maintains list of active
and inactive agents based on the heartbeat recorded.
"""
# Do nothing until plugin is initialized.
if not self.plugin:
status = self.get_plugin_and_initialize()
if not status:
LOG.warning(_LW("Plugin not defined...returning!"))
return
if not self.agent_ext_support:
LOG.warning(_LW("Agent extension is not loaded by plugin."))
return
try:
self.agents = self.plugin.get_agents(
self.context,
filters={'agent_type': [ovsvapp_const.AGENT_TYPE_OVSVAPP]})
except Exception:
LOG.exception(_LE("Unable to get agent list."))
return
for agent in self.agents:
agent_time_stamp = agent['heartbeat_timestamp']
agent_id = agent['id']
status = timeutils.is_older_than(agent_time_stamp,
cfg.CONF.agent_down_time * 2)
LOG.debug("For ovsvapp_agent %(agent)s agent_state %(state)s.",
{'agent': agent, 'state': status})
try:
agent_config = agent['configurations']
if not status:
if agent_id not in self.active_agents:
self.active_agents.append(agent_id)
self.update_agent_state(agent_id, True)
if agent_id in self.inactive_agents:
LOG.info(_LI("Removing agent: %s from inactive "
"agent list."), agent_id)
self.inactive_agents.remove(agent_id)
ovsvapp_db.reset_cluster_threshold(
agent_config['vcenter_id'],
agent_config['cluster_id']
)
else:
if not agent['admin_state_up']:
# This agent is already handled in earlier run or by
# another Neutron server. Just update the cache and
# proceed further.
if agent_id not in self.inactive_agents:
LOG.info(_LI("Moving agent: %s from active to "
"inactive."), agent_id)
self.inactive_agents.append(agent_id)
if agent_id in self.active_agents:
self.active_agents.remove(agent_id)
continue
if self.update_agent_state(agent_id, False):
# Got the ownership for mitigating this agent.
if agent_id in self.active_agents:
self.active_agents.remove(agent_id)
if self.check_ovsvapp_data_path(agent):
continue
cluster_status = (
ovsvapp_db.update_and_get_cluster_lock(
agent_config['vcenter_id'],
agent_config['cluster_id']))
if cluster_status == ovsvapp_db.SUCCESS:
# Got the cluster lock for mitigating this agent.
self.threadpool.spawn_n(self.process_ovsvapp_agent,
agent)
LOG.info(_LI("Spawned a thread for processing "
"OVSvApp Agent %s."), agent['id'])
if agent_id not in self.inactive_agents:
LOG.info(_LI("Moving agent: %s from active to "
"inactive."), agent_id)
self.inactive_agents.append(agent_id)
elif cluster_status == ovsvapp_db.RETRY:
self.update_agent_state(agent['id'], True)
LOG.debug("Will retry the agent %s in the next "
"iteration.", agent['id'])
elif cluster_status == ovsvapp_db.GIVE_UP:
self.update_agent_state(agent['id'], True)
LOG.debug("Threshold already reached. Will retry "
"the agent %s in the next run",
agent['id'])
except Exception:
LOG.exception(_LE("Exception occurred in"
"monitor_agent_state."))
|
VTabolin/networking-vsphere
|
networking_vsphere/monitor/ovsvapp_monitor.py
|
Python
|
apache-2.0
| 14,206
|
import base64
import json
import os
import uuid
import threading
from multiprocessing.managers import AcquirerProxy, BaseManager, DictProxy
class ServerDictManager(BaseManager):
    # Single dict shared by all clients; lives in the process that starts
    # this manager (see start_server()).
    shared_data = {}

def _get_shared():
    # Registered callable executed in the server process to hand out the
    # shared dict as a DictProxy.
    return ServerDictManager.shared_data

# Expose the shared dict and a server-side threading.Lock over the
# manager connection.
ServerDictManager.register("get_dict",
                           callable=_get_shared,
                           proxytype=DictProxy)
ServerDictManager.register('Lock', threading.Lock, AcquirerProxy)
class ClientDictManager(BaseManager):
    # Client-side counterpart of ServerDictManager: connects to an existing
    # server rather than owning the data itself.
    pass

# Register method names only; the implementations live on the server.
ClientDictManager.register("get_dict")
ClientDictManager.register("Lock")
class StashServer(object):
    """Context manager that runs a stash server for its enclosed block.

    On entry it starts a ServerDictManager process and publishes its
    address/authkey through the environment (store_env_config) so child
    processes can connect; on exit the manager is shut down.
    """

    def __init__(self, address=None, authkey=None):
        self.address = address
        self.authkey = authkey
        self.manager = None

    def __enter__(self):
        self.manager, self.address, self.authkey = start_server(self.address,
                                                                self.authkey)
        store_env_config(self.address, self.authkey)
        # Bug fix: return self so `with StashServer(...) as server:` binds
        # the server object instead of None.
        return self

    def __exit__(self, *args, **kwargs):
        if self.manager is not None:
            self.manager.shutdown()
def load_env_config():
    """Read the stash server's address and authkey from the environment.

    :returns: tuple ``(address, authkey)`` where ``address`` is either a
        ``(host, port)`` tuple or a string path, and ``authkey`` is the raw
        ``bytes`` key expected by multiprocessing managers.
    """
    address, authkey = json.loads(os.environ["WPT_STASH_CONFIG"])
    if isinstance(address, list):
        # JSON has no tuple type; a (host, port) pair round-trips as a list.
        address = tuple(address)
    else:
        address = str(address)
    # Bug fix: base64.decodestring was deprecated and removed in Python 3.9;
    # b64decode accepts both str and bytes input and returns bytes.
    authkey = base64.b64decode(authkey)
    return address, authkey
def store_env_config(address, authkey):
    """Publish the stash server's address and authkey via the environment.

    :param address: a (host, port) tuple or a string path.
    :param authkey: the raw ``bytes`` authentication key.
    """
    # Bug fix: base64.encodestring was removed in Python 3.9, and raw bytes
    # are not JSON-serializable; encode to ASCII text before dumping.
    authkey = base64.b64encode(authkey).decode("ascii")
    os.environ["WPT_STASH_CONFIG"] = json.dumps((address, authkey))
def start_server(address=None, authkey=None):
    """Launch a ServerDictManager process.

    :returns: tuple ``(manager, address, authkey)`` for the running server.
    """
    server = ServerDictManager(address, authkey)
    server.start()
    return (server, server._address, server._authkey)
class LockWrapper(object):
    """Adapt a (possibly manager-proxied) lock into a context manager.

    Exposes explicit acquire()/release() and supports the ``with``
    statement by delegating to the wrapped lock.
    """

    def __init__(self, lock):
        self.lock = lock

    def acquire(self):
        self.lock.acquire()

    def release(self):
        self.lock.release()

    def __enter__(self):
        self.lock.acquire()

    def __exit__(self, *exc_info, **kwargs):
        self.lock.release()
#TODO: Consider expiring values after some fixed time for long-running
#servers
class Stash(object):
    """Key-value store for persisting data across HTTP/S and WS/S requests.

    This data store is specifically designed for persisting data across server
    requests. The synchronization is achieved by using the BaseManager from
    the multiprocessing module so different processes can access the same data.

    Stash can be used interchangeably between HTTP, HTTPS, WS and WSS servers.
    A thing to note about WS/S servers is that they require additional steps in
    the handlers for accessing the same underlying shared data in the Stash.
    This can usually be achieved by using load_env_config(). When using Stash
    interchangeably between HTTP/S and WS/S request, the path part of the key
    should be explicitly specified if accessing the same key/value subset.

    The store has several unusual properties. Keys are of the form (path,
    uuid), where path is, by default, the path in the HTTP request and
    uuid is a unique id. In addition, the store is write-once, read-once,
    i.e. the value associated with a particular key cannot be changed once
    written and the read operation (called "take") is destructive. Taken
    together, these properties make it difficult for data to accidentally
    leak between different resources or different requests for the same
    resource.
    """
    # Class-level so all Stash instances in a process share one backing
    # store and one lock.
    _proxy = None
    lock = None

    def __init__(self, default_path, address=None, authkey=None):
        self.default_path = default_path
        self._get_proxy(address, authkey)
        self.data = Stash._proxy

    def _get_proxy(self, address=None, authkey=None):
        if address is None and authkey is None:
            # Single-process mode: plain dict and plain lock.
            Stash._proxy = {}
            Stash.lock = threading.Lock()

        if Stash._proxy is None:
            # Multi-process mode: connect to a running ServerDictManager.
            manager = ClientDictManager(address, authkey)
            manager.connect()
            Stash._proxy = manager.get_dict()
            Stash.lock = LockWrapper(manager.Lock())

    def _wrap_key(self, key, path):
        if path is None:
            path = self.default_path
        # This key format is required to support using the path. Since the data
        # passed into the stash can be a DictProxy which wouldn't detect changes
        # when writing to a subdict.
        return (str(path), str(uuid.UUID(key)))

    def put(self, key, value, path=None):
        """Place a value in the shared stash.

        :param key: A UUID to use as the data's key.
        :param value: The data to store. This can be any python object.
        :param path: The path that has access to read the data (by default
                     the current request path)
        :raises ValueError: if value is None.
        :raises StashError: if the key already holds a value (write-once).
        """
        if value is None:
            raise ValueError("SharedStash value may not be set to None")
        internal_key = self._wrap_key(key, path)
        if internal_key in self.data:
            # Bug fix: look up the old value with the tuple key itself,
            # not its str() form, which raised KeyError while formatting
            # this very error message.
            raise StashError("Tried to overwrite existing shared stash value "
                             "for key %s (old value was %s, new value is %s)" %
                             (internal_key, self.data[internal_key], value))
        else:
            self.data[internal_key] = value

    def take(self, key, path=None):
        """Remove a value from the shared stash and return it.

        :param key: A UUID to use as the data's key.
        :param path: The path that has access to read the data (by default
                     the current request path)
        :returns: the stored value, or None when the key holds nothing.
        """
        internal_key = self._wrap_key(key, path)
        value = self.data.get(internal_key, None)
        if value is not None:
            try:
                self.data.pop(internal_key)
            except KeyError:
                # Another consumer may have raced us to the pop; the read
                # already succeeded, so silently continue.
                pass
        return value
class StashError(Exception):
    """Raised for invalid stash operations, e.g. overwriting an existing key."""
|
anthgur/servo
|
tests/wpt/web-platform-tests/tools/wptserve/wptserve/stash.py
|
Python
|
mpl-2.0
| 5,785
|
#!/usr/bin/python3
from flask import Flask
import os
import requests
from flask import Flask, render_template, request
import urllib.request
import time
from time import sleep
import threading # performing multiple tasks at the same time
from queue import Queue # " "
app = Flask(__name__)
# Module-level state shared between the request handlers below.
# Not the best way of doing this, but I am trying to keep it super simple
# as I learn how to use Flask. I promise next time it'll be better.
otime = 0  # seconds remaining to open the door (0 = already open); see shake()
ctime = 0  # seconds remaining to close the door (0 = already closed)
state = "init" # last reported door state; starting with dummy data
ip = "1.1.1.1" # Coopener's IP; starting with dummy data
port="5000" # debug port is 5000. Once completed should be changed to prod port 80
connstate = False # Used to track connection status of Coopener to SmartHome
lastSeen = int(time.time()) # Number of seconds since epoch. Used to track heartbeats for connection
timeout = 30 # Number of seconds to wait for heartbeat before dropping connection
@app.route("/")
def index():
global timeout
global lastSeen
global connstate
#if connstate == True: # Coopener is connected, display information
if (int(time.time()) - lastSeen) >= timeout:
connstate = False
return render_template('index.html', connstate=connstate)
return render_template('index.html', connstate=connstate, otime=otime, ctime=ctime, state=state, ip=ip, port=port)
#return "Hello World! - Love from SmartHome"
@app.route('/handshake')
def shake():
    '''
    Handshake between Coopener and Smarthome srvr

    3-way handshake, initiated by Coopener.
    Coopener ---> SmartHome. Sends Coopener IP and shake=1
    SmartHome ---> Coopener. Responds "OK(2)"
    Coopener ---> SmartHome. Sends shake=3. state=x, ctime=y, otime=z

    Where state is whether door is currently open or close. otime is how many
    seconds to open door (0 is already open), ctime is how many seconds to
    close door (0 is already closed).
    '''
    global otime
    global ctime
    global state
    global ip
    global connstate
    global lastSeen
    shake_step = request.args.get('shake')
    if shake_step == "1":  # Coopener is initiating handshake
        if request.args.get('ip'):
            ip = request.args.get('ip')
        # Even if connection was already established, tear it down and let
        # Coopener start again.
        connstate = False
        return "OK(2)"
    if shake_step == "3":  # Initial contact made, Coopener is now sending its info
        if request.args.get('otime'):  # Door open time (seconds remaining)
            otime = int(request.args.get('otime'))
        if request.args.get('ctime'):  # Door close time (seconds remaining)
            ctime = int(request.args.get('ctime'))
        if request.args.get('state'):
            state = request.args.get('state')
        lastSeen = int(time.time())  # Heartbeat timestamp (seconds since epoch)
        connstate = True  # Connection to Coopener is now established
        return "OK(4)"
    # Bug fix: previously a missing or unknown 'shake' value fell through
    # and returned None, which makes Flask raise a 500. Respond explicitly.
    return ("Bad handshake request", 400)
@app.route('/heartbeat')
def beat():
    '''
    Every n seconds Coopener should send a heartbeat/keepalive for the
    established connection. It carries the current state of Coopener.
    Yes, its extra overhead... but its simpler. I will need to re-write
    this entire protocol one day as it is really ugly and way too rigid.
    '''
    global otime
    global ctime
    global state
    global ip
    global connstate
    global lastSeen
    if not connstate:
        return "No established connection found"
    # Record when this heartbeat arrived (seconds since epoch).
    lastSeen = int(time.time())
    args = request.args
    if args.get('otime'):  # Door open time (seconds remaining)
        otime = int(args.get('otime'))
    if args.get('ctime'):  # Door close time (seconds remaining)
        ctime = int(args.get('ctime'))
    if args.get('state'):
        state = args.get('state')
    return "OK(HB)"
if __name__ == "__main__":
#app.debug = True
app.run(host='0.0.0.0')
|
Aristocles/coopener
|
v2/smarthome/webtool.py
|
Python
|
gpl-3.0
| 4,099
|
# Glyph-name -> advance width (in 1/1000 em units) for the Adobe core
# Times-BoldItalic font, consumed by reportlab's PDF string-metrics code.
widths = {'A': 667,
 'AE': 944,
 'Aacute': 667,
 'Acircumflex': 667,
 'Adieresis': 667,
 'Agrave': 667,
 'Aring': 667,
 'Atilde': 667,
 'B': 667,
 'C': 667,
 'Ccedilla': 667,
 'D': 722,
 'E': 667,
 'Eacute': 667,
 'Ecircumflex': 667,
 'Edieresis': 667,
 'Egrave': 667,
 'Eth': 722,
 'Euro': 500,
 'F': 667,
 'G': 722,
 'H': 778,
 'I': 389,
 'Iacute': 389,
 'Icircumflex': 389,
 'Idieresis': 389,
 'Igrave': 389,
 'J': 500,
 'K': 667,
 'L': 611,
 'Lslash': 611,
 'M': 889,
 'N': 722,
 'Ntilde': 722,
 'O': 722,
 'OE': 944,
 'Oacute': 722,
 'Ocircumflex': 722,
 'Odieresis': 722,
 'Ograve': 722,
 'Oslash': 722,
 'Otilde': 722,
 'P': 611,
 'Q': 722,
 'R': 667,
 'S': 556,
 'Scaron': 556,
 'T': 611,
 'Thorn': 611,
 'U': 722,
 'Uacute': 722,
 'Ucircumflex': 722,
 'Udieresis': 722,
 'Ugrave': 722,
 'V': 667,
 'W': 889,
 'X': 667,
 'Y': 611,
 'Yacute': 611,
 'Ydieresis': 611,
 'Z': 611,
 'Zcaron': 611,
 'a': 500,
 'aacute': 500,
 'acircumflex': 500,
 'acute': 333,
 'adieresis': 500,
 'ae': 722,
 'agrave': 500,
 'ampersand': 778,
 'aring': 500,
 'asciicircum': 570,
 'asciitilde': 570,
 'asterisk': 500,
 'at': 832,
 'atilde': 500,
 'b': 500,
 'backslash': 278,
 'bar': 220,
 'braceleft': 348,
 'braceright': 348,
 'bracketleft': 333,
 'bracketright': 333,
 'breve': 333,
 'brokenbar': 220,
 'bullet': 350,
 'c': 444,
 'caron': 333,
 'ccedilla': 444,
 'cedilla': 333,
 'cent': 500,
 'circumflex': 333,
 'colon': 333,
 'comma': 250,
 'copyright': 747,
 'currency': 500,
 'd': 500,
 'dagger': 500,
 'daggerdbl': 500,
 'degree': 400,
 'dieresis': 333,
 'divide': 570,
 'dollar': 500,
 'dotaccent': 333,
 'dotlessi': 278,
 'e': 444,
 'eacute': 444,
 'ecircumflex': 444,
 'edieresis': 444,
 'egrave': 444,
 'eight': 500,
 'ellipsis': 1000,
 'emdash': 1000,
 'endash': 500,
 'equal': 570,
 'eth': 500,
 'exclam': 389,
 'exclamdown': 389,
 'f': 333,
 'fi': 556,
 'five': 500,
 'fl': 556,
 'florin': 500,
 'four': 500,
 'fraction': 167,
 'g': 500,
 'germandbls': 500,
 'grave': 333,
 'greater': 570,
 'guillemotleft': 500,
 'guillemotright': 500,
 'guilsinglleft': 333,
 'guilsinglright': 333,
 'h': 556,
 'hungarumlaut': 333,
 'hyphen': 333,
 'i': 278,
 'iacute': 278,
 'icircumflex': 278,
 'idieresis': 278,
 'igrave': 278,
 'j': 278,
 'k': 500,
 'l': 278,
 'less': 570,
 'logicalnot': 606,
 'lslash': 278,
 'm': 778,
 'macron': 333,
 'minus': 606,
 'mu': 576,
 'multiply': 570,
 'n': 556,
 'nine': 500,
 'ntilde': 556,
 'numbersign': 500,
 'o': 500,
 'oacute': 500,
 'ocircumflex': 500,
 'odieresis': 500,
 'oe': 722,
 'ogonek': 333,
 'ograve': 500,
 'one': 500,
 'onehalf': 750,
 'onequarter': 750,
 'onesuperior': 300,
 'ordfeminine': 266,
 'ordmasculine': 300,
 'oslash': 500,
 'otilde': 500,
 'p': 500,
 'paragraph': 500,
 'parenleft': 333,
 'parenright': 333,
 'percent': 833,
 'period': 250,
 'periodcentered': 250,
 'perthousand': 1000,
 'plus': 570,
 'plusminus': 570,
 'q': 500,
 'question': 500,
 'questiondown': 500,
 'quotedbl': 555,
 'quotedblbase': 500,
 'quotedblleft': 500,
 'quotedblright': 500,
 'quoteleft': 333,
 'quoteright': 333,
 'quotesinglbase': 333,
 'quotesingle': 278,
 'r': 389,
 'registered': 747,
 'ring': 333,
 's': 389,
 'scaron': 389,
 'section': 500,
 'semicolon': 333,
 'seven': 500,
 'six': 500,
 'slash': 278,
 'space': 250,
 'sterling': 500,
 't': 278,
 'thorn': 500,
 'three': 500,
 'threequarters': 750,
 'threesuperior': 300,
 'tilde': 333,
 'trademark': 1000,
 'two': 500,
 'twosuperior': 300,
 'u': 556,
 'uacute': 556,
 'ucircumflex': 556,
 'udieresis': 556,
 'ugrave': 556,
 'underscore': 500,
 'v': 444,
 'w': 667,
 'x': 500,
 'y': 444,
 'yacute': 444,
 'ydieresis': 444,
 'yen': 500,
 'z': 389,
 'zcaron': 389,
 'zero': 500}
|
olivierdalang/stdm
|
third_party/reportlab/pdfbase/_fontdata_widths_timesbolditalic.py
|
Python
|
gpl-2.0
| 3,897
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
import sqlite3
from sys import stderr
from os import system, path, makedirs
from lm.dbhelper import Configure, ExperimentManager
from lm.dbhelper import DEFAULT_CONF_DIR
# Environment variable through which the chosen log directory is passed to
# the executed command (see Executor.execute).
path_variable = 'LM_LOGS_PATH'
# Parent directory under which per-experiment "<N>.exp/" folders are created.
DEFAULT_PARENT_LOG_DIRECTORY = './logs/'
class Executor:
    """Run a shell command as a tracked "experiment".

    Reserves a unique log directory, registers the run in the lm database,
    executes the command through bash (optionally under nohup) with
    stdout/stderr tee'd into log files, and records the final status:
    completed, aborted (Ctrl-C) or error (non-zero exit).
    """

    # Class-level declarations; real values are assigned in __init__.
    use_nohup = None
    hide_output = None
    args = []
    parent_log_dir = DEFAULT_PARENT_LOG_DIRECTORY
    log_dir = None
    exp_id = None
    commands = None
    stdout_path = None
    stderr_path = None
    conf = None

    def __init__(self, parser):
        """Parse arguments, connect to the database and reserve a log dir.

        :param parser: an argparse parser extended with the executor's
            own 'commands' and '--use_nohup' options.
        """
        parser.add_argument('commands', type=str)
        parser.add_argument('--use_nohup', default=True, type=bool)
        args = parser.parse_args()
        self.commands = args.commands
        self.use_nohup = args.use_nohup
        try:
            self.conf = Configure()
            self.manager = ExperimentManager()
        except sqlite3.OperationalError:
            # Typo fixes in user-facing messages: "Erro" -> "Error",
            # "execusion" -> "execution".
            print("Error: cannot connect to the database", file=stderr)
            print("Please use 'python lm.py init' in the "
                  + "current directory before execution.\n")
            exit()
        log_dir, exp_id = self.decide_log_dir(self.conf)
        self.create_log_dir(log_dir)
        self.log_dir = log_dir
        self.exp_id = exp_id
        self.args = args
        self.stdout_path = path.join(log_dir, "stdout.txt")
        self.stderr_path = path.join(log_dir, "stderr.txt")

    def decide_log_dir(self, conf):
        """Atomically claim the next experiment number and derive its dir."""
        with conf.conn:
            num = conf.get_and_increment_num_exp()
            p = path.join(self.parent_log_dir,
                          "%d.exp/" % num)
            return (p, num)

    def create_log_dir(self, log_dir):
        """Create the per-experiment log directory; it must not pre-exist."""
        if path.exists(path.dirname(log_dir)):
            raise RuntimeError(
                "Error: Somehow automatically decided " + \
                "the unique log folder is already exists:", log_dir)
        makedirs(log_dir)

    def experiment_start(self):
        """Record the experiment as started in the database."""
        print("#### The Experiment is Started. ####")
        self.manager.experiment_start(self.exp_id,
                                      self.commands)

    def experiment_completed(self):
        """Record a successful run together with its log paths."""
        self.manager.experiment_completed(self.stdout_path,
                                          self.stderr_path)
        print("#### The Experiment is Completed. ####")

    def experiment_aborted(self):
        """Record a user-interrupted run (KeyboardInterrupt/SystemExit)."""
        self.manager.experiment_aborted(self.stdout_path,
                                        self.stderr_path)
        print("#### The Experiment is Aborted. ####")

    def experiment_error(self):
        """Record a run that exited with a non-zero status."""
        self.manager.experiment_error(self.stdout_path,
                                      self.stderr_path)
        print("#### The Experiment Raises An Error. ####")

    def execute(self):
        """Run the command, teeing stdout/stderr into the log directory."""
        self.experiment_start()
        # create files
        open(self.stdout_path, 'a').close()
        open(self.stderr_path, 'a').close()
        try:
            # Note:
            # This solution does not work in sh.
            # See https://stackoverflow.com/questions/692000/how-do-i-write-stderr-to-a-file-while-using-tee-with-a-pipe
            cmd = ' '.join(
                ["env %s=%s" % (path_variable, self.log_dir),
                 "nohup" if self.use_nohup else "",
                 "%s" % self.commands,
                 "> >(tee %s) " % self.stdout_path,
                 "2> >(tee %s >&2)" % self.stderr_path]
            )
            print("Executed shell command:")
            print(" -", cmd)
            # subprocess.run (Python 3.5+) replaces os.system; bash is
            # required for the process-substitution syntax above.
            subprocess.run(cmd, shell=True, check=True,
                           executable='/bin/bash')
            self.experiment_completed()
        except (KeyboardInterrupt, SystemExit):
            print("\naborted.")
            self.experiment_aborted()
        except subprocess.CalledProcessError:
            print("\ncrashed.")
            self.experiment_error()
|
torotoki/lm
|
lm/run.py
|
Python
|
gpl-3.0
| 3,682
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Xedit(AutotoolsPackage):
    """Xedit is a simple text editor for X."""

    homepage = "https://cgit.freedesktop.org/xorg/app/xedit"
    url = "https://www.x.org/archive/individual/app/xedit-1.2.2.tar.gz"

    version('1.2.2', '9fb9d6f63b574e5a4937384fbe6579c1')

    # X11 library dependencies (link/runtime).
    depends_on('libxaw')
    depends_on('libxmu')
    depends_on('libxt@1.0:')
    depends_on('libx11')

    # Build-only tooling.
    depends_on('pkg-config@0.9.0:', type='build')
    depends_on('util-macros', type='build')
|
skosukhin/spack
|
var/spack/repos/builtin/packages/xedit/package.py
|
Python
|
lgpl-2.1
| 1,732
|
# JSON Schema describing the payload accepted when creating a plan.
SCHEMA = {
    "type": "object",
    "properties": {
        "name": {"type": "string"},
        "amount": {"type": "number"},
        "currency_code": {"type": "string"},
        "interval": {"type": "string"},
        "interval_count": {"type": "number"},
        "trial_days": {"type": "number"},
        "limit": {"type": "number"},
        "metadata": {"type": "object"},
    },
    "required": ["name", "amount", "currency_code", "interval",
                 "interval_count"],
}
|
culqi/culqi-python
|
culqi/schemas/plan.py
|
Python
|
mit
| 534
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from urlparse import urljoin
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.contrib.sites.models import Site
from apps.core.models import User
from apps.mail.libs import send_to
from apps.mail import MessageType
def notify_group_mapping_approved(request, group, username):
    """Email *username* that their mapping request for *group* was approved."""
    recipient = get_object_or_404(User, username=username)
    url = request.build_absolute_uri(reverse('reservations'))
    return send_to(recipient,
                   MessageType.GROUP_MAPPING_APPROVED,
                   group=group,
                   reservations_url=url)
def notify_rsvp(request, user, event):
    """Email *user* confirming their RSVP to *event*."""
    detail_path = reverse('event_detail', kwargs={
        'group_slug': event.group.slug,
        'event_slug': event.slug,
    })
    return send_to(user,
                   MessageType.RSVP,
                   event=event,
                   event_url=request.build_absolute_uri(detail_path))
def send_reservation_reminder(user_id, **kwargs):
    """Email the user a reminder about their reservations.

    Runs outside a request cycle, so the URL is built from the current
    Site's domain rather than from a request object.
    """
    recipient = get_object_or_404(User, id=user_id)
    site_root = 'http://%s' % Site.objects.get_current().domain
    url = urljoin(site_root, reverse('reservations'))
    return send_to(recipient, MessageType.RESERVATION_REMINDER,
                   reservations_url=url,
                   **kwargs)
|
RickMohr/nyc-trees
|
src/nyc_trees/apps/mail/views.py
|
Python
|
apache-2.0
| 1,504
|
import math
from unittest import TestCase
import datetime
import plotly.figure_factory as ff
import plotly.io as pio
from plotly.exceptions import PlotlyError
from plotly.tests.test_optional.optional_utils import NumpyTestUtilsMixin
from plotly.graph_objs import graph_objs
from plotly.tests.utils import TestCaseNoTemplate
class TestQuiver(TestCaseNoTemplate, NumpyTestUtilsMixin):
    """Input-validation and figure-content tests for ff.create_quiver."""

    def test_unequal_xy_length(self):
        """create_quiver rejects x and y of different lengths."""
        # check: PlotlyError if x and y are not the same length
        kwargs = {"x": [1, 2], "y": [1], "u": [1, 2], "v": [1, 2]}
        self.assertRaises(PlotlyError, ff.create_quiver, **kwargs)

    def test_wrong_scale(self):
        """create_quiver rejects non-positive scale values."""
        # check: ValueError if scale is <= 0
        kwargs = {"x": [1, 2], "y": [1, 2], "u": [1, 2], "v": [1, 2], "scale": -1}
        self.assertRaises(ValueError, ff.create_quiver, **kwargs)
        kwargs = {"x": [1, 2], "y": [1, 2], "u": [1, 2], "v": [1, 2], "scale": 0}
        self.assertRaises(ValueError, ff.create_quiver, **kwargs)

    def test_wrong_arrow_scale(self):
        """create_quiver rejects non-positive arrow_scale values."""
        # check: ValueError if arrow_scale is <= 0
        kwargs = {"x": [1, 2], "y": [1, 2], "u": [1, 2], "v": [1, 2], "arrow_scale": -1}
        self.assertRaises(ValueError, ff.create_quiver, **kwargs)
        kwargs = {"x": [1, 2], "y": [1, 2], "u": [1, 2], "v": [1, 2], "arrow_scale": 0}
        self.assertRaises(ValueError, ff.create_quiver, **kwargs)

    def test_one_arrow(self):
        """A single (x, y, u, v) point yields one shaft + arrow-head trace."""
        # we should be able to create a single arrow using create_quiver
        quiver = ff.create_quiver(x=[1], y=[1], u=[1], v=[1], scale=1)
        # None entries separate the line segments of the single scatter trace.
        expected_quiver = {
            "data": [
                {
                    "mode": "lines",
                    "type": "scatter",
                    "x": [1, 2, None, 1.820698256761928, 2, 1.615486170766527, None],
                    "y": [1, 2, None, 1.615486170766527, 2, 1.820698256761928, None],
                }
            ],
            "layout": {"hovermode": "closest"},
        }
        self.assert_fig_equal(quiver["data"][0], expected_quiver["data"][0])
        self.assert_fig_equal(quiver["layout"], expected_quiver["layout"])

    def test_more_kwargs(self):
        """Two arrows with custom arrow_scale, angle and line styling."""
        # we should be able to create 2 arrows and change the arrow_scale,
        # angle, and arrow using create_quiver
        quiver = ff.create_quiver(
            x=[1, 2],
            y=[1, 2],
            u=[math.cos(1), math.cos(2)],
            v=[math.sin(1), math.sin(2)],
            arrow_scale=0.4,
            angle=math.pi / 6,
            line=graph_objs.scatter.Line(color="purple", width=3),
        )
        expected_quiver = {
            "data": [
                {
                    "line": {"color": "purple", "width": 3},
                    "mode": "lines",
                    "type": "scatter",
                    "x": [
                        1,
                        1.0540302305868139,
                        None,
                        2,
                        1.9583853163452858,
                        None,
                        1.052143029378767,
                        1.0540302305868139,
                        1.0184841899864512,
                        None,
                        1.9909870141679737,
                        1.9583853163452858,
                        1.9546151170949464,
                        None,
                    ],
                    "y": [
                        1,
                        1.0841470984807897,
                        None,
                        2,
                        2.0909297426825684,
                        None,
                        1.044191642387781,
                        1.0841470984807897,
                        1.0658037346225067,
                        None,
                        2.0677536925644366,
                        2.0909297426825684,
                        2.051107819102551,
                        None,
                    ],
                }
            ],
            "layout": {"hovermode": "closest"},
        }
        self.assert_fig_equal(quiver["data"][0], expected_quiver["data"][0])
        self.assert_fig_equal(quiver["layout"], expected_quiver["layout"])
class TestFinanceCharts(TestCaseNoTemplate, NumpyTestUtilsMixin):
def test_unequal_ohlc_length(self):
# check: PlotlyError if open, high, low, close are not the same length
# for TraceFactory.create_ohlc and TraceFactory.create_candlestick
kwargs = {
"open": [1],
"high": [1, 3],
"low": [1, 2],
"close": [1, 2],
"direction": ["increasing"],
}
self.assertRaises(PlotlyError, ff.create_ohlc, **kwargs)
self.assertRaises(PlotlyError, ff.create_candlestick, **kwargs)
kwargs = {
"open": [1, 2],
"high": [1, 2, 3],
"low": [1, 2],
"close": [1, 2],
"direction": ["decreasing"],
}
self.assertRaises(PlotlyError, ff.create_ohlc, **kwargs)
self.assertRaises(PlotlyError, ff.create_candlestick, **kwargs)
kwargs = {"open": [1, 2], "high": [2, 3], "low": [0], "close": [1, 3]}
self.assertRaises(PlotlyError, ff.create_ohlc, **kwargs)
self.assertRaises(PlotlyError, ff.create_candlestick, **kwargs)
kwargs = {"open": [1, 2], "high": [2, 3], "low": [1, 2], "close": [1]}
self.assertRaises(PlotlyError, ff.create_ohlc, **kwargs)
self.assertRaises(PlotlyError, ff.create_candlestick, **kwargs)
def test_direction_arg(self):
# check: PlotlyError if direction is not defined as "increasing" or
# "decreasing" for TraceFactory.create_ohlc and
# TraceFactory.create_candlestick
kwargs = {
"open": [1, 4],
"high": [1, 5],
"low": [1, 2],
"close": [1, 2],
"direction": ["inc"],
}
self.assertRaisesRegexp(
PlotlyError,
"direction must be defined as " "'increasing', 'decreasing', or 'both'",
ff.create_ohlc,
**kwargs
)
self.assertRaisesRegexp(
PlotlyError,
"direction must be defined as " "'increasing', 'decreasing', or 'both'",
ff.create_candlestick,
**kwargs
)
kwargs = {
"open": [1, 2],
"high": [1, 3],
"low": [1, 2],
"close": [1, 2],
"direction": ["d"],
}
self.assertRaisesRegexp(
PlotlyError,
"direction must be defined as " "'increasing', 'decreasing', or 'both'",
ff.create_ohlc,
**kwargs
)
self.assertRaisesRegexp(
PlotlyError,
"direction must be defined as " "'increasing', 'decreasing', or 'both'",
ff.create_candlestick,
**kwargs
)
def test_high_highest_value(self):
# check: PlotlyError if the "high" value is less than the corresponding
# open, low, or close value because if the "high" value is not the
# highest (or equal) then the data may have been entered incorrectly.
kwargs = {"open": [2, 3], "high": [4, 2], "low": [1, 1], "close": [1, 2]}
self.assertRaisesRegexp(
PlotlyError,
"Oops! Looks like some of "
"your high values are less "
"the corresponding open, "
"low, or close values. "
"Double check that your data "
"is entered in O-H-L-C order",
ff.create_ohlc,
**kwargs
)
self.assertRaisesRegexp(
PlotlyError,
"Oops! Looks like some of "
"your high values are less "
"the corresponding open, "
"low, or close values. "
"Double check that your data "
"is entered in O-H-L-C order",
ff.create_candlestick,
**kwargs
)
def test_low_lowest_value(self):
# check: PlotlyError if the "low" value is greater than the
# corresponding open, high, or close value because if the "low" value
# is not the lowest (or equal) then the data may have been entered
# incorrectly.
# create_ohlc_increase
kwargs = {"open": [2, 3], "high": [4, 6], "low": [3, 1], "close": [1, 2]}
self.assertRaisesRegexp(
PlotlyError,
"Oops! Looks like some of "
"your low values are greater "
"than the corresponding high"
", open, or close values. "
"Double check that your data "
"is entered in O-H-L-C order",
ff.create_ohlc,
**kwargs
)
self.assertRaisesRegexp(
PlotlyError,
"Oops! Looks like some of "
"your low values are greater "
"than the corresponding high"
", open, or close values. "
"Double check that your data "
"is entered in O-H-L-C order",
ff.create_candlestick,
**kwargs
)
    def test_one_ohlc(self):
        """A single O-H-L-C quadruple yields one increasing stick."""
        # This should create one "increase" (i.e. close > open) ohlc stick
        ohlc = ff.create_ohlc(open=[33.0], high=[33.2], low=[32.7], close=[33.1])
        # Expected figure: one populated "Increasing" trace and one empty
        # "Decreasing" trace (no decreasing sticks in this data).
        expected_ohlc = {
            "layout": {"hovermode": "closest", "xaxis": {"zeroline": False}},
            "data": [
                {
                    "y": [33.0, 33.0, 33.2, 32.7, 33.1, 33.1, None],
                    "line": {"width": 1, "color": "#3D9970"},
                    "showlegend": False,
                    "name": "Increasing",
                    "text": ["Open", "Open", "High", "Low", "Close", "Close", ""],
                    "mode": "lines",
                    "type": "scatter",
                    "x": [-0.2, 0, 0, 0, 0, 0.2, None],
                },
                {
                    "y": [],
                    "line": {"width": 1, "color": "#FF4136"},
                    "showlegend": False,
                    "name": "Decreasing",
                    "text": (),
                    "mode": "lines",
                    "type": "scatter",
                    "x": [],
                },
            ],
        }
        self.assert_fig_equal(
            ohlc["data"][0], expected_ohlc["data"][0], ignore=["uid", "text"]
        )
        self.assert_fig_equal(
            ohlc["data"][1], expected_ohlc["data"][1], ignore=["uid", "text"]
        )
        self.assert_fig_equal(ohlc["layout"], expected_ohlc["layout"])
    def test_one_ohlc_increase(self):
        """direction="increasing" keeps only the increasing trace."""
        # This should create one "increase" (i.e. close > open) ohlc stick
        ohlc_incr = ff.create_ohlc(
            open=[33.0], high=[33.2], low=[32.7], close=[33.1], direction="increasing"
        )
        expected_ohlc_incr = {
            "data": [
                {
                    "line": {"color": "#3D9970", "width": 1},
                    "mode": "lines",
                    "name": "Increasing",
                    "showlegend": False,
                    "text": ["Open", "Open", "High", "Low", "Close", "Close", ""],
                    "type": "scatter",
                    "x": [-0.2, 0, 0, 0, 0, 0.2, None],
                    "y": [33.0, 33.0, 33.2, 32.7, 33.1, 33.1, None],
                }
            ],
            "layout": {"hovermode": "closest", "xaxis": {"zeroline": False}},
        }
        self.assert_fig_equal(ohlc_incr["data"][0], expected_ohlc_incr["data"][0])
        self.assert_fig_equal(ohlc_incr["layout"], expected_ohlc_incr["layout"])
    def test_one_ohlc_decrease(self):
        """direction="decreasing" keeps only the decreasing trace."""
        # This should create one "decrease" (i.e. close < open) ohlc stick
        # (comment fixed: it previously said "increase").
        ohlc_decr = ff.create_ohlc(
            open=[33.0], high=[33.2], low=[30.7], close=[31.1], direction="decreasing"
        )
        expected_ohlc_decr = {
            "data": [
                {
                    "line": {"color": "#FF4136", "width": 1},
                    "mode": "lines",
                    "name": "Decreasing",
                    "showlegend": False,
                    "text": ["Open", "Open", "High", "Low", "Close", "Close", ""],
                    "type": "scatter",
                    "x": [-0.2, 0, 0, 0, 0, 0.2, None],
                    "y": [33.0, 33.0, 33.2, 30.7, 31.1, 31.1, None],
                }
            ],
            "layout": {"hovermode": "closest", "xaxis": {"zeroline": False}},
        }
        self.assert_fig_equal(ohlc_decr["data"][0], expected_ohlc_decr["data"][0])
        self.assert_fig_equal(ohlc_decr["layout"], expected_ohlc_decr["layout"])
# TO-DO: put expected fig in a different file and then call to compare
    def test_one_candlestick(self):
        """A single O-H-L-C quadruple yields one increasing candlestick."""
        # This should create one "increase" (i.e. close > open) candlestick
        can_inc = ff.create_candlestick(
            open=[33.0], high=[33.2], low=[32.7], close=[33.1]
        )
        # Expected figure: a populated "Increasing" box trace and an empty
        # "Decreasing" one.
        exp_can_inc = {
            "data": [
                {
                    "boxpoints": False,
                    "fillcolor": "#3D9970",
                    "line": {"color": "#3D9970"},
                    "name": "Increasing",
                    "showlegend": False,
                    "type": "box",
                    "whiskerwidth": 0,
                    "x": [0, 0, 0, 0, 0, 0],
                    "y": [32.7, 33.0, 33.1, 33.1, 33.1, 33.2],
                },
                {
                    "boxpoints": False,
                    "fillcolor": "#ff4136",
                    "line": {"color": "#ff4136"},
                    "name": "Decreasing",
                    "showlegend": False,
                    "type": "box",
                    "whiskerwidth": 0,
                    "x": [],
                    "y": [],
                },
            ],
            "layout": {},
        }
        self.assert_fig_equal(can_inc["data"][0], exp_can_inc["data"][0])
        self.assert_fig_equal(can_inc["layout"], exp_can_inc["layout"])
def test_datetime_ohlc(self):
    # Check expected outcome for ohlc chart with datetime xaxis
    high_data = [34.20, 34.37, 33.62, 34.25, 35.18, 33.25, 35.37, 34.62]
    low_data = [31.70, 30.75, 32.87, 31.62, 30.81, 32.75, 32.75, 32.87]
    close_data = [34.10, 31.93, 33.37, 33.18, 31.18, 33.10, 32.93, 33.70]
    open_data = [33.01, 33.31, 33.50, 32.06, 34.12, 33.05, 33.31, 33.50]
    x = [
        datetime.datetime(year=2013, month=3, day=4),
        datetime.datetime(year=2013, month=6, day=5),
        datetime.datetime(year=2013, month=9, day=6),
        datetime.datetime(year=2013, month=12, day=4),
        datetime.datetime(year=2014, month=3, day=5),
        datetime.datetime(year=2014, month=6, day=6),
        datetime.datetime(year=2014, month=9, day=4),
        datetime.datetime(year=2014, month=12, day=5),
    ]
    ohlc_d = ff.create_ohlc(open_data, high_data, low_data, close_data, dates=x)
    ex_ohlc_d = {
        "data": [
            {
                "line": {"color": "#3D9970", "width": 1},
                "mode": "lines",
                "name": "Increasing",
                "showlegend": False,
                # one 7-entry group per stick:
                # Open, Open, High, Low, Close, Close, "" separator
                "text": [
                    "Open", "Open", "High", "Low", "Close", "Close", "",
                    "Open", "Open", "High", "Low", "Close", "Close", "",
                    "Open", "Open", "High", "Low", "Close", "Close", "",
                    "Open", "Open", "High", "Low", "Close", "Close", "",
                ],
                "type": "scatter",
                # per stick: open-tick start, four points on the stick's
                # date, close-tick end, then None to break the line
                "x": [
                    datetime.datetime(2013, 2, 14, 4, 48),
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 3, 21, 19, 12),
                    None,
                    datetime.datetime(2013, 11, 16, 4, 48),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2013, 12, 21, 19, 12),
                    None,
                    datetime.datetime(2014, 5, 19, 4, 48),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 6, 23, 19, 12),
                    None,
                    datetime.datetime(2014, 11, 17, 4, 48),
                    datetime.datetime(2014, 12, 5, 0, 0),
                    datetime.datetime(2014, 12, 5, 0, 0),
                    datetime.datetime(2014, 12, 5, 0, 0),
                    datetime.datetime(2014, 12, 5, 0, 0),
                    datetime.datetime(2014, 12, 22, 19, 12),
                    None,
                ],
                # per stick: open, open, high, low, close, close, None
                "y": [
                    33.01, 33.01, 34.2, 31.7, 34.1, 34.1, None,
                    32.06, 32.06, 34.25, 31.62, 33.18, 33.18, None,
                    33.05, 33.05, 33.25, 32.75, 33.1, 33.1, None,
                    33.5, 33.5, 34.62, 32.87, 33.7, 33.7, None,
                ],
            },
            {
                "line": {"color": "#FF4136", "width": 1},
                "mode": "lines",
                "name": "Decreasing",
                "showlegend": False,
                "text": [
                    "Open", "Open", "High", "Low", "Close", "Close", "",
                    "Open", "Open", "High", "Low", "Close", "Close", "",
                    "Open", "Open", "High", "Low", "Close", "Close", "",
                    "Open", "Open", "High", "Low", "Close", "Close", "",
                ],
                "type": "scatter",
                "x": [
                    datetime.datetime(2013, 5, 18, 4, 48),
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 6, 22, 19, 12),
                    None,
                    datetime.datetime(2013, 8, 19, 4, 48),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2013, 9, 23, 19, 12),
                    None,
                    datetime.datetime(2014, 2, 15, 4, 48),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 3, 22, 19, 12),
                    None,
                    datetime.datetime(2014, 8, 17, 4, 48),
                    datetime.datetime(2014, 9, 4, 0, 0),
                    datetime.datetime(2014, 9, 4, 0, 0),
                    datetime.datetime(2014, 9, 4, 0, 0),
                    datetime.datetime(2014, 9, 4, 0, 0),
                    datetime.datetime(2014, 9, 21, 19, 12),
                    None,
                ],
                "y": [
                    33.31, 33.31, 34.37, 30.75, 31.93, 31.93, None,
                    33.5, 33.5, 33.62, 32.87, 33.37, 33.37, None,
                    34.12, 34.12, 35.18, 30.81, 31.18, 31.18, None,
                    33.31, 33.31, 35.37, 32.75, 32.93, 32.93, None,
                ],
            },
        ],
        "layout": {"hovermode": "closest", "xaxis": {"zeroline": False}},
    }
    self.assert_fig_equal(ohlc_d["data"][0], ex_ohlc_d["data"][0])
    self.assert_fig_equal(ohlc_d["data"][1], ex_ohlc_d["data"][1])
    self.assert_fig_equal(ohlc_d["layout"], ex_ohlc_d["layout"])
def test_datetime_candlestick(self):
    # Check expected outcome for candlestick chart with datetime xaxis
    high_data = [34.20, 34.37, 33.62, 34.25, 35.18, 33.25, 35.37, 34.62]
    low_data = [31.70, 30.75, 32.87, 31.62, 30.81, 32.75, 32.75, 32.87]
    close_data = [34.10, 31.93, 33.37, 33.18, 31.18, 33.10, 32.93, 33.70]
    open_data = [33.01, 33.31, 33.50, 32.06, 34.12, 33.05, 33.31, 33.50]
    x = [
        datetime.datetime(year=2013, month=3, day=4),
        datetime.datetime(year=2013, month=6, day=5),
        datetime.datetime(year=2013, month=9, day=6),
        datetime.datetime(year=2013, month=12, day=4),
        datetime.datetime(year=2014, month=3, day=5),
        datetime.datetime(year=2014, month=6, day=6),
        datetime.datetime(year=2014, month=9, day=4),
        datetime.datetime(year=2014, month=12, day=5),
    ]
    candle = ff.create_candlestick(
        open_data, high_data, low_data, close_data, dates=x
    )
    exp_candle = {
        "data": [
            {
                "boxpoints": False,
                "fillcolor": "#3D9970",
                "line": {"color": "#3D9970"},
                "name": "Increasing",
                "showlegend": False,
                "type": "box",
                "whiskerwidth": 0,
                # six entries per candle, all on the candle's date
                "x": [
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 3, 4, 0, 0),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2013, 12, 4, 0, 0),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 6, 6, 0, 0),
                    datetime.datetime(2014, 12, 5, 0, 0),
                    datetime.datetime(2014, 12, 5, 0, 0),
                    datetime.datetime(2014, 12, 5, 0, 0),
                    datetime.datetime(2014, 12, 5, 0, 0),
                    datetime.datetime(2014, 12, 5, 0, 0),
                    datetime.datetime(2014, 12, 5, 0, 0),
                ],
                # per candle the six box values follow the pattern
                # [low, open, close, close, close, high]
                "y": [
                    31.7, 33.01, 34.1, 34.1, 34.1, 34.2,
                    31.62, 32.06, 33.18, 33.18, 33.18, 34.25,
                    32.75, 33.05, 33.1, 33.1, 33.1, 33.25,
                    32.87, 33.5, 33.7, 33.7, 33.7, 34.62,
                ],
            },
            {
                "boxpoints": False,
                "fillcolor": "#FF4136",
                "line": {"color": "#FF4136"},
                "name": "Decreasing",
                "showlegend": False,
                "type": "box",
                "whiskerwidth": 0,
                "x": [
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 6, 5, 0, 0),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2013, 9, 6, 0, 0),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 3, 5, 0, 0),
                    datetime.datetime(2014, 9, 4, 0, 0),
                    datetime.datetime(2014, 9, 4, 0, 0),
                    datetime.datetime(2014, 9, 4, 0, 0),
                    datetime.datetime(2014, 9, 4, 0, 0),
                    datetime.datetime(2014, 9, 4, 0, 0),
                    datetime.datetime(2014, 9, 4, 0, 0),
                ],
                "y": [
                    30.75, 33.31, 31.93, 31.93, 31.93, 34.37,
                    32.87, 33.5, 33.37, 33.37, 33.37, 33.62,
                    30.81, 34.12, 31.18, 31.18, 31.18, 35.18,
                    32.75, 33.31, 32.93, 32.93, 32.93, 35.37,
                ],
            },
        ],
        "layout": {},
    }
    self.assert_fig_equal(candle["data"][0], exp_candle["data"][0])
    self.assert_fig_equal(candle["data"][1], exp_candle["data"][1])
    self.assert_fig_equal(candle["layout"], exp_candle["layout"])
class TestAnnotatedHeatmap(TestCaseNoTemplate, NumpyTestUtilsMixin):
    def test_unequal_z_text_size(self):
        """PlotlyError must be raised when z and annotation_text differ in shape."""
        mismatched_inputs = (
            {"z": [[1, 2], [1, 2]], "annotation_text": [[1, 2, 3], [1]]},
            {"z": [[1], [1]], "annotation_text": [[1], [1], [1]]},
        )
        for kwargs in mismatched_inputs:
            self.assertRaises(PlotlyError, ff.create_annotated_heatmap, **kwargs)
def test_incorrect_x_size(self):
    """PlotlyError must be raised when x has the wrong number of labels."""
    self.assertRaises(
        PlotlyError,
        ff.create_annotated_heatmap,
        z=[[1, 2], [1, 2]],
        x=["A"],
    )
def test_incorrect_y_size(self):
    """PlotlyError must be raised when y has the wrong number of labels."""
    self.assertRaises(
        PlotlyError,
        ff.create_annotated_heatmap,
        z=[[1, 2], [1, 2]],
        y=[1, 2, 3],
    )
def test_simple_annotated_heatmap(self):
    # we should be able to create a heatmap with annotated values with a
    # logical text color
    z = [[1, 0, 0.5], [0.25, 0.75, 0.45]]
    a_heat = ff.create_annotated_heatmap(z)
    expected_a_heat = {
        "data": [
            {
                # NOTE(review): these stops appear to be the library's
                # default colorscale — confirm against the ff source
                "colorscale": [
                    [0.0, "#0d0887"],
                    [0.1111111111111111, "#46039f"],
                    [0.2222222222222222, "#7201a8"],
                    [0.3333333333333333, "#9c179e"],
                    [0.4444444444444444, "#bd3786"],
                    [0.5555555555555556, "#d8576b"],
                    [0.6666666666666666, "#ed7953"],
                    [0.7777777777777778, "#fb9f3a"],
                    [0.8888888888888888, "#fdca26"],
                    [1.0, "#f0f921"],
                ],
                "showscale": False,
                "reversescale": False,
                "type": "heatmap",
                "z": [[1, 0, 0.5], [0.25, 0.75, 0.45]],
            }
        ],
        "layout": {
            # one annotation per cell; font color switches between black
            # (#000000) and white (#FFFFFF) depending on the cell value
            "annotations": [
                {"font": {"color": "#000000"}, "showarrow": False, "text": "1", "x": 0, "xref": "x", "y": 0, "yref": "y"},
                {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "0", "x": 1, "xref": "x", "y": 0, "yref": "y"},
                {"font": {"color": "#000000"}, "showarrow": False, "text": "0.5", "x": 2, "xref": "x", "y": 0, "yref": "y"},
                {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "0.25", "x": 0, "xref": "x", "y": 1, "yref": "y"},
                {"font": {"color": "#000000"}, "showarrow": False, "text": "0.75", "x": 1, "xref": "x", "y": 1, "yref": "y"},
                {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "0.45", "x": 2, "xref": "x", "y": 1, "yref": "y"},
            ],
            "xaxis": {
                "gridcolor": "rgb(0, 0, 0)",
                "showticklabels": False,
                "side": "top",
                "ticks": "",
            },
            "yaxis": {"showticklabels": False, "ticks": "", "ticksuffix": " "},
        },
    }
    self.assert_fig_equal(a_heat["data"][0], expected_a_heat["data"][0])
    self.assert_fig_equal(a_heat["layout"], expected_a_heat["layout"])
def test_annotated_heatmap_kwargs(self):
    # we should be able to create an annotated heatmap with x and y axes
    # labels, a defined colorscale, and supplied text.
    z = [[1, 0], [0.25, 0.75], [0.45, 0.5]]
    text = [["first", "second"], ["third", "fourth"], ["fifth", "sixth"]]
    a = ff.create_annotated_heatmap(
        z,
        x=["A", "B"],
        y=["One", "Two", "Three"],
        annotation_text=text,
        colorscale=[[0, "rgb(255,255,255)"], [1, "#e6005a"]],
    )
    expected_a = {
        "data": [
            {
                "colorscale": [[0, "rgb(255,255,255)"], [1, "#e6005a"]],
                "showscale": False,
                "reversescale": False,
                "type": "heatmap",
                "x": ["A", "B"],
                "y": ["One", "Two", "Three"],
                "z": [[1, 0], [0.25, 0.75], [0.45, 0.5]],
            }
        ],
        "layout": {
            # annotations use the supplied text and are positioned by the
            # categorical x / y labels rather than indices
            "annotations": [
                {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "first", "x": "A", "xref": "x", "y": "One", "yref": "y"},
                {"font": {"color": "#000000"}, "showarrow": False, "text": "second", "x": "B", "xref": "x", "y": "One", "yref": "y"},
                {"font": {"color": "#000000"}, "showarrow": False, "text": "third", "x": "A", "xref": "x", "y": "Two", "yref": "y"},
                {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "fourth", "x": "B", "xref": "x", "y": "Two", "yref": "y"},
                {"font": {"color": "#000000"}, "showarrow": False, "text": "fifth", "x": "A", "xref": "x", "y": "Three", "yref": "y"},
                {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "sixth", "x": "B", "xref": "x", "y": "Three", "yref": "y"},
            ],
            "xaxis": {
                "dtick": 1,
                "gridcolor": "rgb(0, 0, 0)",
                "side": "top",
                "ticks": "",
            },
            "yaxis": {"dtick": 1, "ticks": "", "ticksuffix": " "},
        },
    }
    self.assert_fig_equal(a["data"][0], expected_a["data"][0])
    self.assert_fig_equal(a["layout"], expected_a["layout"])
def test_annotated_heatmap_reversescale(self):
    # we should be able to create an annotated heatmap with x and y axes
    # labels, a defined colorscale, supplied text, and reversescale=True;
    # reversing the scale flips which cells get black vs white annotations
    z = [[1, 0], [0.25, 0.75], [0.45, 0.5]]
    text = [["first", "second"], ["third", "fourth"], ["fifth", "sixth"]]
    a = ff.create_annotated_heatmap(
        z,
        x=["A", "B"],
        y=["One", "Two", "Three"],
        annotation_text=text,
        reversescale=True,
        colorscale=[[0, "rgb(255,255,255)"], [1, "#e6005a"]],
    )
    expected_a = {
        "data": [
            {
                "colorscale": [[0, "rgb(255,255,255)"], [1, "#e6005a"]],
                "showscale": False,
                "reversescale": True,
                "type": "heatmap",
                "x": ["A", "B"],
                "y": ["One", "Two", "Three"],
                "z": [[1, 0], [0.25, 0.75], [0.45, 0.5]],
            }
        ],
        "layout": {
            "annotations": [
                {"font": {"color": "#000000"}, "showarrow": False, "text": "first", "x": "A", "xref": "x", "y": "One", "yref": "y"},
                {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "second", "x": "B", "xref": "x", "y": "One", "yref": "y"},
                {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "third", "x": "A", "xref": "x", "y": "Two", "yref": "y"},
                {"font": {"color": "#000000"}, "showarrow": False, "text": "fourth", "x": "B", "xref": "x", "y": "Two", "yref": "y"},
                {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "fifth", "x": "A", "xref": "x", "y": "Three", "yref": "y"},
                {"font": {"color": "#000000"}, "showarrow": False, "text": "sixth", "x": "B", "xref": "x", "y": "Three", "yref": "y"},
            ],
            "xaxis": {
                "dtick": 1,
                "gridcolor": "rgb(0, 0, 0)",
                "side": "top",
                "ticks": "",
            },
            "yaxis": {"dtick": 1, "ticks": "", "ticksuffix": " "},
        },
    }
    self.assert_fig_equal(a["data"][0], expected_a["data"][0])
    self.assert_fig_equal(a["layout"], expected_a["layout"])
def test_bug_1300(self):
    # regression test: named colorscale + showscale + reversescale
    # https://github.com/plotly/plotly.py/issues/1300
    sub_z = [[0.1, 0.0, 0.0], [0.0, 1.0, 0.1]]
    # sub_z = sub_z.tolist()
    # Standard scale direction
    fig = ff.create_annotated_heatmap(
        sub_z, colorscale="Greens", showscale=True, reversescale=True
    )
    expected = graph_objs.Figure(
        {
            "data": [
                {
                    "colorscale": [
                        [0.0, "rgb(247,252,245)"],
                        [0.125, "rgb(229,245,224)"],
                        [0.25, "rgb(199,233,192)"],
                        [0.375, "rgb(161,217,155)"],
                        [0.5, "rgb(116,196,118)"],
                        [0.625, "rgb(65,171,93)"],
                        [0.75, "rgb(35,139,69)"],
                        [0.875, "rgb(0,109,44)"],
                        [1.0, "rgb(0,68,27)"],
                    ],
                    "reversescale": True,
                    "showscale": True,
                    "type": "heatmap",
                    "z": [[0.1, 0.0, 0.0], [0.0, 1.0, 0.1]],
                }
            ],
            "layout": {
                # with the reversed scale only the 1.0 cell is light enough
                # for black annotation text
                "annotations": [
                    {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "0.1", "x": 0, "xref": "x", "y": 0, "yref": "y"},
                    {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "0.0", "x": 1, "xref": "x", "y": 0, "yref": "y"},
                    {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "0.0", "x": 2, "xref": "x", "y": 0, "yref": "y"},
                    {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "0.0", "x": 0, "xref": "x", "y": 1, "yref": "y"},
                    {"font": {"color": "#000000"}, "showarrow": False, "text": "1.0", "x": 1, "xref": "x", "y": 1, "yref": "y"},
                    {"font": {"color": "#FFFFFF"}, "showarrow": False, "text": "0.1", "x": 2, "xref": "x", "y": 1, "yref": "y"},
                ],
                "xaxis": {
                    "gridcolor": "rgb(0, 0, 0)",
                    "showticklabels": False,
                    "side": "top",
                    "ticks": "",
                },
                "yaxis": {"showticklabels": False, "ticks": "", "ticksuffix": " "},
            },
        }
    )
    # Remove uids so the comparison ignores the auto-generated trace ids
    for trace in fig.data:
        trace.update(uid=None)
    for trace in expected.data:
        trace.update(uid=None)
    # Perform comparison
    self.assert_fig_equal(fig, expected)
class TestTable(TestCaseNoTemplate, NumpyTestUtilsMixin):
    def test_fontcolor_input(self):
        """ff.create_table must raise ValueError on malformed fontcolor input."""
        # both a bare hex string and a two-element color list are rejected
        for bad_fontcolor in ("#000000", ["red", "blue"]):
            self.assertRaises(
                ValueError,
                ff.create_table,
                table_text=[["one", "two"], [1, 2], [1, 2], [1, 2]],
                fontcolor=bad_fontcolor,
            )
def test_simple_table(self):
    # we should be able to create a striped table by supplying a text matrix
    text = [
        ["Country", "Year", "Population"],
        ["US", 2000, 282200000],
        ["Canada", 2000, 27790000],
        ["US", 1980, 226500000],
    ]
    table = ff.create_table(text)
    expected_table = {
        "data": [
            {
                # the table body is a heatmap trace; z values 0 / 0.5 / 1
                # select the header color and the alternating row stripes
                "colorscale": [[0, "#00083e"], [0.5, "#ededee"], [1, "#ffffff"]],
                "hoverinfo": "none",
                "opacity": 0.75,
                "showscale": False,
                "type": "heatmap",
                "z": [[0, 0, 0], [0.5, 0.5, 0.5], [1, 1, 1], [0.5, 0.5, 0.5]],
            }
        ],
        "layout": {
            # one annotation per cell; header text is bold and white
            "annotations": [
                {"align": "left", "font": {"color": "#ffffff"}, "showarrow": False, "text": "<b>Country</b>", "x": -0.45, "xanchor": "left", "xref": "x", "y": 0, "yref": "y"},
                {"align": "left", "font": {"color": "#ffffff"}, "showarrow": False, "text": "<b>Year</b>", "x": 0.55, "xanchor": "left", "xref": "x", "y": 0, "yref": "y"},
                {"align": "left", "font": {"color": "#ffffff"}, "showarrow": False, "text": "<b>Population</b>", "x": 1.55, "xanchor": "left", "xref": "x", "y": 0, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "US", "x": -0.45, "xanchor": "left", "xref": "x", "y": 1, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "2000", "x": 0.55, "xanchor": "left", "xref": "x", "y": 1, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "282200000", "x": 1.55, "xanchor": "left", "xref": "x", "y": 1, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "Canada", "x": -0.45, "xanchor": "left", "xref": "x", "y": 2, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "2000", "x": 0.55, "xanchor": "left", "xref": "x", "y": 2, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "27790000", "x": 1.55, "xanchor": "left", "xref": "x", "y": 2, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "US", "x": -0.45, "xanchor": "left", "xref": "x", "y": 3, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "1980", "x": 0.55, "xanchor": "left", "xref": "x", "y": 3, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "226500000", "x": 1.55, "xanchor": "left", "xref": "x", "y": 3, "yref": "y"},
            ],
            "height": 170,
            "margin": {"b": 0, "l": 0, "r": 0, "t": 0},
            "xaxis": {
                "dtick": 1,
                "gridwidth": 2,
                "showticklabels": False,
                "tick0": -0.5,
                "ticks": "",
                "zeroline": False,
            },
            "yaxis": {
                # reversed so the header row renders at the top
                "autorange": "reversed",
                "dtick": 1,
                "gridwidth": 2,
                "showticklabels": False,
                "tick0": 0.5,
                "ticks": "",
                "zeroline": False,
            },
        },
    }
    self.assert_fig_equal(table["data"][0], expected_table["data"][0])
    self.assert_fig_equal(table["layout"], expected_table["layout"])
def test_table_with_index(self):
    # we should be able to create a striped table where the first column
    # matches the coloring of the header
    text = [
        ["Country", "Year", "Population"],
        ["US", 2000, 282200000],
        ["Canada", 2000, 27790000],
    ]
    index_table = ff.create_table(text, index=True, index_title="Title")
    exp_index_table = {
        "data": [
            {
                "colorscale": [[0, "#00083e"], [0.5, "#ededee"], [1, "#ffffff"]],
                "hoverinfo": "none",
                "opacity": 0.75,
                "showscale": False,
                "type": "heatmap",
                # first column stays at z == 0 (header color) on every row
                "z": [[0, 0, 0], [0, 0.5, 0.5], [0, 1, 1]],
            }
        ],
        "layout": {
            # index-column cells are bold and white like the header
            "annotations": [
                {"align": "left", "font": {"color": "#ffffff"}, "showarrow": False, "text": "<b>Country</b>", "x": -0.45, "xanchor": "left", "xref": "x", "y": 0, "yref": "y"},
                {"align": "left", "font": {"color": "#ffffff"}, "showarrow": False, "text": "<b>Year</b>", "x": 0.55, "xanchor": "left", "xref": "x", "y": 0, "yref": "y"},
                {"align": "left", "font": {"color": "#ffffff"}, "showarrow": False, "text": "<b>Population</b>", "x": 1.55, "xanchor": "left", "xref": "x", "y": 0, "yref": "y"},
                {"align": "left", "font": {"color": "#ffffff"}, "showarrow": False, "text": "<b>US</b>", "x": -0.45, "xanchor": "left", "xref": "x", "y": 1, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "2000", "x": 0.55, "xanchor": "left", "xref": "x", "y": 1, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "282200000", "x": 1.55, "xanchor": "left", "xref": "x", "y": 1, "yref": "y"},
                {"align": "left", "font": {"color": "#ffffff"}, "showarrow": False, "text": "<b>Canada</b>", "x": -0.45, "xanchor": "left", "xref": "x", "y": 2, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "2000", "x": 0.55, "xanchor": "left", "xref": "x", "y": 2, "yref": "y"},
                {"align": "left", "font": {"color": "#000000"}, "showarrow": False, "text": "27790000", "x": 1.55, "xanchor": "left", "xref": "x", "y": 2, "yref": "y"},
            ],
            "height": 140,
            "margin": {"b": 0, "l": 0, "r": 0, "t": 0},
            "xaxis": {
                "dtick": 1,
                "gridwidth": 2,
                "showticklabels": False,
                "tick0": -0.5,
                "ticks": "",
                "zeroline": False,
            },
            "yaxis": {
                "autorange": "reversed",
                "dtick": 1,
                "gridwidth": 2,
                "showticklabels": False,
                "tick0": 0.5,
                "ticks": "",
                "zeroline": False,
            },
        },
    }
    self.assert_fig_equal(index_table["data"][0], exp_index_table["data"][0])
    self.assert_fig_equal(index_table["layout"], exp_index_table["layout"])
class TestGantt(TestCaseNoTemplate, NumpyTestUtilsMixin):
    def test_validate_gantt(self):
        """Validate the basic input checks of ff.create_gantt.

        Each malformed input must raise PlotlyError with a message matching
        the pattern the figure factory emits.
        """
        # index_col must name a real column/key of the input
        df = [
            {
                "Task": "Job A",
                "Start": "2009-02-01",
                "Finish": "2009-08-30",
                "Complete": "a",
            }
        ]
        pattern2 = (
            "In order to use an indexing column and assign colors to "
            "the values of the index, you must choose an actual "
            "column name in the dataframe or key if a list of "
            "dictionaries is being used."
        )
        # assertRaisesRegex: the assertRaisesRegexp alias is deprecated and
        # was removed in Python 3.12
        self.assertRaisesRegex(
            PlotlyError, pattern2, ff.create_gantt, df, index_col="foo"
        )
        # a bare string is neither a dataframe nor a list of dictionaries
        pattern3 = "You must input either a dataframe or a list of dictionaries."
        self.assertRaisesRegex(PlotlyError, pattern3, ff.create_gantt, "foo")
        # an empty list has no rows to chart
        pattern4 = "Your list is empty. It must contain at least one dictionary."
        self.assertRaisesRegex(PlotlyError, pattern4, ff.create_gantt, [])
        # every list element must be a dictionary
        pattern5 = "Your list must only include dictionaries."
        self.assertRaisesRegex(PlotlyError, pattern5, ff.create_gantt, ["foo"])
def test_gantt_index(self):
    """Validate index_col handling in ff.create_gantt."""
    # index_col must name an existing column/key
    df = [
        {
            "Task": "Job A",
            "Start": "2009-02-01",
            "Finish": "2009-08-30",
            "Complete": 50,
        }
    ]
    pattern = (
        "In order to use an indexing column and assign colors to "
        "the values of the index, you must choose an actual "
        "column name in the dataframe or key if a list of "
        "dictionaries is being used."
    )
    # assertRaisesRegex replaces the assertRaisesRegexp alias removed in
    # Python 3.12
    self.assertRaisesRegex(
        PlotlyError, pattern, ff.create_gantt, df, index_col="foo"
    )
    # the index column must be homogeneously typed: mixing the string "a"
    # with the number 50 is rejected
    df = [
        {
            "Task": "Job A",
            "Start": "2009-02-01",
            "Finish": "2009-08-30",
            "Complete": "a",
        },
        {
            "Task": "Job A",
            "Start": "2009-02-01",
            "Finish": "2009-08-30",
            "Complete": 50,
        },
    ]
    pattern2 = (
        "Error in indexing column. Make sure all entries of each "
        "column are all numbers or all strings."
    )
    self.assertRaisesRegex(
        PlotlyError, pattern2, ff.create_gantt, df, index_col="Complete"
    )
def test_gantt_validate_colors(self):
    """Validate the `colors` argument handling of ff.create_gantt."""
    df = [
        {
            "Task": "Job A",
            "Start": "2009-02-01",
            "Finish": "2009-08-30",
            "Complete": 75,
            "Resource": "A",
        },
        {
            "Task": "Job B",
            "Start": "2009-02-01",
            "Finish": "2009-08-30",
            "Complete": 50,
            "Resource": "B",
        },
    ]
    # rgb string components must not exceed 255
    pattern = "Whoops! The elements in your rgb colors tuples cannot exceed 255.0."
    # assertRaisesRegex replaces the assertRaisesRegexp alias removed in
    # Python 3.12
    self.assertRaisesRegex(
        PlotlyError,
        pattern,
        ff.create_gantt,
        df,
        index_col="Complete",
        colors="rgb(300,1,1)",
    )
    # an unparseable color string is rejected outright
    self.assertRaises(
        PlotlyError, ff.create_gantt, df, index_col="Complete", colors="foo"
    )
    # color tuple components must not exceed 1.0
    pattern2 = "Whoops! The elements in your colors tuples cannot exceed 1.0."
    self.assertRaisesRegex(
        PlotlyError,
        pattern2,
        ff.create_gantt,
        df,
        index_col="Complete",
        colors=(2, 1, 1),
    )
    # verify that if colors is a dictionary, its keys span all the
    # values in the index column
    colors_dict = {75: "rgb(1, 2, 3)"}
    pattern3 = (
        "If you are using colors as a dictionary, all of its "
        "keys must be all the values in the index column."
    )
    self.assertRaisesRegex(
        PlotlyError,
        pattern3,
        ff.create_gantt,
        df,
        index_col="Complete",
        colors=colors_dict,
    )
    # check: index is set if colors is a dictionary
    colors_dict_good = {50: "rgb(1, 2, 3)", 75: "rgb(5, 10, 15)"}
    # NOTE: "dictioanry" reproduces the typo in the library's actual error
    # message; correcting it here would break the regex match
    pattern4 = (
        "Error. You have set colors to a dictionary but have not "
        "picked an index. An index is required if you are "
        "assigning colors to particular values in a dictioanry."
    )
    self.assertRaisesRegex(
        PlotlyError, pattern4, ff.create_gantt, df, colors=colors_dict_good
    )
    # check: number of colors is equal to or greater than number of
    # unique index string values
    pattern5 = (
        "Error. The number of colors in 'colors' must be no less "
        "than the number of unique index values in your group "
        "column."
    )
    self.assertRaisesRegex(
        PlotlyError,
        pattern5,
        ff.create_gantt,
        df,
        index_col="Resource",
        colors=["#ffffff"],
    )
    # check: if index is numeric, colors has at least 2 colors in it
    pattern6 = (
        "You must use at least 2 colors in 'colors' if you "
        "are using a colorscale. However only the first two "
        "colors given will be used for the lower and upper "
        "bounds on the colormap."
    )
    self.assertRaisesRegex(
        PlotlyError,
        pattern6,
        ff.create_gantt,
        df,
        index_col="Complete",
        colors=["#ffffff"],
    )
def test_gannt_groups_and_descriptions(self):
    # check if grouped gantt chart matches with expected output
    # NOTE(review): "gannt" misspells "gantt"; the name is kept because
    # renaming a test method would change the suite's public surface
    df = [
        dict(Task="Task A", Description="Task A - 1", Start="2008-10-05", Finish="2009-04-15", IndexCol="TA"),
        dict(Task="Task B", Description="Task B - 1", Start="2008-12-06", Finish="2009-03-15", IndexCol="TB"),
        dict(Task="Task C", Description="Task C - 1", Start="2008-09-07", Finish="2009-03-15", IndexCol="TC"),
        dict(Task="Task C", Description="Task C - 2", Start="2009-05-08", Finish="2009-04-15", IndexCol="TC"),
        dict(Task="Task A", Description="Task A - 2", Start="2009-04-20", Finish="2009-05-30", IndexCol="TA"),
    ]
    test_gantt_chart = ff.create_gantt(
        df,
        colors=dict(TA="rgb(220, 0, 0)", TB="rgb(170, 14, 200)", TC=(1, 0.9, 0.16)),
        show_colorbar=True,
        index_col="IndexCol",
        group_tasks=True,
    )
    exp_gantt_chart = graph_objs.Figure(
        **{
            "layout": {
                "showlegend": True,
                "yaxis": {
                    "range": [-1, 4],
                    "zeroline": False,
                    "ticktext": ["Task C", "Task B", "Task A"],
                    "tickvals": [0, 1, 2],
                    "autorange": False,
                    "showgrid": False,
                },
                "title": "Gantt Chart",
                "height": 600,
                "shapes": [],
                "width": 900,
                "xaxis": {
                    "zeroline": False,
                    "rangeselector": {
                        "buttons": [
                            {"count": 7, "step": "day", "stepmode": "backward", "label": "1w"},
                            {"count": 1, "step": "month", "stepmode": "backward", "label": "1m"},
                            {"count": 6, "step": "month", "stepmode": "backward", "label": "6m"},
                            {"count": 1, "step": "year", "stepmode": "todate", "label": "YTD"},
                            {"count": 1, "step": "year", "stepmode": "backward", "label": "1y"},
                            {"step": "all"},
                        ]
                    },
                    "type": "date",
                    "showgrid": False,
                },
                "hovermode": "closest",
            },
            "data": [
                {
                    # one filled polygon for the single TB bar
                    "legendgroup": "rgb(170, 14, 200)",
                    "name": "TB",
                    "fillcolor": "rgb(170, 14, 200)",
                    "mode": "none",
                    "hoverinfo": "name",
                    "y": [0.8, 0.8, 1.2, 1.2],
                    "x": ["2008-12-06", "2009-03-15", "2009-03-15", "2008-12-06"],
                    "fill": "toself",
                },
                {
                    # both TA bars share one trace; None splits the polygons
                    "legendgroup": "rgb(220, 0, 0)",
                    "name": "TA",
                    "fillcolor": "rgb(220, 0, 0)",
                    "mode": "none",
                    "hoverinfo": "name",
                    "y": [1.8, 1.8, 2.2, 2.2, None, 1.8, 1.8, 2.2, 2.2],
                    "x": ["2008-10-05", "2009-04-15", "2009-04-15", "2008-10-05", "2008-10-05", "2009-04-20", "2009-05-30", "2009-05-30", "2009-04-20"],
                    "fill": "toself",
                },
                {
                    # the TC float tuple (1, 0.9, 0.16) is normalised to an
                    # rgb string
                    "legendgroup": "rgb(255, 230, 41)",
                    "name": "TC",
                    "fillcolor": "rgb(255, 230, 41)",
                    "mode": "none",
                    "hoverinfo": "name",
                    "y": [-0.2, -0.2, 0.2, 0.2, None, -0.2, -0.2, 0.2, 0.2],
                    "x": ["2008-09-07", "2009-03-15", "2009-03-15", "2008-09-07", "2008-09-07", "2009-05-08", "2009-04-15", "2009-04-15", "2009-05-08"],
                    "fill": "toself",
                },
                {
                    # invisible (opacity 0) marker traces carry the
                    # per-task Description hover text
                    "showlegend": False,
                    "legendgroup": "rgb(170, 14, 200)",
                    "name": "",
                    "text": ["Task B - 1", "Task B - 1"],
                    "y": [1, 1],
                    "mode": "markers",
                    "marker": {"opacity": 0, "color": "rgb(170, 14, 200)", "size": 1},
                    "x": ["2008-12-06", "2009-03-15"],
                },
                {
                    "showlegend": False,
                    "legendgroup": "rgb(220, 0, 0)",
                    "name": "",
                    "text": ["Task A - 1", "Task A - 1", "Task A - 2", "Task A - 2"],
                    "y": [2, 2, 2, 2],
                    "mode": "markers",
                    "marker": {"opacity": 0, "color": "rgb(220, 0, 0)", "size": 1},
                    "x": ["2008-10-05", "2009-04-15", "2009-04-20", "2009-05-30"],
                },
                {
                    "showlegend": False,
                    "legendgroup": "rgb(255, 230, 41)",
                    "name": "",
                    "text": ["Task C - 1", "Task C - 1", "Task C - 2", "Task C - 2"],
                    "y": [0, 0, 0, 0],
                    "mode": "markers",
                    "marker": {"opacity": 0, "color": "rgb(255, 230, 41)", "size": 1},
                    "x": ["2008-09-07", "2009-03-15", "2009-05-08", "2009-04-15"],
                },
            ],
        }
    )
    self.assert_fig_equal(test_gantt_chart["data"][0], exp_gantt_chart["data"][0])
    self.assert_fig_equal(test_gantt_chart["data"][1], exp_gantt_chart["data"][1])
    self.assert_fig_equal(test_gantt_chart["data"][2], exp_gantt_chart["data"][2])
    self.assert_fig_equal(test_gantt_chart["data"][3], exp_gantt_chart["data"][3])
def test_gantt_all_args(self):
    # check if gantt chart matches with expected output
    df = [
        {"Task": "Run", "Start": "2010-01-01", "Finish": "2011-02-02", "Complete": 0},
        {"Task": "Fast", "Start": "2011-01-01", "Finish": "2012-06-05", "Complete": 25},
    ]
    test_gantt_chart = ff.create_gantt(
        df,
        colors="Blues",
        index_col="Complete",
        reverse_colors=True,
        title="Title",
        bar_width=0.5,
        showgrid_x=True,
        showgrid_y=True,
        height=500,
        width=500,
    )
    exp_gantt_chart = graph_objs.Figure(
        **{
            "data": [
                {
                    # bars are drawn as filled polygons; fill colors come
                    # from interpolating the (reversed) Blues colorscale
                    "x": ["2011-01-01", "2012-06-05", "2012-06-05", "2011-01-01"],
                    "y": [0.5, 0.5, 1.5, 1.5],
                    "mode": "none",
                    "fill": "toself",
                    "showlegend": False,
                    "hoverinfo": "name",
                    "legendgroup": "rgb(166.25, 167.5, 208.0)",
                    "fillcolor": "rgb(166.25, 167.5, 208.0)",
                    "name": "25",
                },
                {
                    "x": ["2010-01-01", "2011-02-02", "2011-02-02", "2010-01-01"],
                    "y": [-0.5, -0.5, 0.5, 0.5],
                    "mode": "none",
                    "fill": "toself",
                    "showlegend": False,
                    "hoverinfo": "name",
                    "legendgroup": "rgb(220.0, 220.0, 220.0)",
                    "fillcolor": "rgb(220.0, 220.0, 220.0)",
                    "name": "0",
                },
                {
                    # invisible (opacity 0) marker traces for hover text
                    "x": ["2011-01-01", "2012-06-05"],
                    "y": [1, 1],
                    "mode": "markers",
                    "text": [None, None],
                    "marker": {"color": "rgb(166.25, 167.5, 208.0)", "size": 1, "opacity": 0},
                    "name": "",
                    "showlegend": False,
                    "legendgroup": "rgb(166.25, 167.5, 208.0)",
                },
                {
                    "x": ["2010-01-01", "2011-02-02"],
                    "y": [0, 0],
                    "mode": "markers",
                    "text": [None, None],
                    "marker": {"color": "rgb(220.0, 220.0, 220.0)", "size": 1, "opacity": 0},
                    "name": "",
                    "showlegend": False,
                    "legendgroup": "rgb(220.0, 220.0, 220.0)",
                },
            ],
            "layout": {
                "title": "Title",
                "showlegend": False,
                "height": 500,
                "width": 500,
                "shapes": [],
                "hovermode": "closest",
                "yaxis": {
                    "showgrid": True,
                    "ticktext": ["Run", "Fast"],
                    "tickvals": [0, 1],
                    "range": [-1, 3],
                    "autorange": False,
                    "zeroline": False,
                },
                "xaxis": {
                    "showgrid": True,
                    "zeroline": False,
                    "rangeselector": {
                        "buttons": [
                            {"count": 7, "label": "1w", "step": "day", "stepmode": "backward"},
                            {"count": 1, "label": "1m", "step": "month", "stepmode": "backward"},
                            {"count": 6, "label": "6m", "step": "month", "stepmode": "backward"},
                            {"count": 1, "label": "YTD", "step": "year", "stepmode": "todate"},
                            {"count": 1, "label": "1y", "step": "year", "stepmode": "backward"},
                            {"step": "all"},
                        ]
                    },
                    "type": "date",
                },
            },
        }
    )
    self.assert_fig_equal(test_gantt_chart["data"][0], exp_gantt_chart["data"][0])
    self.assert_fig_equal(test_gantt_chart["data"][1], exp_gantt_chart["data"][1])
    self.assert_fig_equal(test_gantt_chart["data"][2], exp_gantt_chart["data"][2])
    self.assert_fig_equal(test_gantt_chart["layout"], exp_gantt_chart["layout"])
class Test2D_Density(TestCaseNoTemplate, NumpyTestUtilsMixin):
    def test_validate_2D_density(self):
        """Input validation for ff.create_2d_density."""
        # validate that x and y contain only numbers
        pattern = "All elements of your 'x' and 'y' lists must be numbers."
        # assertRaisesRegex replaces the assertRaisesRegexp alias removed
        # in Python 3.12
        self.assertRaisesRegex(
            PlotlyError, pattern, ff.create_2d_density, [1, 2], ["a", 2]
        )
        # validate that x and y are the same length
        pattern2 = "Both lists 'x' and 'y' must be the same length."
        self.assertRaisesRegex(
            PlotlyError, pattern2, ff.create_2d_density, [1], [1, 2]
        )
def test_2D_density_all_args(self):
# check if 2D_density data matches with expected output
x = [1, 2]
y = [2, 4]
colorscale = [
"#7A4579",
"#D56073",
"rgb(236,158,105)",
(1, 1, 0.2),
(0.98, 0.98, 0.98),
]
test_2D_density_chart = ff.create_2d_density(
x,
y,
colorscale=colorscale,
hist_color="rgb(255,237,222)",
point_size=3,
height=800,
width=800,
)
exp_2D_density_chart = {
"data": [
{
"marker": {"color": "rgb(0, 0, 128)", "opacity": 0.4, "size": 3},
"mode": "markers",
"name": "points",
"type": "scatter",
"x": [1, 2],
"y": [2, 4],
},
{
"colorscale": [
[0.0, "rgb(122, 69, 121)"],
[0.25, "rgb(213, 96, 115)"],
[0.5, "rgb(236, 158, 105)"],
[0.75, "rgb(255, 255, 51)"],
[1.0, "rgb(250, 250, 250)"],
],
"name": "density",
"ncontours": 20,
"reversescale": True,
"showscale": False,
"type": "histogram2dcontour",
"x": [1, 2],
"y": [2, 4],
},
{
"marker": {"color": "rgb(255, 237, 222)"},
"name": "x density",
"type": "histogram",
"x": [1, 2],
"yaxis": "y2",
},
{
"marker": {"color": "rgb(255, 237, 222)"},
"name": "y density",
"type": "histogram",
"xaxis": "x2",
"y": [2, 4],
},
],
"layout": {
"autosize": False,
"bargap": 0,
"height": 800,
"hovermode": "closest",
"margin": {"t": 50},
"showlegend": False,
"title": {"text": "2D Density Plot"},
"width": 800,
"xaxis": {"domain": [0, 0.85], "showgrid": False, "zeroline": False},
"xaxis2": {"domain": [0.85, 1], "showgrid": False, "zeroline": False},
"yaxis": {"domain": [0, 0.85], "showgrid": False, "zeroline": False},
"yaxis2": {"domain": [0.85, 1], "showgrid": False, "zeroline": False},
},
}
self.assert_fig_equal(
test_2D_density_chart["data"][0], exp_2D_density_chart["data"][0]
)
self.assert_fig_equal(
test_2D_density_chart["data"][1], exp_2D_density_chart["data"][1]
)
self.assert_fig_equal(
test_2D_density_chart["data"][2], exp_2D_density_chart["data"][2]
)
self.assert_fig_equal(
test_2D_density_chart["data"][3], exp_2D_density_chart["data"][3]
)
self.assert_fig_equal(
test_2D_density_chart["layout"], exp_2D_density_chart["layout"]
)
|
plotly/python-api
|
packages/python/plotly/plotly/tests/test_optional/test_tools/test_figure_factory.py
|
Python
|
mit
| 81,949
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# CKAN-Toolbox -- Various modules that handle CKAN API and data
# By: Emmanuel Raviart <emmanuel@raviart.com>
#
# Copyright (C) 2013 Etalab
# http://github.com/etalab/ckan-toolbox
#
# This file is part of CKAN-Toolbox.
#
# CKAN-Toolbox is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# CKAN-Toolbox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Helpers to handle strings"""
import re
from biryani1 import strings
# Characters legal in a CKAN tag name: hyphen, space, and "word" characters
# (letters, digits, underscore), matched with Unicode semantics.
tag_char_re = re.compile(ur'[- \w]', re.UNICODE)
def namify(text, encoding = 'utf-8'):
    """Convert a string to a CKAN name.

    ``None`` passes through unchanged; byte strings are first decoded
    using *encoding*. Each character is mapped through ``namify_char``.
    """
    if text is None:
        return None
    if isinstance(text, str):
        text = text.decode(encoding)
    assert isinstance(text, unicode), str((text,))
    # CKAN accepts names with duplicate or trailing "-"/"_", so the
    # per-character mapping needs no further collapsing or stripping.
    pieces = [namify_char(unicode_char) for unicode_char in text]
    return u''.join(pieces)
def namify_char(unicode_char):
    """Convert an unicode character to a subset of lowercase ASCII characters or an empty string.

    The result can be composed of several characters (for example, 'œ' becomes 'oe').
    """
    chars = strings.unicode_char_to_ascii(unicode_char)
    if not chars:
        return chars
    # Anything outside the CKAN-safe alphabet collapses to a hyphen.
    allowed = '-_0123456789abcdefghijklmnopqrstuvwxyz'
    return ''.join(c if c in allowed else '-' for c in chars.lower())
def tag_namify(text, encoding = 'utf-8'):
    """Convert a string to a CKAN tag name.

    ``None`` passes through unchanged; byte strings are first decoded
    using *encoding*. Each character is mapped through ``tag_namify_char``.
    """
    if text is None:
        return None
    if isinstance(text, str):
        text = text.decode(encoding)
    assert isinstance(text, unicode), str((text,))
    # CKAN accepts tag names with duplicate or trailing "-"/"_", so the
    # per-character mapping needs no further cleanup.
    pieces = [tag_namify_char(unicode_char) for unicode_char in text]
    return u''.join(pieces)
def tag_namify_char(unicode_char):
    """Convert an unicode character to a subset of lowercase characters or an empty string."""
    lowered = unicode_char.lower()
    # Characters outside tag_char_re (hyphen, space, word chars) become a hyphen.
    return lowered if tag_char_re.match(lowered) is not None else u'-'
|
etalab/ckan-toolbox
|
ckantoolbox/texthelpers.py
|
Python
|
agpl-3.0
| 3,182
|
import asyncio
import subprocess
import discord
from discord.ext import commands
from dinnerplate import BaseCog, JsonConfigManager
from utils import reaction_menu
from utils.errors import UserBlacklisted
# Default persisted configuration: user IDs barred from using the bot, and
# extra status strings appended to the presence rotation.
CONFIG = {
    "blacklist": [],
    "additional_statuses": []
}
class Admin(BaseCog):
    """Owner-only administration cog: presence rotation, guild management,
    user blacklisting, self-update via git, and bot:user ratio policing.

    Command docstrings below are user-facing discord.py help text and are
    left verbatim.
    """

    def __init__(self, bot):
        super().__init__(bot)
        # Presence rotation entries: plain strings are shown as-is,
        # callables are invoked each cycle to produce a fresh string.
        self.statuses = ["Long Live GAF",
                         "http://www.neverendinggaf.com",
                         self.sum_users_and_guilds,
                         self.uptime,
                         self.commands_run
                         ]
        # Global command check: rejects commands from blacklisted users.
        self.bot.add_check(self.user_in_blacklist_check)
        self.config = JsonConfigManager("admin.json", default=CONFIG)
        self.statuses.extend(self.config["additional_statuses"])
        # Background task cycling the presence text forever.
        self.bg_task = self.bot.loop.create_task(self.status_rotator())

    def sum_users_and_guilds(self):
        """Status line: totals taken from the bot's stats tuple."""
        _, guilds, _, users, = self.bot.stats
        return "{} users in {} guilds".format(users, guilds)

    def uptime(self):
        """Status line: uptime formatted as days/hours/minutes/seconds."""
        time = self.bot.uptime
        # NOTE(review): assumes time[1] is ordered (sec, min, hour, day) — confirm.
        return "{}d, {}h, {}m, {}s".format(time[1][3], time[1][2], time[1][1], time[1][0])

    def commands_run(self):
        """Status line: total commands executed since startup."""
        return "{} commands ran".format(self.bot._command_count)

    async def status_rotator(self):
        """Background loop: advance the presence text every 60 seconds."""
        await self.bot.wait_until_ready()
        while not self.bot.is_closed():
            for val in self.statuses:
                # Callables produce a dynamic status string each cycle.
                if callable(val):
                    val = val()
                await self.bot.change_presence(activity=discord.Game(name=val))
                await asyncio.sleep(60)

    @commands.group(invoke_without_command=True)
    @commands.is_owner()
    async def guilds(self, ctx):
        """
        Lists all of the guilds the bot is in
        """
        guilds = [g.name for g in self.bot.guilds]
        await reaction_menu.start_reaction_menu(self.bot, guilds, ctx.author, ctx.channel, count=0,
                                                timeout=60, per_page=20, header="name jeff")

    @guilds.command()
    @commands.is_owner()
    async def leave(self, ctx, *, guild):
        """
        Leaves a specified guild
        """
        # Accept either the guild's name or its numeric ID as a string.
        guild = discord.utils.find(lambda s: s.name == guild or str(s.id) == guild, self.bot.guilds)
        if guild is None:
            await ctx.send("Unable to locate guild")
            return
        try:
            await guild.leave()
            await ctx.send("`Successfully left the guild`")
        except discord.HTTPException:
            await ctx.send("`Leaving the guild failed!`")

    @guilds.command()
    @commands.is_owner()
    async def invite(self, ctx, *, guild):
        """
        Creates an invite to a specified server
        """
        guild = discord.utils.find(lambda s: s.name == guild or str(s.id) == guild, self.bot.guilds)
        if guild is None:
            await ctx.send("`Unable to locate guild`")
            return
        # Try each text channel until one grants invite permission.
        for channel in guild.channels:
            if isinstance(channel, discord.TextChannel):
                try:
                    invite = await channel.create_invite()
                    await ctx.send("`Created an invite to guild, I will DM it to you`")
                    dm_channel = ctx.author.dm_channel
                    if dm_channel is None:
                        dm_channel = await ctx.author.create_dm()
                    await dm_channel.send(invite.url)
                    break
                except discord.HTTPException:
                    await ctx.send("`Failed to create invite for guild!`")

    @commands.is_owner()
    @guilds.command()
    async def top(self, ctx):
        """
        Get's the top 25 guilds with most members
        """
        ordered = sorted(self.bot.guilds, key=lambda g: len(g.members), reverse=True)[:25]
        embed = discord.Embed(title="Top 25 Guilds Ordered by Member Count", colour=discord.Colour.gold())
        for guild in ordered:
            embed.add_field(name=guild.name, value=f"Count: {len(guild.members)}\nID: {guild.id}", inline=False)
        await ctx.send(embed=embed)

    @commands.command()
    @commands.is_owner()
    async def update_avatar(self, ctx, file=None):
        # Owner command: set the bot avatar from resources/<file> (default avatar.jpg).
        file = file or "avatar.jpg"
        try:
            with open(f"resources/{file}", "rb") as f:
                await self.bot.user.edit(avatar=f.read())
            await ctx.send("Updated avatar, am I pretty yet?")
        except FileNotFoundError:
            await ctx.send(f"Couldn't find `resources/{file}`")

    @commands.command(hidden=True)
    @commands.is_owner()
    async def purge_bot_guilds(self, ctx):
        """
        Purges guilds with a bot:user ratio higher than 2:1
        """
        passed = 0
        failed = 0
        success = 0
        # compare_bots_users returns 0=left, 1=leave failed, 2=ratio OK.
        for g in self.bot.guilds:
            s = await self.compare_bots_users(g)
            if s == 0:
                success += 1
            elif s == 1:
                failed += 1
            else:
                passed += 1
        await ctx.send(
            f"**Finished cleaning guilds with a `bot:user` ratio higher than `2:1`**\n"
            f"Left `{success}` guilds\n"
            f"Failed Leaving: `{failed}` guilds\n"
            f"Ignored `{passed}` guilds\n"
        )

    @commands.command()
    @commands.is_owner()
    async def blacklist(self, ctx, user: discord.User):
        """
        Blacklists or unblacklist a user
        """
        # Toggle semantics: a second invocation for the same user undoes the first.
        if user.id in self.config["blacklist"]:
            self.config["blacklist"].remove(user.id)
            await ctx.send(f"Unblacklisted user {user} from the bot")
        else:
            self.config["blacklist"].append(user.id)
            await ctx.send(f"Blacklisted user {user} from the bot")

    @commands.is_owner()
    @commands.command()
    async def update(self, ctx):
        """
        Checks if their is an update available
        """
        await ctx.send("Calling process to update! :up: :date: ")
        try:
            # NOTE(review): without check=True, subprocess.run never raises
            # CalledProcessError, so that handler is effectively dead — confirm.
            done = subprocess.run("git pull", shell=True, stdout=subprocess.PIPE, timeout=30)
            if done:
                message = done.stdout.decode()
                await ctx.send("`{}`".format(message))
                if message == "Already up-to-date.\n":
                    await ctx.send("No update available :no_entry:")
                else:
                    # Logging out lets a supervisor restart the bot on the new code.
                    await ctx.send("Succesfully updated! Rebooting now :repeat: ")
                    await self.bot.logout()
        except subprocess.CalledProcessError:
            await ctx.send("Error updating! :exclamation: ")
        except subprocess.TimeoutExpired:
            await ctx.send("Error updating - Process timed out! :exclamation: ")

    @commands.is_owner()
    @commands.group(invoke_without_command=True)
    async def statuses(self, ctx):
        """
        Displays all custom statuses
        """
        # Callables are dynamic statuses and are omitted from the listing.
        await ctx.send(", ".join([s for s in self.statuses if not callable(s)]))

    @commands.is_owner()
    @statuses.command()
    async def add(self, ctx, *, status: str):
        """
        Adds a new status
        """
        if status not in self.statuses:
            # Keep the live rotation and the persisted config in sync.
            self.statuses.append(status)
            self.config["additional_statuses"].append(status)
            await ctx.send("`Added new status!`")
        else:
            await ctx.send("`Status already exists dumb dumb...`")

    @commands.is_owner()
    @statuses.command()
    async def remove(self, ctx, *, status: str):
        """
        Removes a status
        """
        try:
            self.config["additional_statuses"].remove(status)
            self.statuses.remove(status)
            await ctx.send("`Removed status!`")
        except ValueError:
            await ctx.send("`Status did not exist`")

    @commands.command()
    @commands.is_owner()
    async def remove_bans(self, ctx):
        # Owner command: lift every ban in the current guild.
        bans = await ctx.guild.bans()
        for ban in bans:
            await ctx.guild.unban(ban.user)

    def user_in_blacklist_check(self, ctx):
        """
        Checks whether a user is in the bot blacklist
        """
        user_id = ctx.author.id
        if user_id in self. config["blacklist"]:
            raise UserBlacklisted
        else:
            return True

    @commands.Cog.listener()
    async def on_guild_join(self, guild):
        # Apply the bot:user ratio policy as soon as the bot is added.
        await self.compare_bots_users(guild)

    async def compare_bots_users(self, guild):
        """Leave *guild* if it has more than twice as many bots as users.

        Returns 0 if the guild was left, 1 if leaving failed, 2 if the
        ratio was acceptable.
        """
        b = 0
        u = 0
        for m in guild.members:
            if m.bot:
                b += 1
            else:
                u += 1
        self.logger.debug(f"{guild} [{guild.id}] Evaluated bot to user ratio for guild - Users: {u} Bots: {b}")
        if (b / 2) > u:
            self.logger.debug(f"{guild} [{guild.id}] ratio too high, attempting to leave")
            try:
                await guild.leave()
                self.logger.debug(f"{guild} [{guild.id}] left guild successfully")
                return 0  # left
            except discord.HTTPException:
                self.logger.debug(f"{guild} [{guild.id}] failed leaving guild")
                return 1  # error
        else:
            self.logger.debug(f"{guild} [{guild.id}] Ratio OK, not leaving guild")
            return 2  # nothing
# Extension entry point for discord.py's load_extension; `setup` is
# presumably provided by BaseCog — confirm against the dinnerplate package.
setup = Admin.setup
|
DiNitride/GAFBot
|
modules/admin.py
|
Python
|
mit
| 9,254
|
from django.core.management.base import BaseCommand, CommandError
from django.core.exceptions import ObjectDoesNotExist
from stratosource.admin.models import Branch, Commit, Delta, TranslationDelta, DeployableObject
import subprocess
import popen2
from datetime import datetime
import os
class Command(BaseCommand):
def handle(self, *args, **options):
if len(args) < 2: raise CommandError('usage: <repo name> <branch>')
br = Branch.objects.get(repo__name__exact=args[0], name__exact=args[1])
if not br: raise CommandException("invalid repo/branch")
dolist = DeployableObject.objects.filter(branch=br).order_by('filename','status')
domap = {}
for dobj in dolist:
if not domap.has_key(dobj.filename): domap[dobj.filename] = []
domap[dobj.filename].append(dobj)
for filename,recs in domap.items():
if len(recs) > 1:
print '%s %d' % (filename, len(recs))
|
StratoSource/StratoSource
|
stratosource/management/commands/fixer.py
|
Python
|
gpl-3.0
| 970
|
# coding: utf-8
__author__ = 'Paul Cunningham'
__copyright = 'Copyright 2017, Paul Cunningham'
from sqlalchemy import or_, and_
from flask_select2._compat import as_unicode, string_types
from flask_select2.model.ajax import AjaxModelLoader, DEFAULT_PAGE_SIZE
from .tools import get_primary_key, has_multiple_pks, is_relationship, is_association_proxy
class QueryAjaxModelLoader(AjaxModelLoader):
    """AJAX loader that searches a SQLAlchemy model over configured fields."""

    def __init__(self, name, session, model, **options):
        """
        Constructor.

        :param fields:
            Fields to run query against
        :param filters:
            Additional filters to apply to the loader
        """
        super(QueryAjaxModelLoader, self).__init__(name, options)
        self.session = session
        self.model = model
        # Pull the recognised options; missing ones default to None.
        for opt in ('fields', 'order_by', 'filters'):
            setattr(self, opt, options.get(opt))
        if not self.fields:
            raise ValueError('AJAX loading requires `fields` to be specified for %s.%s' % (model, self.name))
        self._cached_fields = self._process_fields()
        if has_multiple_pks(model):
            raise NotImplementedError('Flask-Select2 does not support multi-pk AJAX model loading.')
        self.pk = get_primary_key(model)

    def _process_fields(self):
        """Resolve configured field names to model attributes."""
        resolved = []
        for field in self.fields:
            if not isinstance(field, string_types):
                # Assume it is already a SQLAlchemy property/column object.
                resolved.append(field)
                continue
            attr = getattr(self.model, field, None)
            if not attr:
                raise ValueError('%s.%s does not exist.' % (self.model, field))
            resolved.append(attr)
        return resolved

    def format(self, model):
        """Return a (pk, label) tuple for *model*, or None for a missing row."""
        return None if not model else (getattr(model, self.pk), as_unicode(model))

    def get_query(self):
        """Base query over the target model."""
        return self.session.query(self.model)

    def get_one(self, pk):
        """Fetch a single row by primary key."""
        # prevent autoflush from occurring during populate_obj
        with self.session.no_autoflush:
            return self.session.query(self.model).get(pk)

    def get_list(self, term, offset=0, limit=DEFAULT_PAGE_SIZE):
        """Case-insensitive search of *term* across the configured fields,
        returning one page of matching rows."""
        stmt = self.get_query()
        # OR together an ILIKE test per searchable field.
        stmt = stmt.filter(or_(*(field.ilike(u'%%%s%%' % term) for field in self._cached_fields)))
        if self.filters:
            # Extra filters are rendered as "<tablename>.<expression>" text clauses.
            extra = ["%s.%s" % (self.model.__name__.lower(), value) for value in self.filters]
            stmt = stmt.filter(and_(*extra))
        if self.order_by:
            for criterion in self.order_by:
                stmt = stmt.order_by(criterion)
        return stmt.offset(offset).limit(limit).all()
def create_ajax_loader(model, session, name, field_name, options):
    """Build a QueryAjaxModelLoader for the relation *field_name* of *model*."""
    attr = getattr(model, field_name, None)
    if attr is None:
        raise ValueError('Model %s does not have field %s.' % (model, field_name))
    if not (is_relationship(attr) or is_association_proxy(attr)):
        raise ValueError('%s.%s is not a relation.' % (model, field_name))
    # Association proxies wrap the real relationship attribute.
    if is_association_proxy(attr):
        attr = attr.remote_attr
    remote_model = attr.prop.mapper.class_
    return QueryAjaxModelLoader(name, session, remote_model, **options)
|
pjcunningham/Flask-Select2
|
flask_select2/contrib/sqla/ajax.py
|
Python
|
mit
| 3,339
|
#!/usr/bin/env python3
import asyncio
import aiohttp
import sys
import requests
import json
import sys
import string
def eprint(message):
    """Write *message* (plus a newline) to stderr."""
    sys.stderr.write("{}\n".format(message))
# Command-line handling: BASE_URL is required; FROM optionally resumes the
# crawl from a given allpages continuation token.
argc = len(sys.argv)
if argc == 3:
    _, base_url, apfrom = sys.argv
    eprint("Restarting '{}' from '{}'".format(base_url, apfrom))
elif argc == 2:
    _, base_url = sys.argv
    apfrom = None
    eprint("Starting '{}' from scratch".format(base_url))
else:
    eprint("Usage: ./dump.py BASE_URL [FROM]")
    sys.exit(1)
# The URL templates below assume a trailing slash on the base URL.
if not base_url.endswith("/"):
    base_url += "/"
# URL templates for the MediaWiki allpages listing and the REST content APIs.
WIKI_ALL_PAGES = (
    "{}w/api.php?action=query&list=allpages&format=json&aplimit=500&apfrom={}"
)
WIKI_HTML = "{}api/rest_v1/page/html/{}"
WIKI_METADATA = "{}api/rest_v1/page/metadata/{}"
# Characters a page title may contain before we bother dumping it.
# Bug fix: `set(...) + set(...)` raises TypeError (sets do not support +);
# set union is the `|` operator.
VALID = set(string.punctuation) | set("qwertyuiopasdfghjklzxcvbnm _")
async def iter_titles(session):
    """Yield every page title from the allpages API, paginating with the
    module-global ``apfrom`` continuation token.

    Titles containing characters outside VALID are skipped. Mutates the
    global ``apfrom`` so a later run can resume where this one stopped.
    """
    global apfrom
    # Normalize a None start token to "" for the first request URL.
    apfrom_ = apfrom if apfrom is not None else ""
    apfrom = apfrom_
    while True:
        url = WIKI_ALL_PAGES.format(base_url, apfrom)
        async with session.get(url) as response:
            items = await response.json()
            for item in items["query"]["allpages"]:
                title = item["title"]
                # Characters in the title that are NOT in the allowed set.
                surface = set(title) - VALID
                if surface:
                    continue
                else:
                    yield item["title"]
            # Follow the API's continuation token; absence means we are done.
            apfrom = items.get("continue", {}).get("apcontinue")
            if apfrom is None:
                break
async def dump(title, session):
    """Fetch the HTML and metadata for *title* and print one JSON line.

    Best-effort: retries up to 5 times on failure and gives up silently.
    """
    for _ in range(5):
        try:
            # html
            url = WIKI_HTML.format(base_url, title)
            html = await session.get(url)
            html = await html.text()
            # metadata
            url = WIKI_METADATA.format(base_url, title)
            metadata = await session.get(url)
            metadata = await metadata.json()
            out = dict(title=title, html=html, metadata=metadata)
            print(json.dumps(out))
            # Bug fix: without this return a successful fetch fell through the
            # loop and the same page was fetched and printed five times.
            return
        except Exception:
            # Narrowed from a bare "except:": the bare clause also swallowed
            # CancelledError/KeyboardInterrupt, making tasks uncancellable.
            continue
# Number of concurrent crawler tasks; each worker decrements this when the
# title stream is exhausted, and main() waits for it to reach zero.
COUNT = 50
async def crawler(lock, session, generator):
    """Worker task: pull titles from the shared async generator and dump them.

    The lock serializes calls to generator.__anext__; the global COUNT is
    decremented on exhaustion so main() knows when all workers are done.
    """
    while True:
        try:
            async with lock:
                title = generator.__anext__()
            # NOTE(review): the coroutine from __anext__ is awaited outside the
            # lock; concurrently awaiting one async generator can raise
            # RuntimeError("already running") — confirm this is intended.
            title = await title
            await dump(title, session)
        except StopAsyncIteration:
            global COUNT
            COUNT -= 1
            return
async def main():
    """Spawn COUNT crawler tasks and wait until every one has finished."""
    session = aiohttp.ClientSession()
    lock = asyncio.Lock()
    titles = iter_titles(session)
    for _ in range(COUNT):
        asyncio.create_task(crawler(lock, session, titles))
    # Each crawler decrements the global COUNT when the stream is exhausted;
    # poll once a second until they have all finished.
    while COUNT != 0:
        await asyncio.sleep(1)
    await session.close()
# Script entry point: run the crawl until all worker tasks finish.
asyncio.run(main())
|
amirouche/AjguDB
|
examples/wiki/dump.py
|
Python
|
gpl-2.0
| 2,756
|
from django.test import TransactionTestCase
from django.core.files import File
from django.contrib.auth.models import User
from ...update import (
update_grant_facts_v2,
)
from ...utils import import_data
from ...models import (
GrantFactsV2,
GrantFactsV2Update,
)
from ..resources import GrantFactsV2Resource
# Location of the CSV fixtures used by these update tests.
FIXTURES_PATH = "municipal_finance/fixtures/tests/update/grant_facts_v2"
class UpdateAgedDebtorFactsV2(TransactionTestCase):
    """Tests for update_grant_facts_v2: pure inserts vs. delete-and-replace.

    NOTE(review): the class name says "AgedDebtor" but everything here tests
    grant facts — looks like a copy-paste misnomer; rename when convenient.
    """

    serialized_rollback = True

    def setUp(self):
        # Seed 27 base rows, then prepare an insert-only and an update upload.
        import_data(
            GrantFactsV2Resource,
            f"{FIXTURES_PATH}/grant_facts_v2.csv",
        )
        self.user = User.objects.create_user(
            username="sample", email="sample@some.co", password="testpass",
        )
        self.insert_obj = GrantFactsV2Update.objects.create(
            user=self.user,
            file=File(open(f"{FIXTURES_PATH}/insert.csv", "rb")),
        )
        self.update_obj = GrantFactsV2Update.objects.create(
            user=self.user,
            file=File(open(f"{FIXTURES_PATH}/update.csv", "rb")),
        )

    def test_without_updates(self):
        # insert.csv only adds rows: 27 -> 42, nothing deleted.
        self.assertEqual(GrantFactsV2.objects.all().count(), 27)
        update_grant_facts_v2(
            self.insert_obj,
            batch_size=4,
        )
        self.assertEqual(GrantFactsV2.objects.all().count(), 42)
        self.assertEqual(self.insert_obj.deleted, 0)
        self.assertEqual(self.insert_obj.inserted, 15)

    def test_with_updates(self):
        # update.csv replaces rows: 9 deleted, 18 inserted, 27 -> 36.
        self.assertEqual(GrantFactsV2.objects.all().count(), 27)
        update_grant_facts_v2(
            self.update_obj,
            batch_size=4,
        )
        self.assertEqual(GrantFactsV2.objects.all().count(), 36)
        self.assertEqual(self.update_obj.deleted, 9)
        self.assertEqual(self.update_obj.inserted, 18)
|
Code4SA/municipal-data
|
municipal_finance/tests/update/test_grant_facts_v2.py
|
Python
|
mit
| 1,824
|
import json
import decimal
import datetime
from django.utils.timezone import is_aware
from bson import objectid
class VersatileJSONEncoder(json.JSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date/time, ObjectID and decimal types.
    """

    def default(self, obj):
        # Order matters: datetime.datetime is a subclass of datetime.date,
        # so it must be tested first.
        if isinstance(obj, datetime.datetime):
            # "Date Time String Format" from the ECMA-262 specification:
            # truncate microseconds to milliseconds and spell UTC as "Z".
            out = obj.isoformat()
            if obj.microsecond:
                out = out[:23] + out[26:]
            return out[:-6] + 'Z' if out.endswith('+00:00') else out
        if isinstance(obj, objectid.ObjectId):
            return str(obj)
        if isinstance(obj, datetime.date):
            return obj.isoformat()
        if isinstance(obj, datetime.time):
            if is_aware(obj):
                raise ValueError("JSON can't represent timezone-aware times.")
            out = obj.isoformat()
            # Keep milliseconds only (HH:MM:SS.mmm is 12 characters).
            return out[:12] if obj.microsecond else out
        if isinstance(obj, decimal.Decimal):
            return str(obj)
        return json.JSONEncoder.default(self, obj)
|
Pancho/failiac
|
external/encoders.py
|
Python
|
mit
| 960
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
doProjection.py
---------------------
Date : June 2010
Copyright : (C) 2010 by Giuseppe Sucameli
Email : brush dot tyler at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Giuseppe Sucameli'
__date__ = 'June 2010'
__copyright__ = '(C) 2010, Giuseppe Sucameli'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from ui_widgetProjection import Ui_GdalToolsWidget as Ui_Widget
from widgetBatchBase import GdalToolsBaseBatchWidget as BaseBatchWidget
from dialogSRS import GdalToolsSRSDialog as SRSDialog
import GdalTools_utils as Utils
import os.path
class GdalToolsDialog( QWidget, Ui_Widget, BaseBatchWidget ):
    """GDAL Tools "Assign projection" widget: runs gdalwarp to rewrite the
    CRS of one raster, or of every raster in a directory in batch mode.

    PyQt4 / Python 2 code (old-style SIGNAL strings, QString API).
    """

    def __init__( self, iface ):
        QWidget.__init__( self )
        self.iface = iface
        self.setupUi( self )
        BaseBatchWidget.__init__( self, self.iface, "gdalwarp" )
        self.inSelector.setType( self.inSelector.FILE )
        # set the default QSpinBoxes and QProgressBar value
        self.progressBar.setValue(0)
        # Progress bar and recursion are only meaningful in batch mode.
        self.progressBar.hide()
        self.recurseCheck.hide()
        # Re-evaluate the command preview whenever these inputs change.
        self.setParamsStatus(
          [
            ( self.inSelector, SIGNAL( "filenameChanged()" ) ),
            ( self.desiredSRSEdit, SIGNAL( "textChanged( const QString & )" ) )
          ]
        )
        self.connect( self.inSelector, SIGNAL( "selectClicked()" ), self.fillInputFileEdit )
        self.connect( self.selectDesiredSRSButton, SIGNAL( "clicked()" ), self.fillDesiredSRSEdit )
        self.connect( self.batchCheck, SIGNAL( "stateChanged( int )" ), self.switchToolMode )
        self.connect( self.recurseCheck, SIGNAL( "stateChanged( int )" ), self.enableRecurse )

    def switchToolMode( self ):
        """Toggle between single-file and batch (directory) mode: swap the
        input selector between file and directory pickers."""
        self.setCommandViewerEnabled( not self.batchCheck.isChecked() )
        self.progressBar.setVisible( self.batchCheck.isChecked() )
        self.recurseCheck.setVisible( self.batchCheck.isChecked() )
        self.inSelector.clear()
        if self.batchCheck.isChecked():
            # Remember the single-file label so it can be restored later.
            self.inFileLabel = self.label.text()
            self.label.setText( QCoreApplication.translate( "GdalTools", "&Input directory" ) )
            QObject.disconnect( self.inSelector, SIGNAL( "selectClicked()" ), self.fillInputFileEdit )
            QObject.connect( self.inSelector, SIGNAL( "selectClicked()" ), self.fillInputDir )
        else:
            self.label.setText( self.inFileLabel )
            QObject.connect( self.inSelector, SIGNAL( "selectClicked()" ), self.fillInputFileEdit )
            QObject.disconnect( self.inSelector, SIGNAL( "selectClicked()" ), self.fillInputDir )

    def enableRecurse( self ):
        """Confirm with the user before enabling the destructive recursive scan."""
        if self.recurseCheck.isChecked():
            res = QMessageBox.warning( self, self.tr( "Warning" ),
                                       self.tr( "Warning: CRS information for all raster in subfolders will be rewritten. Are you sure?" ),
                                       QMessageBox.Yes | QMessageBox.No )
            if res != QMessageBox.Yes:
                self.recurseCheck.setCheckState( Qt.Unchecked )
                return

    def fillInputFileEdit( self ):
        """Prompt for the input raster file (single-file mode)."""
        lastUsedFilter = Utils.FileFilter.lastUsedRasterFilter()
        inputFile = Utils.FileDialog.getOpenFileName( self, self.tr( "Select the file to analyse" ), Utils.FileFilter.allRastersFilter(), lastUsedFilter )
        if inputFile.isEmpty():
            return
        Utils.FileFilter.setLastUsedRasterFilter( lastUsedFilter )
        self.inSelector.setFilename( inputFile )

    def fillInputDir( self ):
        """Prompt for the input directory (batch mode)."""
        inputDir = Utils.FileDialog.getExistingDirectory( self, self.tr( "Select the input directory with files to Assign projection" ))
        if inputDir.isEmpty():
            return
        self.inSelector.setFilename( inputDir )

    def fillDesiredSRSEdit( self ):
        """Open the SRS picker and store the chosen projection string."""
        dialog = SRSDialog( "Select desired SRS", self )
        if dialog.exec_():
            self.desiredSRSEdit.setText( dialog.getProjection() )

    def getArguments( self ):
        """Build the gdalwarp argument list; in single-file mode the output is
        written to a temp name (input is overwritten in finished())."""
        arguments = QStringList()
        if not self.desiredSRSEdit.text().isEmpty():
            arguments << "-t_srs"
            arguments << self.desiredSRSEdit.text()
        if self.batchCheck.isChecked():
            return arguments
        inputFn = self.getInputFileName()
        arguments << inputFn
        self.tempFile = QString( inputFn )
        self.needOverwrite = False
        if not self.tempFile.isEmpty():
            # A .tif/.tiff input must be replaced in place: warp to "<name>.tif.tmp"
            # and swap files afterwards; any other extension just becomes .tif.
            if self.tempFile.toLower().contains( QRegExp( "\.tif{1,2}" ) ):
                self.tempFile = self.tempFile.replace( QRegExp( "\.[a-zA-Z]{2,4}$" ), ".tif" ).append( ".tmp" )
                self.needOverwrite = True
            else:
                self.tempFile = self.tempFile.replace( QRegExp( "\.[a-zA-Z]{2,4}$" ), ".tif" )
        arguments << self.tempFile
        return arguments

    def finished( self ):
        """Post-process: replace the original file with the temp output when
        overwriting, optionally load the result, and report the outcome."""
        outFn = self.getOutputFileName()
        if self.needOverwrite:
            oldFile = QFile( outFn )
            newFile = QFile( self.tempFile )
            if oldFile.remove():
                newFile.rename( outFn )
        fileInfo = QFileInfo( outFn )
        if fileInfo.exists():
            if self.base.loadCheckBox.isChecked():
                self.addLayerIntoCanvas( fileInfo )
            QMessageBox.information( self, self.tr( "Finished" ), self.tr( "Processing completed." ) )
        else:
            QMessageBox.warning( self, self.tr( "Warning" ), self.tr( "%1 not created." ).arg( outFn ) )

    def getInputFileName(self):
        """Input path (file or directory) from the selector."""
        return self.inSelector.filename()

    def getOutputFileName( self ):
        # Output equals input: the tool rewrites the raster in place.
        return self.inSelector.filename()

    def getBatchOutputFileName(self, fn):
        # get GeoTiff
        fn = QString( fn ).replace( QRegExp( "\.[a-zA-Z]{2,4}$" ), ".tif" )
        return BaseBatchWidget.getBatchOutputFileName( self, fn )

    def addLayerIntoCanvas(self, fileInfo):
        """Load the processed raster into the QGIS map canvas."""
        self.iface.addRasterLayer(fileInfo.filePath())

    def checkLayer( self ):
        """Warn if the chosen raster is already loaded, then run the tool.

        NOTE(review): reads self.inputFileEdit, which no other method here
        defines (the rest of the class uses self.inSelector) — confirm this
        widget exists in the .ui file or this method is dead code.
        """
        layerList = []
        layerMap = QgsMapLayerRegistry.instance().mapLayers()
        for name, layer in layerMap.iteritems():
            if layer.type() == QgsMapLayer.RasterLayer:
                layerList.append( unicode( layer.source() ) )
        if unicode( self.inputFileEdit.text() ) in layerList:
            QMessageBox.warning( self, self.tr( "Assign projection" ), self.tr( "This raster already found in map canvas" ) )
            return
        self.onRun()

    def isBatchEnabled(self):
        return self.batchCheck.isChecked()

    def isRecursiveScanEnabled(self):
        return self.recurseCheck.isChecked()

    def setProgressRange(self, maximum):
        self.progressBar.setRange(0, maximum)

    def updateProgress(self, index, total):
        # Reset to zero once the batch is complete.
        if index < total:
            self.progressBar.setValue(index + 1)
        else:
            self.progressBar.setValue(0)
|
bstroebl/QGIS
|
python/plugins/GdalTools/tools/doProjection.py
|
Python
|
gpl-2.0
| 7,359
|
import core
from core import *
# Pixel size (width == height) of the icons shown in the object tree.
dmide_objtree_icon_size = 32
class DMIDE_ObjTree(wx.ListCtrl):
    """Virtual icon-list control showing the DM object tree of the open project.

    Python 2 / wxPython code. The list is virtual: item text and images are
    served on demand from self.icons, a list of (label, image-index) pairs.
    """

    def __init__(self, parent):
        wx.ListCtrl.__init__(self, parent, ID_OBJTREE, style = wx.LC_ICON | wx.LC_VIRTUAL | wx.LC_EDIT_LABELS | wx.NO_BORDER)
        # Rect of the last item, cached to skip redundant repaints on resize.
        self.last_pos = -1
        # (label, image-index) pairs backing the virtual list.
        self.icons = []
        self.Bind(wx.EVT_SIZE, self.OnSize)

    def OnSize(self, event):
        """Repaint only when a resize actually moved the last item."""
        event.Skip()
        last_pos = self.GetItemRect(self.GetItemCount() - 1)
        if self.last_pos != last_pos:
            self.last_pos = last_pos
            self.Refresh(False)

    def UpdateObjTree(self, nObj=None):
        """Rebuild the tree: compile the current .dme for its object tree when
        nObj is None, otherwise use the already-parsed tree nObj."""
        # Hoisted out of the branch below: get_icon() needs it in both paths
        # (the original only defined it when nObj was None).
        filetree = wx.FindWindowById(ID_FILETREE)
        if not nObj:
            self.DeleteAllItems()
            try:
                dme_path = filetree.project_path
            except AttributeError:
                return
            # Locate the BYOND compiler for this platform.
            build_path = ''
            if os.name in ['posix', 'os2', 'mac', 'ce']:
                build_path = '/usr/local/byond/bin'
                dm_path = os.path.join(build_path, 'DreamMaker')
            elif os.name in ['dos', 'nt']:
                build_path = 'C:\\Program Files\\BYOND\\bin'
                dm_path = os.path.join(build_path, 'dm.exe')
            # Compile in object-tree mode and parse the compiler's output.
            p = subprocess.Popen([dm_path, '-o', dme_path], stdout=subprocess.PIPE)
            objs = obj.OBJREAD(p.stdout)
            self.objects = objs
        else:
            # Bug fix: the original only set self.objects here, leaving the
            # local `objs` (used below) undefined when nObj was supplied.
            objs = nObj
            self.objects = objs

        def get_icon(object, images):
            # Resolve the object's icon/icon_state to an image-list index;
            # 0 is the grey placeholder added below.
            if isinstance(object, obj.DMIDE_Datum):
                icon = object.get_inherited_val('icon')
                icon_state = object.get_inherited_val('icon_state')
                if icon_state:
                    icon_state = icon_state.get_value()
                else:
                    icon_state = ''
                if icon:
                    icon = icon.get_value()
                    icon_path = filetree.dme.get_file_path(icon)
                    if icon_path:
                        icon = dmi.DMIREAD(icon_path)
                        for i in icon:
                            if i.state == icon_state:
                                return images.Add(ImgToWx(i.icons[0][0], (dmide_objtree_icon_size, dmide_objtree_icon_size)))
            return 0

        def populate(items, root, images, types=(obj.DMIDE_Atom,)):
            # Depth-first walk appending (name, icon-index) pairs to root.icons.
            for object in items:
                try:
                    object.name
                except AttributeError:
                    print >> sys.stderr, '[object] object without a name?', object
                    continue
                found = False
                for type in types:
                    if isinstance(object, type):
                        found = True
                        break
                if not found:
                    continue
                root.icons.append((object.name, get_icon(object, images)))
                if hasattr(object, 'children') and len(object.children):
                    populate(object.children, root, images)

        image_list = wx.ImageList(dmide_objtree_icon_size, dmide_objtree_icon_size)
        # Index 0: translucent grey placeholder for objects without an icon.
        image_list.Add(wx.EmptyBitmapRGBA(dmide_objtree_icon_size, dmide_objtree_icon_size, 128, 128, 128, 128))
        populate(objs, self, image_list)
        self.AssignImageList(image_list, wx.IMAGE_LIST_NORMAL)
        self.SetItemCount(len(self.icons))

    def OnGetItemText(self, index, col):
        """Virtual-list callback: label for row *index*."""
        try:
            return self.icons[index][0]
        except IndexError:
            return 'IndexError'

    def OnGetItemImage(self, index):
        """Virtual-list callback: image-list index for row *index*."""
        try:
            return self.icons[index][1]
        except IndexError:
            # Bug fix: the original returned the string 'IndexError' here, but
            # wx requires an int; fall back to the placeholder image (index 0).
            return 0

    def OnGetItemAttr(self, index):
        # No per-item display attributes.
        return None

    def GetIconFromTypePath(self, path):
        """Return a wx image for the type at */a/b/c*-style *path*, or None."""
        hierarchy = path.split('/')
        filetree = wx.FindWindowById(ID_FILETREE)

        def get_obj(object, match):
            # Recursively descend the tree following the path components.
            if ('/%s' % object.name) == match:
                return object
            elif object.name == match.split('/')[1]:
                for child in object.children:
                    value = get_obj(child, match[match.find('/', 2):])
                    if value:
                        return value

        for object in self.objects:
            value = get_obj(object, path)
            object = value
            if value:
                if isinstance(object, obj.DMIDE_Datum):
                    icon = object.get_inherited_val('icon')
                    icon_state = object.get_inherited_val('icon_state')
                    if icon_state:
                        icon_state = icon_state.get_value()
                    else:
                        icon_state = ''
                    if icon:
                        icon = icon.get_value()
                        icon_path = filetree.dme.get_file_path(icon)
                        if icon_path:
                            icon = dmi.DMIREAD(icon_path)
                            for i in icon:
                                if i.state == icon_state:
                                    return ImgToWx(i.icons[0][0])

    def GetVisualAttributesFromPath(self, path):
        """Return [icon, icon_state] strings for the type at *path*, or None."""
        hierarchy = path.split('/')
        filetree = wx.FindWindowById(ID_FILETREE)

        def get_obj(object, match):
            if ('/%s' % object.name) == match:
                return object
            elif object.name == match.split('/')[1]:
                for child in object.children:
                    value = get_obj(child, match[match.find('/', 2):])
                    if value:
                        return value

        for object in self.objects:
            value = get_obj(object, path)
            object = value
            if value:
                if isinstance(object, obj.DMIDE_Datum):
                    icon = object.get_inherited_val('icon')
                    icon_state = object.get_inherited_val('icon_state')
                    # Bug fix: the original `'' or x.get_value()` never falls
                    # back to '' and raised AttributeError when the attribute
                    # was missing (None).
                    icon = icon.get_value() if icon else ''
                    icon_state = icon_state.get_value() if icon_state else ''
                    return [icon, icon_state]
|
nyov/dmide
|
core/panels/objtree2.py
|
Python
|
bsd-3-clause
| 4,575
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Rename Boletin.fecha to Boletin.date (drop old column, add new)."""
        # Deleting field 'Boletin.fecha'
        db.delete_column(u'app_boletin', 'fecha')

        # Adding field 'Boletin.date'; existing rows get the fixed default
        # timestamp, new rows use auto_now_add.
        db.add_column(u'app_boletin', 'date',
                      self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, default=datetime.datetime(2013, 11, 25, 0, 0), blank=True),
                      keep_default=False)
def backwards(self, orm):
# Adding field 'Boletin.fecha'
db.add_column(u'app_boletin', 'fecha',
self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, default='2013-11-25', blank=True),
keep_default=False)
# Deleting field 'Boletin.date'
db.delete_column(u'app_boletin', 'date')
models = {
u'app.asistencia': {
'Meta': {'object_name': 'Asistencia'},
'alumno': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.AUser']"}),
'clase': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.Clase']"}),
'fecha': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'app.auser': {
'Meta': {'object_name': 'AUser'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ap_materno': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'ap_paterno': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'email': ('django.db.models.fields.EmailField', [], {'default': 'None', 'max_length': '75', 'null': 'True', 'blank': 'True'}),
'estado': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'fecha_nacimiento': ('django.db.models.fields.DateField', [], {'default': "'1993-01-01'"}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sexo': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'tipo': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '12', 'db_index': 'True'})
},
u'app.boletin': {
'Meta': {'object_name': 'Boletin'},
'admin': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.AUser']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'texto': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'titulo': ('django.db.models.fields.CharField', [], {'max_length': '140'})
},
u'app.calificacioncriterio': {
'Meta': {'object_name': 'CalificacionCriterio'},
'alumno': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.AUser']"}),
'criterio': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.Criterio']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'puntaje': ('django.db.models.fields.FloatField', [], {})
},
u'app.clase': {
'Meta': {'unique_together': "(('materia', 'grupo', 'profesor'),)", 'object_name': 'Clase'},
'grupo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.Grupo']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'materia': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.Materia']"}),
'profesor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.AUser']"})
},
u'app.criterio': {
'Meta': {'object_name': 'Criterio'},
'descripcion': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'periodo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.Periodo']"}),
'puntaje': ('django.db.models.fields.FloatField', [], {})
},
u'app.grupo': {
'Meta': {'object_name': 'Grupo'},
'capacidad': ('django.db.models.fields.IntegerField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '5'}),
'salon': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '4'})
},
u'app.horario': {
'Meta': {'object_name': 'Horario'},
'clase': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.Clase']"}),
'dia': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
'hora_fin': ('django.db.models.fields.TimeField', [], {}),
'hora_inicio': ('django.db.models.fields.TimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'app.materia': {
'Meta': {'object_name': 'Materia'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keyname': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '10'}),
'nombre': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
u'app.periodo': {
'Meta': {'object_name': 'Periodo'},
'clase': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.Clase']"}),
'fin': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inicio': ('django.db.models.fields.DateField', [], {}),
'numero': ('django.db.models.fields.SmallIntegerField', [], {})
},
u'app.publicacion': {
'Meta': {'object_name': 'Publicacion'},
'creador': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.AUser']"}),
'fecha': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pertenece': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.Clase']"})
},
u'app.task': {
'Meta': {'object_name': 'Task'},
'clase': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.Clase']"}),
'descripcion': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'due_date': ('django.db.models.fields.DateTimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'usuario': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['app.AUser']"})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['app']
|
henocdz/akdmik
|
app/migrations/0007_auto__del_field_boletin_fecha__add_field_boletin_date.py
|
Python
|
apache-2.0
| 9,976
|
import xbmc, xbmcgui, xbmcplugin, xbmcaddon, urllib, re, string, sys, os, buggalo

# ---- Add-on identity and metadata ----
plugin = "ESPN Video"
__author__ = 'stacked <stacked.xbmc@gmail.com>'
__url__ = 'http://code.google.com/p/plugin/'
__date__ = '02-18-2015'
__version__ = '2.0.5'
settings = xbmcaddon.Addon(id='plugin.video.espn.video')
# Crash reports are posted here by buggalo.onExceptionRaised() (see dispatcher below).
buggalo.SUBMIT_URL = 'http://www.xbmc.byethost17.com/submit.php'
dbg = False
dbglevel = 3
# Pre-resolved paths of thumbnails bundled in the add-on's resources/media folder.
next_thumb = os.path.join( settings.getAddonInfo( 'path' ), 'resources', 'media', 'next.png' )
tvshows_thumb = os.path.join( settings.getAddonInfo( 'path' ), 'resources', 'media', 'tvshows.png' )
original_thumb = os.path.join( settings.getAddonInfo( 'path' ), 'resources', 'media', 'original.png' )
categories_thumb = os.path.join( settings.getAddonInfo( 'path' ), 'resources', 'media', 'categories.png' )
search_thumb = os.path.join( settings.getAddonInfo( 'path' ), 'resources', 'media', 'search.png' )
history_thumb = os.path.join( settings.getAddonInfo( 'path' ), 'resources', 'media', 'history.png' )
# parseDOM helpers shipped with the add-on; tag them with our name/version for logging.
import CommonFunctions
common = CommonFunctions
common.plugin = plugin + ' ' + __version__
from addonfunc import addListItem, playListItem, getUrl, getPage, setViewMode, getParameters, retry
@retry(IndexError)
def build_main_directory():
    """Build the add-on's root menu: Shows, Categories, Search, optional
    Search History, then the 'The Latest' video list inline."""
    # (localized label, thumbnail, ESPN menu id or action, plugin mode)
    main=[
        ( settings.getLocalizedString( 30000 ), tvshows_thumb, 'menu2949050', '1' ),
        #( settings.getLocalizedString( 30001 ), original_thumb, 'menu3385449', '1' ),
        ( settings.getLocalizedString( 30002 ), categories_thumb, 'menu2949049', '1' ),
        ( settings.getLocalizedString( 30005 ), search_thumb, 'search', '2' )
    ]
    for name, thumbnailImage, url, mode in main:
        u = { 'mode': mode, 'thumb': thumbnailImage, 'url': url, 'name': name }
        infoLabels = { "Title": name, "Plot": name }
        addListItem(label = '[ ' + name + ' ]', image = thumbnailImage, url = u, isFolder = True, infoLabels = infoLabels)
    # Only offer Search History when there are saved searches and the setting is on.
    if settings.getSetting('presets_search') != '' and settings.getSetting('history') == 'true':
        u = { 'mode': '4' }
        infoLabels = { "Title": settings.getLocalizedString( 30006 ), "Plot": settings.getLocalizedString( 30006 ) }
        addListItem(label = '[ ' + settings.getLocalizedString( 30006 ) + ' ]', image = history_thumb, url = u, isFolder = True, infoLabels = infoLabels)
    # Append the "The Latest" playlist contents directly under the root menu.
    build_video_directory('http://espn.go.com/video/format/libraryPlaylist?categoryid=2378529', 'The Latest', 'null')
    setViewMode("503")
@retry(IndexError)
def build_sub_directory(url, thumb):
    """Build a sub-menu (Shows or Categories) plus the channel list scraped
    from espn.go.com/video.

    :param url: ESPN menu div id ('menu2949050' = shows, 'menu2949049' = categories)
    :param thumb: thumbnail image path used for the scraped channel entries
    """
    saveurl = url
    html = getUrl('http://espn.go.com/video/')
    html = html.decode(encoding='UTF-16',errors='ignore') #Swedemon fix 2015-02-16
    # Scrape the channel list belonging to the requested menu div.
    menu = common.parseDOM(html, "div", attrs = { "id": url })
    channel = common.parseDOM(menu, "li", attrs = { "class": "channel" })
    title = common.parseDOM(channel, "a")
    id = common.parseDOM(menu, "li", attrs = { "class": "channel" }, ret = "id")
    item_count = 0
    if saveurl == 'menu2949050':
        # Hard-coded show list; each entry is (display name, pre-encoded search term).
        shows=[
            ( settings.getLocalizedString( 30009 ), 'sportscenter' ),
            ( settings.getLocalizedString( 30010 ), 'first%20take' ),
            ( settings.getLocalizedString( 30011 ), 'pti' ),
            ( settings.getLocalizedString( 30012 ), 'ath' ),
            ( 'Mike and Mike', 'mike%20and%20mike' ),
            # BUG FIX: was 'outside%the%lines' — '%th'/'%li' are invalid
            # percent escapes; every other entry uses '%20' for spaces.
            ( 'Outside the Lines', 'outside%20the%20lines' ),
            ( 'E:60', 'e:60' ),
            ( 'Olbermann', 'olbermann' ),
            ( 'SC Featured', 'sc%20featured' ),
            ( 'Wider World of Sports', 'wider%20world%20of%20sports' ),
            ( 'This is Sportscenter', 'this%20is%20sportscenter' ),
            ( 'Grantland', 'grantland' )
        ]
        for name, search in shows:
            # Shows are implemented as canned searches (mode 2, history type).
            url = 'http://search.espn.go.com/results?searchString=' + search + '&start=0&dims=6'
            u = { 'mode': '2', 'name': settings.getLocalizedString( 30005 ), 'url': url, 'type': 'history' }
            infoLabels = { "Title": name, "Plot": name }
            addListItem(label = name, image = tvshows_thumb, url = u, isFolder = True, infoLabels = infoLabels)
    if saveurl == 'menu2949049':
        # Hard-coded category list, same canned-search mechanism as shows.
        categories=[
            ( 'The Latest', 'the%20latest' ),
            ( 'NFL', 'nfl' ),
            ( 'NBA', 'nba' ),
            ( 'NHL', 'nhl' ),
            ( 'MLB', 'mlb' ),
            ( 'Tennis', 'tennis' ),
            ( 'NASCAR', 'nascar' ),
            ( 'Golf', 'golf' ),
            ( 'Boxing', 'boxing' ),
            ( 'MMA', 'mma' ),
            ( 'Action Sports', 'action%20sports' ),
            ( 'College Football', 'college%20football' ),
            ( 'College Hoops', 'college%20hoops' ),
            ( 'ESPN FC', 'espn%20fc' ),
            ( 'ESPNcricinfo', 'espncricinfo' ),
            ( 'ESPNU', 'espnu' ),
            ( 'Fantasy', 'fantasy' ),
            ( 'Racing', 'racing' ),
            ( 'Sport Science', 'sport%20science' ),
            ( "Women's Basketball", "women's%20basketball" )
        ]
        for name, search in categories:
            url = 'http://search.espn.go.com/results?searchString=' + search + '&start=0&dims=6'
            u = { 'mode': '2', 'name': settings.getLocalizedString( 30005 ), 'url': url, 'type': 'history' }
            infoLabels = { "Title": name, "Plot": name }
            addListItem(label = name, image = tvshows_thumb, url = u, isFolder = True, infoLabels = infoLabels)
    # Channels scraped from the page map directly to library playlists.
    for name in title:
        name = name.rsplit('(')[0]  # strip trailing "(count)" suffix
        url = 'http://espn.go.com/video/format/libraryPlaylist?categoryid=' + id[item_count].replace('channel','')
        u = { 'mode': '2', 'name': name, 'url': url, 'type': 'null' }
        infoLabels = { "Title": name, "Plot": name }
        addListItem(label = name, image = thumb, url = u, isFolder = True, infoLabels = infoLabels)
        item_count += 1
    xbmcplugin.addSortMethod( handle = int(sys.argv[1]), sortMethod = xbmcplugin.SORT_METHOD_NONE )
    setViewMode("503")
    xbmcplugin.endOfDirectory( int( sys.argv[1] ) )
@retry(IndexError)
def build_video_directory(url, name, type):
    """List playable videos, either from an ESPN search or a library playlist.

    Search mode is entered when `name` equals the localized 'Search' string;
    on the first page (and when not replaying a saved search) the user is
    prompted for a term, which is also saved to the presets_search setting.
    Uses the module-level `page` global for pagination (16 results per page).
    """
    nextname = name
    if name == settings.getLocalizedString( 30005 ):
        # --- search mode ---
        if page == 0 and type != 'history':
            try:
                newStr = common.getUserInput(settings.getLocalizedString( 30005 ), '').replace(' ','%20')
            except:
                return  # user cancelled the keyboard dialog
            # Append the new term to the saved search presets unless already present.
            presets = settings.getSetting( "presets_search" )
            if presets == '':
                save_str = newStr
            else:
                if presets.find(newStr + ' |') == -1:
                    save_str = presets + ' | ' + newStr
                else:
                    save_str = presets
            settings.setSetting("presets_search", save_str)
        else:
            # Replaying a saved search / paging: recover the term from the URL.
            newStr = getParameters(url)["searchString"]
        url = 'http://search.espn.go.com/results?searchString=' + newStr + '&start=' + str(int(page) * 16) + '&dims=6'
        nexturl = url
        html = getUrl(url).decode('ascii', 'ignore')
        results = common.parseDOM(html, "li", attrs = { "class": "result video-result" })
        titledata = common.parseDOM(results, "h3")
        title = common.parseDOM(titledata, "a", attrs = { "rel": "nofollow" })
        if len(title) == 0:
            # No hits: tell the user and drop the term from the saved presets.
            dialog = xbmcgui.Dialog()
            ok = dialog.ok( plugin , settings.getLocalizedString( 30013 ) + '\n' + settings.getLocalizedString( 30014 ) )
            remove_menu(newStr,'search')
            return
        img = common.parseDOM(results, "a", attrs = { "class": "list-thumb" })
        desc = common.parseDOM(results, "p")
        thumb = common.parseDOM(img, "img", ret = "src" )
        pagenum = common.parseDOM(html, "div", attrs = { "class": "page-numbers" })[0]
        maxlength = common.parseDOM(pagenum, "span")[1].replace('of ','')
        value = common.parseDOM(pagenum, "input", attrs = { "id": "page-number" }, ret = "value" )[0]
        pagecount = [ value, maxlength ]  # [current page, last page]
    else:
        # --- library playlist mode ---
        nexturl = url
        html = getUrl(url + "&pageNum=" + str(int(page)) + "&sortBy=&assetURL=http://assets.espn.go.com&module=LibraryPlaylist&pagename=vhub_index")
        videocell = common.parseDOM(html, "div", attrs = { "class": "video-cell" })
        title = common.parseDOM(videocell, "h5")
        thumb = common.parseDOM(videocell, "img", ret = "src")
        desc = common.parseDOM(common.parseDOM(videocell, "p", attrs = { "class": "watch-now" }), "a", ret = "href")
        pagecount = common.parseDOM(html, "div", attrs = { "class": "page-numbers" })[0].rsplit(' of ')
    item_count = 0
    for name in title:
        if '/espn360/' not in thumb[item_count]:  # skip espn360 (unplayable) entries
            if 'http://' in desc[item_count]:
                plot = name  # desc is a link, not a synopsis — fall back to the title
            else:
                plot = desc[item_count]
            # Derive the internal video id from the thumbnail URL.
            try:
                data = thumb[item_count].replace('_thumdnail_wbig.jpg','').replace('.jpg','').rsplit('motion/')
                url = data[1]
            except:
                # No 'motion/' marker: rebuild the id from the last 4 path segments.
                data = thumb[item_count].replace('_thumdnail_wbig.jpg','').replace('.jpg','').rsplit('/')[-4:]
                if len(data) >= 4:
                    url = data[0] + '/' + data[1] + '/' + data[2] + '/' + data[3]
                else:
                    url = 'null'
            thumbnailImage = thumb[item_count].replace('_thumdnail_wbig','')
            u = { 'mode': '3', 'name': name, 'url': url.replace('motion/',''), 'thumb': thumbnailImage, 'plot': plot }
            infoLabels = { "Title": name, "Plot": plot }
            addListItem(label = name, image = thumbnailImage, url = u, isFolder = False, infoLabels = infoLabels)
        item_count += 1
    # Add a 'Next page' entry unless we're on the last page.
    if pagecount[0] != pagecount[1]:
        u = { 'mode': '2', 'name': nextname, 'url': nexturl, 'page': str(int(page) + 1), 'type': 'null' }
        infoLabels = { "Title": settings.getLocalizedString( 30003 ), "Plot": settings.getLocalizedString( 30003 ) }
        addListItem(label = settings.getLocalizedString( 30003 ), image = next_thumb, url = u, isFolder = True, infoLabels = infoLabels)
    xbmcplugin.addSortMethod( handle = int(sys.argv[1]), sortMethod = xbmcplugin.SORT_METHOD_NONE )
    setViewMode("503")
    xbmcplugin.endOfDirectory( int( sys.argv[1] ) )
def build_history_directory():
    """List saved search terms, each with Remove/Edit context-menu actions."""
    presets = settings.getSetting( "presets_search" )
    if presets != '':
        save = presets.split( " | " )  # terms are stored ' | '-separated
    else:
        save = []
    cm = []
    for name in save:
        url = 'http://search.espn.go.com/results?searchString=' + name + '&start=0&dims=6'
        name = name.replace('%20', ' ')  # display form (spaces, not %20)
        # Context-menu entries dispatch back into this plugin: mode 5 = remove, 6 = edit.
        cm = [ ( 'Remove', "XBMC.RunPlugin(%s?mode=5&name=%s&url=%s)" % ( sys.argv[ 0 ], urllib.quote_plus(name), urllib.quote_plus('search') ), ) ]
        cm += [ ( 'Edit', "XBMC.RunPlugin(%s?mode=6&name=%s&url=%s)" % ( sys.argv[ 0 ], urllib.quote_plus(name), urllib.quote_plus('search') ), ) ]
        listitem = xbmcgui.ListItem( label = name, iconImage = "DefaultFolder.png", thumbnailImage = history_thumb )
        listitem.setInfo(type = 'video', infoLabels = { "Title": name, "Plot": name })
        listitem.addContextMenuItems( cm, replaceItems = False )
        # Selecting the item replays the saved search (mode 2, type 'history').
        u = sys.argv[0] + "?mode=2" + "&name=" + urllib.quote_plus( settings.getLocalizedString( 30005 ) ) + "&url=" + urllib.quote_plus( url ) + "&type=" + urllib.quote_plus( 'history' )
        ok = xbmcplugin.addDirectoryItem( handle = int( sys.argv[1] ), url = u, listitem = listitem, isFolder = True )
    xbmcplugin.addSortMethod( handle = int(sys.argv[1]), sortMethod = xbmcplugin.SORT_METHOD_NONE )
    setViewMode("503")
    xbmcplugin.endOfDirectory( int( sys.argv[1] ) )
def remove_menu(name, url):
    """Delete one saved search term from the presets_search setting and refresh."""
    saved = settings.getSetting( "presets_search" ).split( " | " )
    # Terms are stored with '%20' for spaces; drop the first occurrence.
    saved.remove(name.replace(' ', '%20'))
    settings.setSetting("presets_search", " | ".join(saved))
    xbmc.executebuiltin( "Container.Refresh" )
def edit_menu(name, url):
    """Replace one saved search term with user-edited text, then refresh."""
    saved = settings.getSetting( "presets_search" ).split( " | " )
    # Drop the old term (stored with '%20' for spaces) before asking for the new one.
    saved.remove(name.replace(' ', '%20'))
    newStr = common.getUserInput(settings.getLocalizedString( 30008 ), name).replace(' ', '%20')
    if saved:
        sets = " | ".join(saved) + ' | ' + newStr
    else:
        sets = newStr
    settings.setSetting("presets_search", sets)
    xbmc.executebuiltin( "Container.Refresh" )
def play_video(url, name, thumb, plot):
    """Resolve a video id to its RTMP stream and hand it to the player.

    :param url: internal video id derived from the thumbnail URL
    :param name/thumb/plot: display metadata for the list item
    """
    infoLabels = { "Title": name, "Studio": plugin, "Plot": plot }
    # Probe the SMIL manifest first so dead links show a dialog instead of a player error.
    result = getPage("http://vod.espn.go.com/motion/" + url + ".smil?FLVPlaybackVersion=2.1")
    if '404' in str(result["error"]):
        dialog = xbmcgui.Dialog()
        ok = dialog.ok(plugin, settings.getLocalizedString( 30004 ))
        return
    else:
        playpath = "mp4:" + url + "_" + settings.getSetting("quality") + ".mp4"
        # Address a bug in early helix versions:
        try:
            version = xbmc_version = xbmc.getInfoLabel( "System.BuildVersion" )
            version = float(version[:4])
            # Kodi 14.0-14.1 need the full media path appended to the RTMP URL.
            if version >= 14.0 and version < 14.2:
                url = 'rtmp://svod.espn.go.com/motion/' + url + "_" + settings.getSetting("quality") + ".mp4"
            else:
                url = 'rtmp://svod.espn.go.com/motion/'
        except:
            # Unparseable build string: fall back to the plain RTMP base URL.
            url = 'rtmp://svod.espn.go.com/motion/'
        playListItem(label = name, image = thumb, path = url, infoLabels = infoLabels, PlayPath = playpath)
# ---- Plugin entry point: decode invocation parameters and dispatch ----
params = getParameters(sys.argv[2])
mode = None
name = None
url = None
thumb = None
plot = None
page = 0  # read as a global by build_video_directory
# Each parameter is optional; missing keys simply keep the defaults above.
try:
    url=urllib.unquote_plus(params["url"])
except:
    pass
try:
    name=urllib.unquote_plus(params["name"])
except:
    pass
try:
    mode=int(params["mode"])
except:
    pass
try:
    page=int(params["page"])
except:
    pass
try:
    plot=urllib.unquote_plus(params["plot"])
except:
    pass
try:
    thumb = urllib.unquote_plus(params["thumb"])
except:
    pass
try:
    type = urllib.unquote_plus(params["type"])
except:
    pass
# Dispatch on mode; any uncaught error is reported through buggalo.
try:
    if mode == None:
        build_main_directory()
    elif mode == 1:
        build_sub_directory(url, thumb)
    elif mode == 2:
        build_video_directory(url, name, type)
    elif mode == 3:
        play_video(url, name, thumb, plot)
    elif mode == 4:
        build_history_directory()
    elif mode == 5:
        remove_menu(name, url)
    elif mode == 6:
        edit_menu(name, url)
except Exception:
    buggalo.onExceptionRaised()
|
stacked/plugin.video.espn.video
|
default.py
|
Python
|
gpl-2.0
| 12,998
|
import os
import shutil
import tempfile
import vmprof
import prof_six as six
from _prof_imports import TreeStats, CallTreeStat
class VmProfProfile(object):
    """ Wrapper class that represents VmProf Python profiling backend with API matching
    the cProfile.
    """

    def __init__(self):
        self.stats = None        # populated lazily; create_stats() currently leaves it None
        self.basepath = None     # directory for temporary profile files; set by the caller
        self.file = None         # NamedTemporaryFile receiving vmprof output
        self.is_enabled = False  # guards against double enable/disable

    def runcall(self, func, *args, **kw):
        """Profile a single call of `func`, re-raising anything it raises."""
        self.enable()
        try:
            return func(*args, **kw)
        finally:
            self.disable()

    def enable(self):
        """Start vmprof sampling into a fresh temp file under self.basepath."""
        if not self.is_enabled:
            if not os.path.exists(self.basepath):
                os.makedirs(self.basepath)
            self.file = tempfile.NamedTemporaryFile(delete=False, dir=self.basepath)
            # Older vmprof versions don't support line profiling; fall back.
            try:
                vmprof.enable(self.file.fileno(), lines=True)
            except:
                vmprof.enable(self.file.fileno())
            self.is_enabled = True

    def disable(self):
        """Stop sampling and close (but keep) the profile file."""
        if self.is_enabled:
            vmprof.disable()
            self.file.close()
            self.is_enabled = False

    def create_stats(self):
        # cProfile-API placeholder: vmprof data is read from the file instead.
        return None

    def getstats(self):
        self.create_stats()
        return self.stats

    def dump_stats(self, file):
        """Copy the raw vmprof output to `file`."""
        shutil.copyfile(self.file.name, file)

    def _walk_tree(self, parent, node, callback):
        # NOTE(review): mirrors the module-level _walk_tree helper below but
        # recurses with `node` (not the converted tree) as parent and has no
        # None guard — confirm whether this method is still used.
        tree = callback(parent, node)
        for c in six.itervalues(node.children):
            self._walk_tree(node, c, callback)
        return tree

    def tree_stats_to_response(self, filename, response):
        # Delegates to the module-level function of the same name.
        tree_stats_to_response(filename, response)

    def snapshot_extension(self):
        """File extension for saved snapshots."""
        return '.prof'
def _walk_tree(parent, node, callback):
    """Depth-first walk: call `callback(parent, node)` for every node, passing
    each converted node as the parent of its children. Returns the converted
    root, or None for a None `node`."""
    if node is None:
        return None
    converted = callback(parent, node)
    for child in six.itervalues(node.children):
        _walk_tree(converted, child, callback)
    return converted
def tree_stats_to_response(filename, response):
    """Read a vmprof profile file and fill `response.tree_stats` with a
    CallTreeStat tree mirroring the sampled call tree."""
    stats = vmprof.read_profile(filename)
    response.tree_stats = TreeStats()
    response.tree_stats.sampling_interval = vmprof.DEFAULT_PERIOD
    # A profile with no samples yields an empty call tree (None).
    try:
        tree = stats.get_tree()
    except vmprof.stats.EmptyProfileFile:
        tree = None
    def convert(parent, node):
        # Translate one vmprof node into a CallTreeStat, attaching it to `parent`.
        tstats = CallTreeStat()
        tstats.name = node.name
        tstats.count = node.count
        tstats.children = []
        tstats.line_count = getattr(node, 'lines', {})
        if parent is not None:
            if parent.children is None:
                parent.children = []
            parent.children.append(tstats)
        return tstats
    response.tree_stats.call_tree = _walk_tree(None, tree, convert)
|
siosio/intellij-community
|
python/helpers/profiler/vmprof_profiler.py
|
Python
|
apache-2.0
| 2,691
|
#!/usr/bin/env python3
"""
The MIT License (MIT)
Copyright (c) 2016 davide
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
""" Prende i dati dalla chiave Redis (*:Valori) passata come argomento all'avvio,
cerca fra i parametri si start e stop (data minore, data maggiore),
elabora e ricrea il file .csv
"""
import os,time,json,redis,sys
import mjl, mhl, flt  # Not all are needed — imported out of habit ;)
DirBase="/var/www"  # Better to specify the absolute path
ConfigFile=DirBase+"/conf/config.json"
# Open the Redis database via our library helper
MyDB = flt.OpenDBFile(ConfigFile)
# Check the argument count, or whether help was requested
if len(sys.argv) != 4 or sys.argv[1] == "-h":
    print ("\n\tUso: %s <RedisKey> <Start> <Stop>" % sys.argv[0])
    print ("""
Questo programma prende una chiave Redis contenente i valori (*:Valori),
elabora, e crea il file .csv
""")
    exit()
if len(sys.argv) == 4 and MyDB.exists(sys.argv[1]):
    # Name variables for convenience and program clarity
    Key=sys.argv[1]
    print ("Key: \t\t\t", Key)
    # Fields 4 and 5 are used (sets:NOME:ID) because alarms, graphs and
    # other things (if added later) could produce duplicate names.
    FileName=DirBase+"/"+Key.split(":")[4]+Key.split(":")[5]+".csv"
    if os.path.isfile(FileName):
        print ("Deleting: \t\t\"%s\"" % FileName)
        os.remove(FileName)  # Delete the file if it already exists
    IntestazioneCSV="Data"
    IntestazioneCSV=IntestazioneCSV+","+Key.split(":")[4]  # field 4 is the type (temperature/pir/..)
    FileTemp = open(FileName,"w")
    FileTemp.write(IntestazioneCSV+"\n")  # Write the CSV header
    for i in range (MyDB.llen(Key)):
        ValoreCSV=flt.Decode(MyDB.lindex(Key,i))
        # Keep only rows whose leading date string falls between Start and Stop
        if sys.argv[2] < ValoreCSV < sys.argv[3] :
            FileTemp.write(ValoreCSV+"\n")
    FileTemp.close()
    print ("[re]Generated file: \t\"{}\"".format(FileName))
elif not MyDB.exists(sys.argv[1]):
    print ("Chiave inesistente", sys.argv[1])
|
raspibo/Livello1
|
var/www/cgi-bin/valori2csv_search_date.py
|
Python
|
mit
| 2,996
|
"""SQLAlchemy Metadata and Session object"""
from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker
__all__ = ['Session', 'metadata']
# SQLAlchemy database engine. Updated by model.init_model()
engine = None
# SQLAlchemy session manager. Updated by model.init_model()
Session = None
# Global metadata. If you have multiple databases with overlapping table
# names, you'll need a metadata for each database
metadata = MetaData()
# Table base class for declarative
TableBase = declarative_base(metadata=metadata)
|
veekun/spline
|
spline/model/meta.py
|
Python
|
mit
| 607
|
/*
* Copyright (C) 2017 glbrimhall.com
*
* This file is part of an Elevator Simulator program written in java.
*
* Elevator Simulator program is free software: you can redistribute it and/or
* modify it under the terms of the GNU General Public License as published
* by the Free Software Foundation, version 3 of the License.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.glbrimhall.elevator;
import static com.glbrimhall.elevator.ParseCommand.OK;
/**
 * The ParseHelp class maps the "help" user input to
 * the command parser's {@link ParseCommand } help listing (showHelp()).
 */
public class ParseHelp extends ParseCommand {

    /** Registers the "help" keyword and its one-line description. */
    public ParseHelp() {
        super( "help", "list available commands to control the simulator " );
    }

    /** Prints the list of available commands; always reports success. */
    @Override
    public String Parse( String cmd ) {
        elevatorSystem.getParser().showHelp();
        return OK;
    }
}
|
glbrimhall/elevator
|
python/src/ParseHelp.py
|
Python
|
gpl-3.0
| 1,255
|
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import datetime
import re
import unicodedata
from decimal import Decimal, InvalidOperation
from itertools import islice
from collections import Iterator
from dateutil.parser import parse as parse_date
from weboob.capabilities.base import empty
from weboob.tools.compat import basestring
from weboob.exceptions import ParseError
from weboob.browser.url import URL
from weboob.tools.log import getLogger, DEBUG_FILTERS
class NoDefault(object):
    """Sentinel type whose instances mark "no default value supplied",
    distinguishing an absent default from an explicit ``None``."""

    def __repr__(self):
        return 'NO_DEFAULT'
# Shared sentinel instance used as the default `default=` argument of filters.
_NO_DEFAULT = NoDefault()

# Public API of this module.
__all__ = ['FilterError', 'ColumnNotFound', 'RegexpError', 'ItemNotFound',
           'Filter', 'Base', 'Env', 'TableCell', 'RawText',
           'CleanText', 'Lower', 'CleanDecimal', 'Field', 'Regexp', 'Map',
           'DateTime', 'Date', 'Time', 'DateGuesser', 'Duration',
           'MultiFilter', 'CombineDate', 'Format', 'Join', 'Type', 'Eval',
           'BrowserURL', 'Async', 'AsyncLoad']
class FilterError(ParseError):
    """Base class for all errors raised by filters."""
    pass


class ColumnNotFound(FilterError):
    """A TableCell column name could not be resolved."""
    pass


class RegexpError(FilterError):
    """A Regexp filter's pattern did not match."""
    pass


class ItemNotFound(FilterError):
    """A Map/lookup filter found no matching item."""
    pass
class _Filter(object):
    """Abstract base of all filters.

    Provides the default-value machinery and the ``|`` / ``&`` chaining
    operators. The class-level counter records declaration order so the
    owning element can evaluate filters in the order they were written.
    """
    _creation_counter = 0

    def __init__(self, default=_NO_DEFAULT):
        self._key = None   # attribute name on the owner — presumably set by the owning element; confirm
        self._obj = None   # owning element — presumably set by the owning element; confirm
        self.default = default
        # Snapshot then bump the global counter to preserve declaration order.
        self._creation_counter = _Filter._creation_counter
        _Filter._creation_counter += 1

    def __or__(self, o):
        # `f | fallback` sets `fallback` as this filter's default value.
        self.default = o
        return self

    def __and__(self, o):
        # `f & OtherFilter` chains: OtherFilter (instantiated if given as a
        # class) will use this filter as its selector.
        if isinstance(o, type) and issubclass(o, _Filter):
            o = o()
        o.selector = self
        return o

    def default_or_raise(self, exception):
        """Return the configured default, or raise `exception` if none was set."""
        if self.default is not _NO_DEFAULT:
            return self.default
        else:
            raise exception

    def __str__(self):
        return self.__class__.__name__
def debug(*args):
    """
    A decorator function to provide some debug information
    in Filters.
    It prints by default the name of the Filter and the input value.
    """
    def wraper(function):
        def print_debug(self, value):
            logger = getLogger('b2filters')
            result = ''
            outputvalue = value
            if isinstance(value, list):
                # Render a list of lxml elements (or mixed values) as a
                # comma-separated string for the log line.
                from lxml import etree
                outputvalue = ''
                first = True
                for element in value:
                    if first:
                        first = False
                    else:
                        outputvalue += ', '
                    if isinstance(element, etree.ElementBase):
                        outputvalue += "%s" % etree.tostring(element, encoding=unicode)
                    else:
                        outputvalue += "%r" % element
            # Prefix with the owning object's id and the attribute key, if known.
            if self._obj is not None:
                result += "%s" % self._obj._random_id
                if self._key is not None:
                    result += ".%s" % self._key
            name = str(self)
            result += " %s(%r" % (name, outputvalue)
            # Append the filter's public constructor-style arguments,
            # skipping private attributes, the selector, and unset defaults.
            for arg in self.__dict__:
                if arg.startswith('_') or arg == u"selector":
                    continue
                if arg == u'default' and getattr(self, arg) == _NO_DEFAULT:
                    continue
                result += ", %s=%r" % (arg, getattr(self, arg))
            result += u')'
            logger.log(DEBUG_FILTERS, result)
            res = function(self, value)
            return res
        return print_debug
    return wraper
class Filter(_Filter):
    """
    Class used to filter on a HTML element given as call parameter to return
    matching elements.
    Filters can be chained, so the parameter supplied to constructor can be
    either a xpath selector string, or an other filter called before.
    >>> from lxml.html import etree
    >>> f = CleanDecimal(CleanText('//p'), replace_dots=True)
    >>> f(etree.fromstring('<html><body><p>blah: <span>229,90</span></p></body></html>'))
    Decimal('229.90')
    """

    def __init__(self, selector=None, default=_NO_DEFAULT):
        super(Filter, self).__init__(default=default)
        self.selector = selector

    @classmethod
    def select(cls, selector, item, obj=None, key=None):
        # Resolve `selector` against `item`, depending on its type:
        if isinstance(selector, basestring):
            # an xpath expression string
            return item.xpath(selector)
        elif isinstance(selector, _Filter):
            # a chained filter: propagate the debug context, then evaluate it
            selector._key = key
            selector._obj = obj
            return selector(item)
        elif callable(selector):
            # an arbitrary callable
            return selector(item)
        else:
            # a constant value, returned as-is
            return selector

    def __call__(self, item):
        return self.filter(self.select(self.selector, item, key=self._key, obj=self._obj))

    @debug()
    def filter(self, value):
        """
        This method have to be overrided by children classes.
        """
        raise NotImplementedError()
class _Selector(Filter):
    """Pass the selected elements through unchanged, or fall back to the
    default (or raise ParseError) when nothing was selected."""
    def filter(self, elements):
        # Guard clause: a missing selection triggers the default/raise path.
        if elements is None:
            return self.default_or_raise(ParseError('Element %r not found' % self.selector))
        return elements
class AsyncLoad(Filter):
    """Select a link from the item and open it asynchronously via the
    item's browser; returns the pending request object, or None."""
    def __call__(self, item):
        link = self.select(self.selector, item, key=self._key, obj=self._obj)
        # No link selected -> no async request is started.
        return item.page.browser.async_open(link) if link else None
class Async(_Filter):
    """Run ``selector`` against the document of a page loaded asynchronously
    by the named loader (see AsyncLoad)."""
    def __init__(self, name, selector=None):
        super(Async, self).__init__()
        self.selector = selector
        self.name = name
    def __and__(self, o):
        # Support the 'Async("x") & SomeFilter' chaining syntax; a bare
        # filter class is instantiated with no arguments.
        if isinstance(o, type) and issubclass(o, _Filter):
            o = o()
        self.selector = o
        return self
    def __call__(self, item):
        if item.loaders[self.name] is None:
            return None
        # Block until the asynchronous request has completed.
        result = item.loaders[self.name].result()
        assert result.page is not None, 'The loaded url %s hasn\'t been matched by an URL object' % result.url
        return self.selector(result.page.doc)
class Base(Filter):
    """
    Change the base element used in filters.
    >>> Base(Env('header'), CleanText('./h1')) # doctest: +SKIP
    """
    def __init__(self, base, selector=None, default=_NO_DEFAULT):
        super(Base, self).__init__(selector, default)
        self.base = base
    def __call__(self, item):
        # Resolve the new root element first, then apply the wrapped
        # selector relative to it.
        new_root = self.select(self.base, item, obj=self._obj, key=self._key)
        return self.selector(new_root)
class Env(_Filter):
    """
    Filter to get environment value of the item.
    It is used for example to get page parameters, or when there is a parse()
    method on ItemElement.
    """
    def __init__(self, name, default=_NO_DEFAULT):
        super(Env, self).__init__(default)
        self.name = name
    def __call__(self, item):
        # Missing variables fall back to the default (or raise).
        if self.name in item.env:
            return item.env[self.name]
        return self.default_or_raise(ParseError('Environment variable %s not found' % self.name))
class TableCell(_Filter):
    """
    Used with TableElement, it get the cell value from its name.
    For example:
    >>> from weboob.capabilities.bank import Transaction
    >>> from weboob.browser.elements import TableElement, ItemElement
    >>> class table(TableElement):
    ...     head_xpath = '//table/thead/th'
    ...     item_xpath = '//table/tbody/tr'
    ...     col_date =    u'Date'
    ...     col_label =   [u'Name', u'Label']
    ...     class item(ItemElement):
    ...         klass = Transaction
    ...         obj_date = Date(TableCell('date'))
    ...         obj_label = CleanText(TableCell('label'))
    ...
    """
    def __init__(self, *names, **kwargs):
        super(TableCell, self).__init__(**kwargs)
        self.names = names
    def __call__(self, item):
        # Try each candidate column name until one exists in the table head.
        for name in self.names:
            idx = item.parent.get_colnum(name)
            if idx is not None:
                # XPath positions are 1-based, hence idx + 1.
                return item.xpath('./td[%s]' % (idx + 1))
        return self.default_or_raise(ColumnNotFound('Unable to find column %s' % ' or '.join(self.names)))
class RawText(Filter):
    """Return the raw ``.text`` of an element without any cleaning.

    A list/tuple of elements is joined with single spaces; a missing
    ``.text`` yields the filter's default.
    """
    @debug()
    def filter(self, el):
        # A selection may yield several elements: recurse and join.
        if isinstance(el, (tuple, list)):
            return u' '.join([self.filter(e) for e in el])
        if el.text is None:
            return self.default
        else:
            # Coerce to unicode (Python 2: lxml may return str for pure ASCII).
            return unicode(el.text)
class CleanText(Filter):
    """
    Get a cleaned text from an element.
    It first replaces all tabs and multiple spaces
    (including newlines if ``newlines`` is True)
    to one space and strips the result string.
    The result is coerced into unicode, and optionally normalized
    according to the ``normalize`` argument.
    Then it replaces all symbols given in the ``symbols`` argument.
    >>> CleanText().filter('coucou ')
    u'coucou'
    >>> CleanText().filter(u'coucou\xa0coucou')
    u'coucou coucou'
    >>> CleanText(newlines=True).filter(u'coucou\\r\\n coucou ')
    u'coucou coucou'
    >>> CleanText(newlines=False).filter(u'coucou\\r\\n coucou ')
    u'coucou\\ncoucou'
    """
    # NOTE(review): replace=[] is a mutable default argument; harmless here
    # because it is only iterated, never mutated.
    def __init__(self, selector=None, symbols='', replace=[], children=True, newlines=True, normalize='NFC', **kwargs):
        super(CleanText, self).__init__(selector, **kwargs)
        self.symbols = symbols
        self.toreplace = replace
        self.children = children
        self.newlines = newlines
        self.normalize = normalize
    @debug()
    def filter(self, txt):
        # A selection of several elements is cleaned piecewise then joined.
        if isinstance(txt, (tuple, list)):
            txt = u' '.join([self.clean(item, children=self.children) for item in txt])
        txt = self.clean(txt, self.children, self.newlines, self.normalize)
        txt = self.remove(txt, self.symbols)
        txt = self.replace(txt, self.toreplace)
        # ensure it didn't become str by mistake
        return unicode(txt)
    @classmethod
    def clean(cls, txt, children=True, newlines=True, normalize='NFC'):
        # Non-string input is assumed to be an lxml element: gather its text.
        if not isinstance(txt, basestring):
            if children:
                txt = [t.strip() for t in txt.itertext()]
            else:
                txt = [txt.text.strip()]
            txt = u' '.join(txt)                 # 'foo bar'
        if newlines:
            # Collapse any run of whitespace (incl. newlines) to one space.
            txt = re.compile(u'\s+', flags=re.UNICODE).sub(u' ', txt)  # 'foo bar'
        else:
            # normalize newlines and clean what is inside
            txt = '\n'.join([cls.clean(l) for l in txt.splitlines()])
        txt = txt.strip()
        # lxml under Python 2 returns str instead of unicode if it is pure ASCII
        txt = unicode(txt)
        # normalize to a standard Unicode form
        if normalize:
            txt = unicodedata.normalize(normalize, txt)
        return txt
    @classmethod
    def remove(cls, txt, symbols):
        # Strip every character listed in `symbols`, then trim whitespace.
        for symbol in symbols:
            txt = txt.replace(symbol, '')
        return txt.strip()
    @classmethod
    def replace(cls, txt, replace):
        # `replace` is an iterable of (before, after) pairs.
        for before, after in replace:
            txt = txt.replace(before, after)
        return txt
class Lower(CleanText):
    """CleanText variant that additionally lowercases the cleaned result."""
    @debug()
    def filter(self, txt):
        # Clean first, then fold to lower case.
        return super(Lower, self).filter(txt).lower()
class CleanDecimal(CleanText):
    """
    Get a cleaned Decimal value from an element.
    replace_dots is False by default. A dot is interpreted as a decimal separator.
    If replace_dots is set to True, we remove all the dots. The ',' is used as decimal
    separator (often useful for French values)
    If replace_dots is a tuple, the first element will be used as the thousands separator,
    and the second as the decimal separator.
    See http://en.wikipedia.org/wiki/Thousands_separator#Examples_of_use
    For example, for the UK style (as in 1,234,567.89):
    >>> CleanDecimal('./td[1]', replace_dots=(',', '.'))  # doctest: +SKIP
    """
    def __init__(self, selector=None, replace_dots=False, sign=None, default=_NO_DEFAULT):
        super(CleanDecimal, self).__init__(selector, default=default)
        self.replace_dots = replace_dots
        # `sign` is a callable applied to the original text; its return value
        # multiplies the parsed Decimal (e.g. to derive -1 from a minus mark).
        self.sign = sign
    @debug()
    def filter(self, text):
        # Numeric inputs are stringified first (Python 2: includes `long`).
        if type(text) in (float, int, long):
            text = str(text)
        if empty(text):
            return self.default_or_raise(ParseError('Unable to parse %r' % text))
        original_text = text = super(CleanDecimal, self).filter(text)
        if self.replace_dots:
            if type(self.replace_dots) is tuple:
                thousands_sep, decimal_sep = self.replace_dots
            else:
                # replace_dots=True means French style: '.' thousands, ',' decimal.
                thousands_sep, decimal_sep = '.', ','
            text = text.replace(thousands_sep, '').replace(decimal_sep, '.')
        try:
            # Keep only digits, minus signs and dots before parsing.
            v = Decimal(re.sub(r'[^\d\-\.]', '', text))
            if self.sign:
                v *= self.sign(original_text)
            return v
        except InvalidOperation as e:
            return self.default_or_raise(e)
class Slugify(Filter):
    """Turn a label into a URL-friendly slug: lowercase, alphanumerics
    only, words separated by single dashes."""
    @debug()
    def filter(self, label):
        # Replace every non-alphanumeric with a space, then collapse runs
        # of whitespace into single dashes.
        cleaned = re.sub(r'[^A-Za-z0-9]', ' ', label.lower()).strip()
        return re.sub(r'\s+', '-', cleaned)
class Type(Filter):
    """
    Get a cleaned value of any type from an element text.
    The type_func can be any callable (class, function, etc.).
    By default an empty string will not be parsed but it can be changed
    by specifying minlen=False. Otherwise, a minimal length can be specified.
    >>> Type(CleanText('./td[1]'), type=int)  # doctest: +SKIP
    >>> Type(type=int).filter('42')
    42
    >>> Type(type=int, default='NaN').filter('')
    'NaN'
    >>> Type(type=str, minlen=False, default='a').filter('')
    ''
    >>> Type(type=str, minlen=0, default='a').filter('')
    'a'
    """
    def __init__(self, selector=None, type=None, minlen=0, default=_NO_DEFAULT):
        super(Type, self).__init__(selector, default=default)
        self.type_func = type
        self.minlen = minlen
    @debug()
    def filter(self, txt):
        if empty(txt):
            return self.default_or_raise(ParseError('Unable to parse %r' % txt))
        # minlen=False disables the length check entirely.
        if self.minlen is not False and len(txt) <= self.minlen:
            return self.default_or_raise(ParseError('Unable to parse %r' % txt))
        try:
            return self.type_func(txt)
        except ValueError as e:
            return self.default_or_raise(ParseError('Unable to parse %r: %s' % (txt, e)))
class Field(_Filter):
    """
    Get the attribute of object.
    """
    def __init__(self, name):
        super(Field, self).__init__()
        self.name = name
    def __call__(self, item):
        # Delegate to the item's own 'obj_<name>' selector machinery.
        return item.use_selector(getattr(item, 'obj_%s' % self.name), key=self._key)
# Based on nth from https://docs.python.org/2/library/itertools.html
def nth(iterable, n, default=None):
    "Returns the nth item or a default value, n can be negative, or '*' for all"
    if n == '*':
        # Special case: hand back the whole iterable untouched.
        return iterable
    if n < 0:
        # Negative index: walk the reversed sequence instead.
        iterable = reversed(list(iterable))
        n = -n - 1
    return next(islice(iterable, n, None), default)
def ordinal(n):
    "To have some readable debug information: '*' => all, 0 => 1st, 1 => 2nd..."
    if n == '*':
        return 'all'
    magnitude = abs(n)
    # Shift away from zero so index 0 reads as "1st", -1 as "-2nd", etc.
    shifted = n - 1 if n < 0 else n + 1
    suffix = 'th' if magnitude > 2 else ('st', 'nd', 'rd')[magnitude]
    return str(shifted) + suffix
class Regexp(Filter):
    r"""
    Apply a regex.
    >>> from lxml.html import etree
    >>> doc = etree.fromstring('<html><body><p>Date: <span>13/08/1988</span></p></body></html>')
    >>> Regexp(CleanText('//p'), r'Date: (\d+)/(\d+)/(\d+)', '\\3-\\2-\\1')(doc)
    u'1988-08-13'
    >>> (Regexp(CleanText('//body'), r'(\d+)', nth=1))(doc)
    u'08'
    >>> (Regexp(CleanText('//body'), r'(\d+)', nth=-1))(doc)
    u'1988'
    >>> (Regexp(CleanText('//body'), r'(\d+)', template='[\\1]', nth='*'))(doc)
    [u'[13]', u'[08]', u'[1988]']
    """
    def __init__(self, selector=None, pattern=None, template=None, nth=0, flags=0, default=_NO_DEFAULT):
        super(Regexp, self).__init__(selector, default=default)
        assert pattern is not None
        self.pattern = pattern
        # Compile once at construction time; `pattern` is kept for messages.
        self._regex = re.compile(pattern, flags)
        self.template = template
        self.nth = nth
    def expand(self, m):
        # Without a template, return the first non-None capture group.
        if self.template is None:
            return next(g for g in m.groups() if g is not None)
        # Template may be a callable on the match, or an expand() string.
        return self.template(m) if callable(self.template) else m.expand(self.template)
    @debug()
    def filter(self, txt):
        if isinstance(txt, (tuple, list)):
            txt = u' '.join([t.strip() for t in txt.itertext()])
        # nth == 0 is the common fast path; otherwise pick the nth match
        # (possibly negative, or '*' for all -- see nth()).
        m = self._regex.search(txt) if self.nth == 0 else \
            nth(self._regex.finditer(txt), self.nth)
        if not m:
            msg = 'Unable to find %s %s in %r' % (ordinal(self.nth), self.pattern, txt)
            return self.default_or_raise(RegexpError(msg))
        # nth='*' yields an iterator of matches: expand each one.
        # (Python 2: map returns a list, matching the docstring example.)
        if isinstance(m, Iterator):
            return map(self.expand, m)
        return self.expand(m)
class Map(Filter):
    """Translate the selected value through a lookup dictionary."""
    def __init__(self, selector, map_dict, default=_NO_DEFAULT):
        super(Map, self).__init__(selector, default=default)
        self.map_dict = map_dict
    @debug()
    def filter(self, txt):
        # Unknown keys fall back to the default (or raise ItemNotFound).
        if txt in self.map_dict:
            return self.map_dict[txt]
        return self.default_or_raise(ItemNotFound('Unable to handle %r on %r' % (txt, self.map_dict)))
class DateTime(Filter):
    """Parse the selected text into a datetime via dateutil's parser.

    ``translations`` is an iterable of (compiled_regex, replacement) pairs
    applied to the text before parsing (e.g. to translate month names).
    """
    def __init__(self, selector=None, default=_NO_DEFAULT, dayfirst=False, translations=None):
        super(DateTime, self).__init__(selector, default=default)
        self.dayfirst = dayfirst
        self.translations = translations
    @debug()
    def filter(self, txt):
        if empty(txt) or txt == '':
            return self.default_or_raise(ParseError('Unable to parse %r' % txt))
        try:
            if self.translations:
                for search, repl in self.translations:
                    txt = search.sub(repl, txt)
            return parse_date(txt, dayfirst=self.dayfirst)
        except (ValueError, TypeError) as e:
            return self.default_or_raise(ParseError('Unable to parse %r: %s' % (txt, e)))
class Date(DateTime):
    """DateTime variant that returns a date rather than a full datetime."""
    def __init__(self, selector=None, default=_NO_DEFAULT, dayfirst=False, translations=None):
        super(Date, self).__init__(selector, default=default, dayfirst=dayfirst, translations=translations)
    @debug()
    def filter(self, txt):
        # Parse as a datetime, then drop the time part when possible
        # (a default value may not have a .date() method).
        parsed = super(Date, self).filter(txt)
        if hasattr(parsed, 'date'):
            return parsed.date()
        return parsed
class DateGuesser(Filter):
    """Guess a full date from a (day, month) pair using a date_guesser
    object's guess_date() method."""
    def __init__(self, selector, date_guesser, **kwargs):
        super(DateGuesser, self).__init__(selector)
        self.date_guesser = date_guesser
        # Extra keyword arguments are forwarded to guess_date().
        self.kwargs = kwargs
    def __call__(self, item):
        values = self.select(self.selector, item, obj=self._obj, key=self._key)
        date_guesser = self.date_guesser
        # In case Env() is used to give date_guesser.
        if isinstance(date_guesser, _Filter):
            date_guesser = self.select(date_guesser, item, obj=self._obj, key=self._key)
        # Accept either a "dd/mm" / "dd-mm" string or a 2-item sequence.
        if isinstance(values, basestring):
            values = re.split('[/-]', values)
        if len(values) == 2:
            day, month = map(int, values)
        else:
            raise ParseError('Unable to take (day, month) tuple from %r' % values)
        return date_guesser.guess_date(day, month, **self.kwargs)
class Time(Filter):
    """Parse 'hh:mm[:ss]' text into a datetime.time (the ':' separators
    are optional; missing seconds default to 0)."""
    klass = datetime.time
    _regexp = re.compile(r'(?P<hh>\d+):?(?P<mm>\d+)(:(?P<ss>\d+))?')
    # Maps constructor kwarg name -> regexp group name (overridden by Duration).
    kwargs = {'hour': 'hh', 'minute': 'mm', 'second': 'ss'}
    def __init__(self, selector=None, default=_NO_DEFAULT):
        super(Time, self).__init__(selector, default=default)
    @debug()
    def filter(self, txt):
        m = self._regexp.search(txt)
        if m:
            kwargs = {}
            # Unmatched optional groups (e.g. seconds) default to 0.
            for key, index in self.kwargs.iteritems():
                kwargs[key] = int(m.groupdict()[index] or 0)
            return self.klass(**kwargs)
        return self.default_or_raise(ParseError('Unable to find time in %r' % txt))
class Duration(Time):
    """Like Time, but parses '[hh:]mm:ss' (':' or ';' separators) into a
    datetime.timedelta."""
    klass = datetime.timedelta
    _regexp = re.compile(r'((?P<hh>\d+)[:;])?(?P<mm>\d+)[;:](?P<ss>\d+)')
    kwargs = {'hours': 'hh', 'minutes': 'mm', 'seconds': 'ss'}
class MultiFilter(Filter):
    """Base class for filters taking several sub-selectors.

    Each sub-selector is resolved against the item and the resulting
    values are passed to filter() as a tuple.
    """
    def __init__(self, *args, **kwargs):
        # 'default' is the only recognized keyword; others are dropped.
        default = kwargs.pop('default', _NO_DEFAULT)
        super(MultiFilter, self).__init__(args, default)
    def __call__(self, item):
        values = [self.select(selector, item, obj=self._obj, key=self._key) for selector in self.selector]
        return self.filter(tuple(values))
    def filter(self, values):
        # Subclasses must combine the tuple of resolved values.
        raise NotImplementedError()
class CombineDate(MultiFilter):
    """Combine a date sub-filter and a time sub-filter into one datetime."""
    def __init__(self, date, time):
        super(CombineDate, self).__init__(date, time)
    @debug()
    def filter(self, values):
        # `values` is the (date, time) tuple produced by MultiFilter.
        day, moment = values
        return datetime.datetime.combine(day, moment)
class Format(MultiFilter):
    """Apply a %-style format string to the resolved sub-filter values."""
    def __init__(self, fmt, *args):
        super(Format, self).__init__(*args)
        self.fmt = fmt
    @debug()
    def filter(self, values):
        # `values` is a tuple, so it fills several %-placeholders at once.
        return self.fmt % values
class BrowserURL(MultiFilter):
    """Build an URL from the browser's named URL object, filling its
    placeholders with values resolved from keyword sub-filters."""
    def __init__(self, url_name, **kwargs):
        super(BrowserURL, self).__init__(*kwargs.values())
        self.url_name = url_name
        # NOTE(review): relies on kwargs.keys() and kwargs.values() iterating
        # in the same order, which holds for a single dict instance.
        self.keys = kwargs.keys()
    def __call__(self, item):
        # MultiFilter.__call__ resolves each sub-filter; filter() below is
        # an identity, so `values` is the raw tuple.
        values = super(BrowserURL, self).__call__(item)
        url = getattr(item.page.browser, self.url_name)
        assert isinstance(url, URL), "%s.%s must be an URL object" % (type(item.page.browser).__name__, self.url_name)
        return url.build(**dict(zip(self.keys, values)))
    @debug()
    def filter(self, values):
        return values
class Join(Filter):
    """Clean each selected element's text and join the non-empty results
    with `pattern`, with optional prefix/suffix strings."""
    def __init__(self, pattern, selector=None, textCleaner=CleanText, newline=False, addBefore='', addAfter=''):
        super(Join, self).__init__(selector)
        self.pattern = pattern
        self.textCleaner = textCleaner
        self.newline = newline
        self.addBefore = addBefore
        self.addAfter = addAfter
    @debug()
    def filter(self, el):
        # Clean every element and drop the empty results.
        parts = [text for text in (self.textCleaner.clean(e) for e in el) if text]
        if self.newline:
            parts = ['%s\r\n' % part for part in parts]
        joined = self.pattern.join(parts)
        if self.addBefore:
            joined = '%s%s' % (self.addBefore, joined)
        if self.addAfter:
            joined = '%s%s' % (joined, self.addAfter)
        return joined
class Eval(MultiFilter):
    """
    Evaluate a function with given 'deferred' arguments.
    >>> F = Field; Eval(lambda a, b, c: a * b + c, F('foo'), F('bar'), F('baz')) # doctest: +SKIP
    >>> Eval(lambda x, y: x * y + 1).filter([3, 7])
    22
    """
    def __init__(self, func, *args):
        super(Eval, self).__init__(*args)
        self.func = func
    @debug()
    def filter(self, values):
        # Unpack the resolved sub-filter values as positional arguments.
        return self.func(*values)
def test_CleanText():
    # This test works poorly under a doctest, or would be hard to read
    assert CleanText().filter(u' coucou \n\théhé') == u'coucou héhé'
    assert CleanText().filter('coucou\xa0coucou') == CleanText().filter(u'coucou\xa0coucou') == u'coucou coucou'
    # Unicode normalization
    assert CleanText().filter(u'Éçã') == u'Éçã'
    # NFKC folds compatibility characters (ellipsis -> three dots)...
    assert CleanText(normalize='NFKC').filter(u'…') == u'...'
    # ...while the default NFC keeps them.
    assert CleanText().filter(u'…') == u'…'
    # Diacritical mark (dakuten)
    assert CleanText().filter(u'\u3053\u3099') == u'\u3054'
    assert CleanText(normalize='NFD').filter(u'\u3053\u3099') == u'\u3053\u3099'
    assert CleanText(normalize='NFD').filter(u'\u3054') == u'\u3053\u3099'
    # normalize=False disables Unicode normalization entirely.
    assert CleanText(normalize=False).filter(u'\u3053\u3099') == u'\u3053\u3099'
|
sputnick-dev/weboob
|
weboob/browser/filters/standard.py
|
Python
|
agpl-3.0
| 24,256
|
# coding: utf-8
# PyQGIS sample: display a QgsSizeScaleWidget for the active layer.
# NOTE(review): uses *V2 symbol classes, which suggests the QGIS 2-era API
# -- confirm against the target QGIS version.
from qgis.core import QgsMarkerSymbolV2
from qgis.gui import QgsSizeScaleWidget
from qgis.utils import iface
# Requires a running QGIS session with a layer selected.
layer = iface.activeLayer()
canvas = iface.mapCanvas()
marker_symbol_v2 = QgsMarkerSymbolV2.createSimple({
    'color': 'blue',
    'name': 'triangle'
})
size_scale_widget = QgsSizeScaleWidget(layer, marker_symbol_v2)
# Hand the widget the canvas, then show it.
size_scale_widget.setMapCanvas(canvas)
size_scale_widget.show()
|
webgeodatavore/pyqgis-samples
|
gui/qgis-sample-QgsSizeScaleWidget.py
|
Python
|
gpl-2.0
| 411
|
import re
from mal_types import (_symbol, _keyword, _list, _vector, _hash_map)
class Blank(Exception): pass  # Raised by read_str() when the input has no tokens (blank or comment-only line).
class Reader():
    """Stateful cursor over a list of tokens."""
    def __init__(self, tokens, position=0):
        self.tokens = tokens
        self.position = position
    def next(self):
        # Advance first, then return the token the cursor just passed.
        self.position += 1
        return self.tokens[self.position - 1]
    def peek(self):
        # Current token without consuming it; None once exhausted.
        if self.position < len(self.tokens):
            return self.tokens[self.position]
        return None
def tokenize(str):
    """Split mal source text into tokens, dropping ';' comments."""
    tre = re.compile(r"""[\s,]*(~@|[\[\]{}()'`~^@]|"(?:[\\].|[^\\"])*"|;.*|[^\s\[\]{}()'"`@,;]+)""")
    matches = re.findall(tre, str)
    return [tok for tok in matches if tok[0] != ';']
def read_atom(reader):
    """Consume one token from `reader` and convert it to a mal atom.

    Order matters: the int pattern is tried before the float pattern so
    that "42" becomes an int, while "1.5" becomes a float.
    """
    int_re = re.compile(r"-?[0-9]+$")
    float_re = re.compile(r"-?[0-9][0-9.]*$")
    token = reader.next()
    if re.match(int_re, token): return int(token)
    # BUGFIX: the float branch previously returned int(token), which raises
    # ValueError on tokens such as "1.5".
    elif re.match(float_re, token): return float(token)
    # Strings keep their content with escaped quotes unescaped.
    elif token[0] == '"': return token[1:-1].replace('\\"', '"')
    elif token[0] == ':': return _keyword(token[1:])
    elif token == "nil": return None
    elif token == "true": return True
    elif token == "false": return False
    else: return _symbol(token)
def read_sequence(reader, typ=list, start='(', end=')'):
    """Read forms delimited by `start`/`end` into a new `typ` container.

    `typ` must support append(). Both delimiter tokens are consumed.
    """
    ast = typ()
    token = reader.next()
    if token != start: raise Exception("expected '" + start + "'")
    token = reader.peek()
    while token != end:
        # peek() returns None at end of input -> unterminated sequence.
        if not token: raise Exception("expected '" + end + "', got EOF")
        ast.append(read_form(reader))
        token = reader.peek()
    # Consume the closing delimiter.
    reader.next()
    return ast
def read_hash_map(reader):
    # Read '{ k v ... }' as a flat list; _hash_map pairs the items up.
    lst = read_sequence(reader, list, '{', '}')
    return _hash_map(*lst)
def read_list(reader):
    # '( ... )' becomes a mal list.
    return read_sequence(reader, _list, '(', ')')
def read_vector(reader):
    # '[ ... ]' becomes a mal vector.
    return read_sequence(reader, _vector, '[', ']')
def read_form(reader):
    """Dispatch on the next token: reader macro, collection, or atom."""
    token = reader.peek()
    # reader macros/transforms
    # NOTE(review): tokenize() already drops ';' tokens, so this branch
    # appears unreachable via read_str -- kept for direct callers.
    if token[0] == ';':
        reader.next()
        return None
    # Each quote-like macro consumes its marker and wraps the next form.
    elif token == '\'':
        reader.next()
        return _list(_symbol('quote'), read_form(reader))
    elif token == '`':
        reader.next()
        return _list(_symbol('quasiquote'), read_form(reader))
    elif token == '~':
        reader.next()
        return _list(_symbol('unquote'), read_form(reader))
    elif token == '~@':
        reader.next()
        return _list(_symbol('splice-unquote'), read_form(reader))
    elif token == '^':
        # '^meta form' reads the metadata first but puts it last:
        # (with-meta form meta).
        reader.next()
        meta = read_form(reader)
        return _list(_symbol('with-meta'), read_form(reader), meta)
    elif token == '@':
        reader.next()
        return _list(_symbol('deref'), read_form(reader))
    # list
    elif token == ')': raise Exception("unexpected ')'")
    elif token == '(': return read_list(reader)
    # vector
    elif token == ']': raise Exception("unexpected ']'");
    elif token == '[': return read_vector(reader);
    # hash-map
    elif token == '}': raise Exception("unexpected '}'");
    elif token == '{': return read_hash_map(reader);
    # atom
    else:              return read_atom(reader);
def read_str(str):
    """Parse one mal form from source text; raises Blank on empty input.

    NOTE(review): the parameter shadows the builtin ``str``; renaming it
    would change the public signature, so it is left as-is.
    """
    tokens = tokenize(str)
    if len(tokens) == 0: raise Blank("Blank Line")
    return read_form(Reader(tokens))
|
alphaKAI/mal
|
python/reader.py
|
Python
|
mpl-2.0
| 3,275
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from plim.adapters.babelplugin import extract
from plim.util import StringIO
from .. import TestCaseBase
class TestBabelPlugin(TestCaseBase):
    """Checks that plim's Babel extractor finds translatable strings."""
    def test_babel_extractor(self):
        # Extract (keyword, message) pairs from the sample template.
        fileobj = StringIO(self.get_file_contents('babelplugin_test.plim'))
        keywords = ['_', 'gettext', 'ungettext', 'pluralize']
        extracted = [(data[1], data[2]) for data in extract(fileobj, keywords, [], {})]
        assert ('_', 'Test') in extracted
        assert ('_', 'View more') in extracted
        # Plural forms are extracted as tuples of their variants.
        assert ('pluralize', ('${num} conversation has been marked as read.',
                              '${num} conversations have been marked as read.',
                              None, None)) in extracted
        assert ('ungettext', ('{num} conversation has been marked as read.',
                              '{num} conversations have been marked as read.',
                              None)) in extracted
        # Negative check: this string must not be picked up.
        assert ('gettext', 'N') not in extracted
|
avanov/Plim
|
tests/adapters/test_babelplugin.py
|
Python
|
mit
| 1,051
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-05 14:47
from __future__ import unicode_literals
from django.db import migrations
from django.utils import timezone
from temba.utils import chunk_list
def apply_as_migration(apps, schema_editor):
    """Mark all still-active runs of archived flows as interrupted.

    Uses historical models (apps.get_model) so the migration remains valid
    as the schema evolves.
    """
    Flow = apps.get_model('flows', "Flow")
    FlowRun = apps.get_model('flows', "FlowRun")
    FlowStep = apps.get_model('flows', "FlowStep")
    flows = Flow.objects.filter(is_archived=True)
    for flow in flows:
        runs = FlowRun.objects.filter(is_active=True, exit_type=None, flow_id=flow.id)
        run_ids = list(runs.values_list('id', flat=True))
        # batch this for 1,000 runs at a time so we don't grab locks for too long
        for id_batch in chunk_list(run_ids, 1000):
            now = timezone.now()
            # mark all steps in these runs as having been left
            FlowStep.objects.filter(run__id__in=id_batch, left_on=None).update(left_on=now)
            runs = FlowRun.objects.filter(id__in=id_batch)
            # 'I' is the interrupted exit type (see apply_manual below).
            runs.update(is_active=False, exited_on=now, exit_type='I', modified_on=now)
def apply_manual():
    """Same operation as apply_as_migration, but runnable from a shell
    against the live models (uses FlowRun.bulk_exit)."""
    from temba.flows.models import Flow, FlowRun
    flows = Flow.objects.filter(is_archived=True)
    for flow in flows:
        runs = FlowRun.objects.filter(is_active=True, exit_type=None, flow_id=flow.id)
        FlowRun.bulk_exit(runs, FlowRun.EXIT_TYPE_INTERRUPTED)
class Migration(migrations.Migration):
    # Data migration: interrupts runs of archived flows (no reverse step).
    dependencies = [
        ('flows', '0096_populate_flownodecount'),
    ]
    operations = [
        migrations.RunPython(apply_as_migration)
    ]
|
onaio/rapidpro
|
temba/flows/migrations/0097_interrupt_runs_for_archived_flows.py
|
Python
|
agpl-3.0
| 1,586
|
from datetime import datetime
from .base import Base
__all__ = ["Plan"]
class Plan(Base):
    """Plan resource: wraps a raw attribute dict and coerces the known
    fields to their declared types via Base.initialize_attr."""
    def __init__(self, attrs=None):
        # BUGFIX: the default used to be the mutable `attrs={}`, which is
        # shared across all calls; normalize None to a fresh dict instead.
        if attrs is None:
            attrs = {}
        super(Plan, self).__init__(attrs)
        self.initialize_attr("amount", int)
        self.initialize_attr("created_at", datetime)
        self.initialize_attr("currency", str)
        self.initialize_attr("name", str)
        self.initialize_attr("id", str)
        self.initialize_attr("period_unit", str)
|
accepton/accepton-python
|
accepton/plan.py
|
Python
|
mit
| 444
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class BamReadcount(CMakePackage):
    """Bam-readcount generates metrics at single nucleotide positions."""
    # Spack package recipe: the attributes below are consumed by Spack's
    # fetch/build machinery.
    homepage = "https://github.com/genome/bam-readcount"
    url      = "https://github.com/genome/bam-readcount/archive/v0.8.0.tar.gz"
    version('0.8.0', sha256='4f4dd558e3c6bfb24d6a57ec441568f7524be6639b24f13ea6f2bb350c7ea65f')
|
iulian787/spack
|
var/spack/repos/builtin/packages/bam-readcount/package.py
|
Python
|
lgpl-2.1
| 562
|
class MirrorException(Exception):
    # Package-specific exception type; carries no state beyond Exception.
    pass
|
EndPointCorp/lg_ros_nodes
|
lg_mirror/src/lg_mirror/__init__.py
|
Python
|
apache-2.0
| 43
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_geldmaschine
----------------------------------
Tests for `geldmaschine` module.
"""
import unittest
from geldmaschine import main
class TestGeldmaschine(unittest.TestCase):
    """Placeholder test case scaffold for the geldmaschine module."""
    def setUp(self):
        # No fixtures needed yet.
        pass
    def test_something(self):
        # TODO: add real assertions against geldmaschine.main.
        pass
    def tearDown(self):
        # No cleanup needed yet.
        pass
if __name__ == '__main__':
    unittest.main()
|
elbaschid/geldmaschine
|
tests/test_geldmaschine.py
|
Python
|
mit
| 400
|
# Copyright 2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
import json
from .common import BaseTest, load_data
class CloudDirectoryTest(BaseTest):
    """Exercises the cloud-directory resource against recorded AWS traffic."""
    def test_cloud_directory(self):
        # Replays recorded API calls instead of hitting AWS.
        session_factory = self.replay_flight_data('test_cloud_directory')
        client = session_factory().client('clouddirectory')
        # Create a development schema and register its cleanup.
        schema_arn = client.create_schema(Name='gooseberry').get('SchemaArn')
        self.addCleanup(client.delete_schema, SchemaArn=schema_arn)
        schema_data = load_data('sample-clouddir-schema.json')
        client.put_schema_from_json(
            SchemaArn=schema_arn,
            Document=json.dumps(schema_data))
        published_schema = client.publish_schema(
            DevelopmentSchemaArn=schema_arn,
            Version="1").get('PublishedSchemaArn')
        self.addCleanup(client.delete_schema, SchemaArn=published_schema)
        dir_info = client.create_directory(
            Name='c7n-test', SchemaArn=published_schema)
        # Cleanups run in reverse order: disable first, then delete.
        self.addCleanup(client.delete_directory, DirectoryArn=dir_info['DirectoryArn'])
        self.addCleanup(client.disable_directory, DirectoryArn=dir_info['DirectoryArn'])
        # Policy should match the one non-deleted directory created above.
        p = self.load_policy(
            {'name': 'cloud-directory',
             'resource': 'cloud-directory',
             'filters': [
                 {'type': 'value',
                  'key': 'State',
                  'value': 'DELETED',
                  'op': 'not-equal'},
             ]
             },
            session_factory=session_factory)
        resources = p.run()
        self.assertEqual(len(resources), 1)
|
jimmyraywv/cloud-custodian
|
tests/test_directory.py
|
Python
|
apache-2.0
| 2,198
|
#!/usr/bin/python3
# Copyright 2014, Oliver Nagy <olitheolix@gmail.com>
#
# This file is part of Azrael (https://github.com/olitheolix/azrael)
#
# Azrael is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Azrael is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Azrael. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import pyassimp
import PIL.Image
import pyassimp.postprocess
import numpy as np
from IPython import embed as ipshell
def normaliseModel(scene):
    """Center the scene's meshes at the origin and scale to unit radius.

    Mutates every ``mesh.vertices`` array in place.
    """
    # Find arithmetic model center.
    minval = np.zeros((len(scene.meshes), 3))
    maxval = np.zeros_like(minval)
    for idx, mm in enumerate(scene.meshes):
        # Per-mesh bounding box corners...
        minval[idx, :] = np.amin(mm.vertices, axis=0)
        maxval[idx, :] = np.amax(mm.vertices, axis=0)
    # ...reduced to a global bounding box.
    minval = np.amin(minval, axis=0)
    maxval = np.amax(maxval, axis=0)
    ofs = (minval + maxval) / 2
    # Center the model and find the largest distance from the center.
    max_dist = 1E-5
    for idx, mm in enumerate(scene.meshes):
        mm.vertices -= ofs
        # Determine the largest norm.
        dist = np.amax(np.sqrt(np.sum(mm.vertices ** 2, axis=1)))
        if dist > max_dist:
            max_dist = dist
    # Scale the model so that the distance to the farthest vertex is unity.
    for idx, mm in enumerate(scene.meshes):
        mm.vertices /= max_dist
def loadMaterials(scene, fname):
    """
    Load the textures from files into RGB arrays.

    Texture paths are resolved relative to the directory of the model file
    `fname`. The RGB data for non-existing files is an empty array; each
    returned entry is a dict with 'RGB', 'width' and 'height' keys.
    """
    # Directory of the model file; texture paths are relative to it.
    fpath = os.path.dirname(os.path.abspath(fname))
    # Load each texture file.
    materials = []
    for mat_idx, mat in enumerate(scene.materials):
        try:
            # Extract the texture file name (may raise a KeyError).
            mat_aux = dict(mat.properties)
            texture_file = os.path.join(fpath, mat_aux[('file', 1)])
            # Load the image (may raise a FileNotFoundError).
            img = PIL.Image.open(texture_file)
        except (FileNotFoundError, KeyError):
            # No texture available.
            materials.append({'RGB': [], 'width': 0, 'height': 0})
            continue
        # Convert the image to an RGB array (flip to texture orientation).
        width, height = img.size
        img = img.transpose(PIL.Image.FLIP_TOP_BOTTOM)
        # BUGFIX: np.fromstring on binary data is deprecated in NumPy;
        # np.frombuffer produces identical output.
        RGB = np.frombuffer(img.tobytes(), np.uint8).tolist()
        del mat_aux, texture_file, img
        # Sanity check.
        assert (len(RGB) == width * height * 3)
        materials.append({'RGB': RGB, 'width': width, 'height': height})
    return materials
def loadObjects(scene, material_list):
    """Group meshes by material and concatenate their vertex/UV data.

    Returns five parallel lists (vertices, UV, RGB, width, height), one
    entry per material that is used by at least one mesh.
    """
    # Map material index -> list of mesh indexes using that material.
    mat2mesh = {}
    for idx, mm in enumerate(scene.meshes):
        mat_idx = mm.materialindex
        if mat_idx not in mat2mesh:
            mat2mesh[mat_idx] = []
        mat2mesh[mat_idx].append(idx)
    del idx, mm, mat_idx
    set_vertex = []
    set_UV = []
    set_RGB = []
    set_width = []
    set_height = []
    # Loop over all materials.
    for mat_idx in mat2mesh:
        vertex_material = []
        UV_material = []
        # NOTE(review): 'mat' is assigned but unused below.
        mat = material_list[mat_idx]
        # Loop over all the meshes that use the current material and
        # concatenate their vertices and UV maps.
        for mesh_idx in mat2mesh[mat_idx]:
            mesh = scene.meshes[mesh_idx]
            # Flatten the mesh data, which comes as an Nx3 array.
            vert = (mesh.vertices).flatten()
            # 9 floats per triangle (3 vertices x 3 coordinates).
            assert len(vert) % 9 == 0
            # Copy the UV coordinates, if there are any.
            if len(mesh.texturecoords) > 0:
                UV = mesh.texturecoords[0][:, :2]
                UV = UV.flatten()
            else:
                # No UV map: place every vertex at the texture center.
                UV = 0.5 * np.ones(2 * (len(vert) // 3))
            assert (len(UV) // 2 == len(vert) // 3)
            # Add the vertices, and UV maps to the set.
            vertex_material.extend(vert.tolist())
            UV_material.extend(UV.tolist())
        # Now that all those meshes from the scene that share the same material
        set_vertex.append(vertex_material)
        set_UV.append(UV_material)
        set_RGB.append(material_list[mat_idx]['RGB'])
        set_width.append(material_list[mat_idx]['width'])
        set_height.append(material_list[mat_idx]['height'])
    return set_vertex, set_UV, set_RGB, set_width, set_height
def loadModelAll(fname):
    """Load a model file and return its geometry and texture data.

    Returns a dict with 'vertices', 'UV', 'RGB', 'width' and 'height' keys
    (parallel lists, one entry per material group).
    """
    # Triangulate on import so every face has exactly 3 vertices.
    scene = pyassimp.load(fname, pyassimp.postprocess.aiProcess_Triangulate)
    # Center and normalise the mesh.
    normaliseModel(scene)
    # Load all the materials.
    material_list = loadMaterials(scene, fname)
    # Associate mesh indexes with material indexes.
    vert, UV, RGB, width, height = loadObjects(scene, material_list)
    # Sanity check: the number of vertices must match the number of UV pairs.
    assert (len(UV) == len(vert))
    for ii in range(len(UV)):
        assert (len(UV[ii]) // 2 == len(vert[ii]) // 3)
    # Clear the UV data if there is no texture.
    for ii in range(len(UV)):
        if width[ii] == height[ii] == 0:
            UV[ii] = np.array([], np.float64)
    # Return the data as a dictionary.
    data = {'vertices': vert,
            'UV': UV,
            'RGB': RGB,
            'width': width,
            'height': height}
    return data
def loadModelMesh(fname):
    """Load a model but replace texture data with random vertex colors.

    Returns a dict with 'vertices' and 'colors' keys. NOTE: colors come
    from np.random, so the output is nondeterministic.
    """
    data = loadModelAll(fname)
    # Drop all the texture-related fields.
    del data['UV'], data['RGB'], data['width'], data['height']
    data['colors'] = []
    for vert in data['vertices']:
        # Create a random color vector. The alpha value is 1.0.
        col = np.random.rand(4 * (len(vert) // 3))
        col[3::4] = 1.0
        data['colors'].append(col.tolist())
    return data
if __name__ == '__main__':
    # Demo/smoke test: load a sample model both ways.
    fname = 'models/pencil/pencil.obj'
    # fname = 'models/house/house.obj'
    # fname = "models/cube/Rubik's Cube.obj"
    # NOTE(review): the first result is discarded by the second call.
    data = loadModelAll(fname)
    data = loadModelMesh(fname)
|
daviddeng/azrael
|
viewer/model_import.py
|
Python
|
agpl-3.0
| 6,307
|
#!/usr/bin/env python
"""igcollect - Redis
Copyright (c) 2018 InnoGames GmbH
"""
from argparse import ArgumentParser
from subprocess import check_output
from time import time
def parse_args(argv=None):
    """Parse command line options.

    Args:
        argv: optional explicit argument list (useful for testing);
              defaults to ``sys.argv[1:]`` as usual for argparse.
    Returns:
        argparse.Namespace with a ``prefix`` attribute (default 'redis').
    """
    parser = ArgumentParser()
    parser.add_argument('--prefix', default='redis')
    return parser.parse_args(argv)
def main():
    """Collect Redis INFO stats and print them in Graphite line format."""
    args = parse_args()
    # Pull auth/port settings from the local redis.conf, if present.
    cfg = get_redis_conf('requirepass', 'port')
    cli_command = ['redis-cli']
    if 'requirepass' in cfg:
        cli_command.extend(['-a', cfg['requirepass']])
    if 'port' in cfg:
        cli_command.extend(['-p', cfg['port']])
    cli_command.append('info')
    redis_info = check_output(cli_command)
    redis_stats = {}
    # INFO output is bytes of 'key:value' lines; decode into a str dict.
    for x in redis_info.splitlines():
        if x.find(b':') != -1:
            key, value = x.split(b':')
            redis_stats[key.decode('utf-8')] = value.decode('utf-8')
    # One shared timestamp for all emitted metrics.
    template = args.prefix + '.{} {} ' + str(int(time()))
    headers = (
        'total_connections_received',
        'total_commands_processed',
        'keyspace_hits',
        'keyspace_misses',
        'used_memory',
        'used_cpu_sys',
        'used_cpu_user',
        'used_cpu_sys_children',
        'used_cpu_user_children',
    )
    for metric in headers:
        print(template.format(metric, redis_stats[metric]))
def get_redis_conf(*args):
    """Get requested parameters from the configuration"""
    # Read the whole config once; each line is 'directive value ...'.
    with open("/etc/redis/redis.conf") as fd:
        lines = fd.read().splitlines()
    found = {}
    for line in lines:
        words = line.split()
        # Keep only non-empty lines whose directive was requested.
        if words and words[0] in args:
            found[words[0]] = words[1]
    return found
# Script entry point: collect and print Redis metrics.
if __name__ == '__main__':
    main()
|
innogames/igcollect
|
igcollect/redis.py
|
Python
|
mit
| 1,720
|
class BaseSchemaProvider:
    """Base class for schema providers.

    Subclasses implement :meth:`refresh` to (re)load schema information
    using the supplied database driver.
    """
    def __init__(self, driver):
        # Stored for subclasses; the base class itself never touches it.
        self.driver = driver
    def refresh(self, schema):
        """Refresh *schema* from the backend; must be overridden."""
        raise NotImplementedError
|
andialbrecht/runsqlrun
|
rsr/schema/base.py
|
Python
|
mit
| 154
|
from __future__ import unicode_literals
import json
from django.test import TestCase, override_settings
from django.utils.http import urlquote
from django.core.urlresolvers import reverse
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Permission
from django.core.files.uploadedfile import SimpleUploadedFile
from django.template.defaultfilters import filesizeformat
# Get the chars that Django considers safe to leave unescaped in a URL
# This list changed in Django 1.8: https://github.com/django/django/commit/e167e96cfea670422ca75d0b35fe7c4195f25b63
try:
from django.utils.http import RFC3986_SUBDELIMS
urlquote_safechars = RFC3986_SUBDELIMS + str('/~:@')
except ImportError: # < Django 1,8
urlquote_safechars = '/'
from wagtail.tests.utils import WagtailTestUtils
from wagtail.wagtailimages.utils import generate_signature
from .utils import Image, get_test_image_file
class TestImageIndexView(TestCase, WagtailTestUtils):
    """Tests for the image listing (index) view in the Wagtail admin."""
    def setUp(self):
        # Log in as a superuser (WagtailTestUtils helper).
        self.login()
    def get(self, params={}):
        # Helper: GET the index view with optional query parameters.
        return self.client.get(reverse('wagtailimages:index'), params)
    def test_simple(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/index.html')
        self.assertContains(response, "Add an image")
    def test_search(self):
        # The 'q' parameter should be echoed back as the query string.
        response = self.get({'q': "Hello"})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['query_string'], "Hello")
    def test_pagination(self):
        # Out-of-range and non-numeric page numbers must not 500.
        pages = ['0', '1', '-1', '9999', 'Not a page']
        for page in pages:
            response = self.get({'p': page})
            self.assertEqual(response.status_code, 200)
    def test_ordering(self):
        orderings = ['title', '-created_at']
        for ordering in orderings:
            response = self.get({'ordering': ordering})
            self.assertEqual(response.status_code, 200)
class TestImageAddView(TestCase, WagtailTestUtils):
    """Tests for the single-image add (upload) view."""
    def setUp(self):
        self.login()
    def get(self, params={}):
        # Helper: GET the add form.
        return self.client.get(reverse('wagtailimages:add'), params)
    def post(self, post_data={}):
        # Helper: POST the add form.
        return self.client.post(reverse('wagtailimages:add'), post_data)
    def test_simple(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/add.html')
    def test_add(self):
        response = self.post({
            'title': "Test image",
            'file': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
        })
        # Should redirect back to index
        self.assertRedirects(response, reverse('wagtailimages:index'))
        # Check that the image was created
        images = Image.objects.filter(title="Test image")
        self.assertEqual(images.count(), 1)
        # Test that size was populated correctly
        image = images.first()
        self.assertEqual(image.width, 640)
        self.assertEqual(image.height, 480)
        # Test that the file_size field was set
        self.assertTrue(image.file_size)
    def test_add_no_file_selected(self):
        response = self.post({
            'title': "Test image",
        })
        # Shouldn't redirect anywhere
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/add.html')
        # The form should have an error
        self.assertFormError(response, 'form', 'file', "This field is required.")
    @override_settings(WAGTAILIMAGES_MAX_UPLOAD_SIZE=1)
    def test_add_too_large_file(self):
        # With a 1-byte limit, any real image upload must be rejected.
        file_content = get_test_image_file().file.getvalue()
        response = self.post({
            'title': "Test image",
            'file': SimpleUploadedFile('test.png', file_content),
        })
        # Shouldn't redirect anywhere
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/add.html')
        # The form should have an error
        self.assertFormError(response, 'form', 'file', "This file is too big ({file_size}). Maximum filesize {max_file_size}.".format(
            file_size=filesizeformat(len(file_content)),
            max_file_size=filesizeformat(1),
        ))
class TestImageEditView(TestCase, WagtailTestUtils):
    """Tests for the image edit view."""
    def setUp(self):
        self.login()
        # Create an image to edit
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )
    def get(self, params={}):
        # Helper: GET the edit view for the fixture image.
        return self.client.get(reverse('wagtailimages:edit', args=(self.image.id,)), params)
    def post(self, post_data={}):
        # Helper: POST to the edit view for the fixture image.
        return self.client.post(reverse('wagtailimages:edit', args=(self.image.id,)), post_data)
    def test_simple(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/edit.html')
    def test_edit(self):
        response = self.post({
            'title': "Edited",
        })
        # Should redirect back to index
        self.assertRedirects(response, reverse('wagtailimages:index'))
        # Check that the image was edited
        image = Image.objects.get(id=self.image.id)
        self.assertEqual(image.title, "Edited")
    def test_edit_with_new_image_file(self):
        file_content = get_test_image_file().file.getvalue()
        # Change the file size of the image
        self.image.file_size = 100000
        self.image.save()
        response = self.post({
            'title': "Edited",
            'file': SimpleUploadedFile('new.png', file_content),
        })
        # Should redirect back to index
        self.assertRedirects(response, reverse('wagtailimages:index'))
        # Check that the image file size changed (assume it changed to the correct value)
        image = Image.objects.get(id=self.image.id)
        self.assertNotEqual(image.file_size, 100000)
    def test_with_missing_image_file(self):
        # The edit page must render even when the file is gone from storage.
        self.image.file.delete(False)
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/edit.html')
class TestImageDeleteView(TestCase, WagtailTestUtils):
    """Tests for the image delete confirmation/submission view."""
    def setUp(self):
        self.login()
        # Create an image to edit
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )
    def get(self, params={}):
        # Helper: GET the delete confirmation page.
        return self.client.get(reverse('wagtailimages:delete', args=(self.image.id,)), params)
    def post(self, post_data={}):
        # Helper: POST to confirm deletion.
        return self.client.post(reverse('wagtailimages:delete', args=(self.image.id,)), post_data)
    def test_simple(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/confirm_delete.html')
    def test_delete(self):
        # POST body content is irrelevant; any POST confirms the deletion.
        response = self.post({
            'hello': 'world'
        })
        # Should redirect back to index
        self.assertRedirects(response, reverse('wagtailimages:index'))
        # Check that the image was deleted
        images = Image.objects.filter(title="Test image")
        self.assertEqual(images.count(), 0)
class TestImageChooserView(TestCase, WagtailTestUtils):
    """Tests for the modal image chooser listing."""
    def setUp(self):
        self.login()
    def get(self, params={}):
        # Helper: GET the chooser modal with optional query parameters.
        return self.client.get(reverse('wagtailimages:chooser'), params)
    def test_simple(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
        self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.js')
    def test_search(self):
        response = self.get({'q': "Hello"})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['query_string'], "Hello")
    def test_pagination(self):
        # Out-of-range and non-numeric page numbers must not 500.
        pages = ['0', '1', '-1', '9999', 'Not a page']
        for page in pages:
            response = self.get({'p': page})
            self.assertEqual(response.status_code, 200)
class TestImageChooserChosenView(TestCase, WagtailTestUtils):
    """Tests for the chooser's "image chosen" JavaScript response."""
    def setUp(self):
        self.login()
        # Create an image to edit
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )
    def get(self, params={}):
        # Helper: GET the image_chosen JS response for the fixture image.
        return self.client.get(reverse('wagtailimages:image_chosen', args=(self.image.id,)), params)
    def test_simple(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/chooser/image_chosen.js')
    # TODO: Test posting
class TestImageChooserUploadView(TestCase, WagtailTestUtils):
    """Tests for the upload tab inside the image chooser modal."""
    def setUp(self):
        self.login()
    def get(self, params={}):
        # Helper: GET the chooser upload view.
        return self.client.get(reverse('wagtailimages:chooser_upload'), params)
    def test_simple(self):
        response = self.get()
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
        self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.js')
    def test_upload(self):
        response = self.client.post(reverse('wagtailimages:chooser_upload'), {
            'title': "Test image",
            'file': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
        })
        # Check response
        self.assertEqual(response.status_code, 200)
        # Check that the image was created
        images = Image.objects.filter(title="Test image")
        self.assertEqual(images.count(), 1)
        # Test that size was populated correctly
        image = images.first()
        self.assertEqual(image.width, 640)
        self.assertEqual(image.height, 480)
    def test_upload_no_file_selected(self):
        response = self.client.post(reverse('wagtailimages:chooser_upload'), {
            'title': "Test image",
        })
        # Shouldn't redirect anywhere
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/chooser/chooser.html')
        # The form should have an error
        self.assertFormError(response, 'uploadform', 'file', "This field is required.")
class TestMultipleImageUploader(TestCase, WagtailTestUtils):
    """
    This tests the multiple image upload views located in wagtailimages/views/multiple.py
    """
    def setUp(self):
        self.login()
        # Create an image for running tests on
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )
    def test_add(self):
        """
        This tests that the add view responds correctly on a GET request
        """
        # Send request
        response = self.client.get(reverse('wagtailimages:add_multiple'))
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/multiple/add.html')
    @override_settings(WAGTAILIMAGES_MAX_UPLOAD_SIZE=1000)
    def test_add_max_file_size_context_variables(self):
        # The upload limit and its error string are exposed to the template.
        response = self.client.get(reverse('wagtailimages:add_multiple'))
        self.assertEqual(response.context['max_filesize'], 1000)
        self.assertEqual(response.context['error_max_file_size'], "This file is too big. Maximum filesize 1000\xa0bytes.")
    def test_add_post(self):
        """
        This tests that a POST request to the add view saves the image and returns an edit form
        """
        # The view only accepts AJAX posts, hence the X-Requested-With header.
        response = self.client.post(reverse('wagtailimages:add_multiple'), {
            'files[]': SimpleUploadedFile('test.png', get_test_image_file().file.getvalue()),
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        self.assertTemplateUsed(response, 'wagtailimages/multiple/edit_form.html')
        # Check image
        self.assertIn('image', response.context)
        self.assertEqual(response.context['image'].title, 'test.png')
        self.assertTrue(response.context['image'].file_size)
        # Check form
        self.assertIn('form', response.context)
        self.assertEqual(response.context['form'].initial['title'], 'test.png')
        # Check JSON
        response_json = json.loads(response.content.decode())
        self.assertIn('image_id', response_json)
        self.assertIn('form', response_json)
        self.assertIn('success', response_json)
        self.assertEqual(response_json['image_id'], response.context['image'].id)
        self.assertTrue(response_json['success'])
    def test_add_post_noajax(self):
        """
        This tests that only AJAX requests are allowed to POST to the add view
        """
        response = self.client.post(reverse('wagtailimages:add_multiple'), {})
        # Check response
        self.assertEqual(response.status_code, 400)
    def test_add_post_nofile(self):
        """
        This tests that the add view checks for a file when a user POSTs to it
        """
        response = self.client.post(reverse('wagtailimages:add_multiple'), {}, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        # Check response
        self.assertEqual(response.status_code, 400)
    def test_add_post_badfile(self):
        """
        This tests that the add view checks for a file when a user POSTs to it
        """
        response = self.client.post(reverse('wagtailimages:add_multiple'), {
            'files[]': SimpleUploadedFile('test.png', b"This is not an image!"),
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        # Check JSON
        response_json = json.loads(response.content.decode())
        self.assertNotIn('image_id', response_json)
        self.assertNotIn('form', response_json)
        self.assertIn('success', response_json)
        self.assertIn('error_message', response_json)
        self.assertFalse(response_json['success'])
        self.assertEqual(response_json['error_message'], "Not a supported image format. Supported formats: GIF, JPEG, PNG.")
    def test_edit_get(self):
        """
        This tests that a GET request to the edit view returns a 405 "METHOD NOT ALLOWED" response
        """
        # Send request
        response = self.client.get(reverse('wagtailimages:edit_multiple', args=(self.image.id, )))
        # Check response
        self.assertEqual(response.status_code, 405)
    def test_edit_post(self):
        """
        This tests that a POST request to the edit view edits the image
        """
        # Send request
        response = self.client.post(reverse('wagtailimages:edit_multiple', args=(self.image.id, )), {
            ('image-%d-title' % self.image.id): "New title!",
            ('image-%d-tags' % self.image.id): "",
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        # Check JSON
        response_json = json.loads(response.content.decode())
        self.assertIn('image_id', response_json)
        self.assertNotIn('form', response_json)
        self.assertIn('success', response_json)
        self.assertEqual(response_json['image_id'], self.image.id)
        self.assertTrue(response_json['success'])
    def test_edit_post_noajax(self):
        """
        This tests that a POST request to the edit view without AJAX returns a 400 response
        """
        # Send request
        response = self.client.post(reverse('wagtailimages:edit_multiple', args=(self.image.id, )), {
            ('image-%d-title' % self.image.id): "New title!",
            ('image-%d-tags' % self.image.id): "",
        })
        # Check response
        self.assertEqual(response.status_code, 400)
    def test_edit_post_validation_error(self):
        """
        This tests that a POST request to the edit page returns a json document with "success=False"
        and a form with the validation error indicated
        """
        # Send request
        response = self.client.post(reverse('wagtailimages:edit_multiple', args=(self.image.id, )), {
            ('image-%d-title' % self.image.id): "", # Required
            ('image-%d-tags' % self.image.id): "",
        }, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        self.assertTemplateUsed(response, 'wagtailimages/multiple/edit_form.html')
        # Check that a form error was raised
        self.assertFormError(response, 'form', 'title', "This field is required.")
        # Check JSON
        response_json = json.loads(response.content.decode())
        self.assertIn('image_id', response_json)
        self.assertIn('form', response_json)
        self.assertIn('success', response_json)
        self.assertEqual(response_json['image_id'], self.image.id)
        self.assertFalse(response_json['success'])
    def test_delete_get(self):
        """
        This tests that a GET request to the delete view returns a 405 "METHOD NOT ALLOWED" response
        """
        # Send request
        response = self.client.get(reverse('wagtailimages:delete_multiple', args=(self.image.id, )))
        # Check response
        self.assertEqual(response.status_code, 405)
    def test_delete_post(self):
        """
        This tests that a POST request to the delete view deletes the image
        """
        # Send request
        response = self.client.post(reverse('wagtailimages:delete_multiple', args=(self.image.id, )), HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        # Make sure the image is deleted
        self.assertFalse(Image.objects.filter(id=self.image.id).exists())
        # Check JSON
        response_json = json.loads(response.content.decode())
        self.assertIn('image_id', response_json)
        self.assertIn('success', response_json)
        self.assertEqual(response_json['image_id'], self.image.id)
        self.assertTrue(response_json['success'])
    def test_delete_post_noajax(self):
        """
        This tests that a POST request to the delete view without AJAX returns a 400 response
        """
        # Send request
        response = self.client.post(reverse('wagtailimages:delete_multiple', args=(self.image.id, )))
        # Check response
        self.assertEqual(response.status_code, 400)
class TestURLGeneratorView(TestCase, WagtailTestUtils):
    """Tests for the URL generator page (permission-gated)."""
    def setUp(self):
        # Create an image for running tests on
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )
        # Login
        self.user = self.login()
    def test_get(self):
        """
        This tests that the view responds correctly for a user with edit permissions on this image
        """
        # Get
        response = self.client.get(reverse('wagtailimages:url_generator', args=(self.image.id, )))
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/url_generator.html')
    def test_get_bad_permissions(self):
        """
        This tests that the view gives a 403 if a user without correct permissions attempts to access it
        """
        # Remove privileges from user (keep admin access so the 403 comes
        # from the image permission check, not the admin gate).
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
        )
        self.user.save()
        # Get
        response = self.client.get(reverse('wagtailimages:url_generator', args=(self.image.id, )))
        # Check response
        self.assertEqual(response.status_code, 403)
class TestGenerateURLView(TestCase, WagtailTestUtils):
    """Tests for the JSON endpoint that generates signed image URLs."""
    def setUp(self):
        # Create an image for running tests on
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )
        # Login
        self.user = self.login()
    def test_get(self):
        """
        This tests that the view responds correctly for a user with edit permissions on this image
        """
        # Get
        response = self.client.get(reverse('wagtailimages:generate_url', args=(self.image.id, 'fill-800x600')))
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'application/json')
        # Check JSON
        content_json = json.loads(response.content.decode())
        self.assertEqual(set(content_json.keys()), set(['url', 'preview_url']))
        # Rebuild the expected signed URL with the same signature helper.
        expected_url = 'http://localhost/images/%(signature)s/%(image_id)d/fill-800x600/' % {
            'signature': urlquote(generate_signature(self.image.id, 'fill-800x600').decode(), safe=urlquote_safechars),
            'image_id': self.image.id,
        }
        self.assertEqual(content_json['url'], expected_url)
        expected_preview_url = reverse('wagtailimages:preview', args=(self.image.id, 'fill-800x600'))
        self.assertEqual(content_json['preview_url'], expected_preview_url)
    def test_get_bad_permissions(self):
        """
        This tests that the view gives a 403 if a user without correct permissions attempts to access it
        """
        # Remove privileges from user
        self.user.is_superuser = False
        self.user.user_permissions.add(
            Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
        )
        self.user.save()
        # Get
        response = self.client.get(reverse('wagtailimages:generate_url', args=(self.image.id, 'fill-800x600')))
        # Check response
        self.assertEqual(response.status_code, 403)
        self.assertEqual(response['Content-Type'], 'application/json')
        # Check JSON
        self.assertJSONEqual(response.content.decode(), json.dumps({
            'error': 'You do not have permission to generate a URL for this image.',
        }))
    def test_get_bad_image(self):
        """
        This tests that the view gives a 404 response if a user attempts to use it with an image which doesn't exist
        """
        # Get
        response = self.client.get(reverse('wagtailimages:generate_url', args=(self.image.id + 1, 'fill-800x600')))
        # Check response
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response['Content-Type'], 'application/json')
        # Check JSON
        self.assertJSONEqual(response.content.decode(), json.dumps({
            'error': 'Cannot find image.',
        }))
    def test_get_bad_filter_spec(self):
        """
        This tests that the view gives a 400 response if the user attempts to use it with an invalid filter spec
        """
        # Get
        response = self.client.get(reverse('wagtailimages:generate_url', args=(self.image.id, 'bad-filter-spec')))
        # Check response
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response['Content-Type'], 'application/json')
        # Check JSON
        self.assertJSONEqual(response.content.decode(), json.dumps({
            'error': 'Invalid filter spec.',
        }))
class TestPreviewView(TestCase, WagtailTestUtils):
    """Tests for the image preview view used by the URL generator."""
    def setUp(self):
        # Create an image for running tests on
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )
        # Login
        self.user = self.login()
    def test_get(self):
        """
        Test a valid GET request to the view
        """
        # Get the image
        response = self.client.get(reverse('wagtailimages:preview', args=(self.image.id, 'fill-800x600')))
        # Check response
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response['Content-Type'], 'image/png')
    def test_get_invalid_filter_spec(self):
        """
        Test that an invalid filter spec returns a 400 response
        This is very unlikely to happen in reality. A user would have
        to create signature for the invalid filter spec which can't be
        done with Wagtails built in URL generator. We should test it
        anyway though.
        """
        # Get the image
        response = self.client.get(reverse('wagtailimages:preview', args=(self.image.id, 'bad-filter-spec')))
        # Check response
        self.assertEqual(response.status_code, 400)
class TestEditOnlyPermissions(TestCase, WagtailTestUtils):
    """Tests the admin views for a user with change_image but not add_image."""
    def setUp(self):
        # Create an image to edit
        self.image = Image.objects.create(
            title="Test image",
            file=get_test_image_file(),
        )
        # Create a user with change_image permission but not add_image
        user = get_user_model().objects.create_user(username='changeonly', email='changeonly@example.com', password='password')
        change_permission = Permission.objects.get(content_type__app_label='wagtailimages', codename='change_image')
        admin_permission = Permission.objects.get(content_type__app_label='wagtailadmin', codename='access_admin')
        user.user_permissions.add(change_permission, admin_permission)
        self.client.login(username='changeonly', password='password')
    def test_get_index(self):
        response = self.client.get(reverse('wagtailimages:index'))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/index.html')
        # user should not get an "Add an image" button
        self.assertNotContains(response, "Add an image")
        # user should be able to see images not owned by them
        self.assertContains(response, "Test image")
    def test_get_add(self):
        response = self.client.get(reverse('wagtailimages:add'))
        # permission should be denied
        self.assertRedirects(response, reverse('wagtailadmin_home'))
    def test_get_edit(self):
        response = self.client.get(reverse('wagtailimages:edit', args=(self.image.id,)))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/edit.html')
    def test_get_delete(self):
        response = self.client.get(reverse('wagtailimages:delete', args=(self.image.id,)))
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'wagtailimages/images/confirm_delete.html')
    def test_get_add_multiple(self):
        response = self.client.get(reverse('wagtailimages:add_multiple'))
        # permission should be denied
        self.assertRedirects(response, reverse('wagtailadmin_home'))
|
rv816/wagtail
|
wagtail/wagtailimages/tests/test_admin_views.py
|
Python
|
bsd-3-clause
| 27,333
|
# Copyright (c) 2003-2005 Maxim Sobolev. All rights reserved.
# Copyright (c) 2006-2007 Sippy Software, Inc. All rights reserved.
#
# This file is part of SIPPY, a free RFC3261 SIP stack and B2BUA.
#
# SIPPY is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# For a license to use the SIPPY software under conditions
# other than those described here, or to purchase support for this
# software, please contact Sippy Software, Inc. by e-mail at the
# following addresses: sales@sippysoft.com.
#
# SIPPY is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
from .SipAddressHF import SipAddressHF
class SipCCDiversion(SipAddressHF):
    """Represents the CC-Diversion SIP header field (an address header)."""
    # Lower-case header names this class is registered to handle.
    hf_names = ('cc-diversion',)
    def getCanName(self, name, compact = False):
        # Always the canonical capitalisation; the *compact* flag is
        # ignored because this header has no compact form here.
        return 'CC-Diversion'
|
hgascon/pulsar
|
pulsar/core/sippy/SipCCDiversion.py
|
Python
|
bsd-3-clause
| 1,314
|
import sys
def convert(n):
    """Convert a single word *n* to Pig Latin.

    Words starting with a vowel get "yay" appended; otherwise the first
    letter is moved to the end followed by "ay".
    """
    if not n:
        # Guard: the original indexed n[0] and crashed on empty input.
        return n
    if n[0] in ('i', 'o', 'u', 'y', 'e', 'a'):
        return n + "yay"
    else:
        return n[1:] + n[0] + "ay"
# main function that calls other functions
def main():
    """Interactively read words and print their Pig Latin conversion.

    Entering 'q' quits; Ctrl-C or EOF also exit cleanly.
    """
    while(1):
        try:
            try:
                gottenString = input("Enter a word for vowel counting: ")
                if gottenString != 'q':
                    print(convert(gottenString))
                else:
                    # BUG FIX: 'q' is clearly the quit sentinel, but the
                    # original only skipped the print and looped forever.
                    break
            except(KeyboardInterrupt):
                exit("")
        except(EOFError):
            exit("")
# Start the interactive loop (note: runs on import as well as execution).
main()
|
carlson-erik/ProgrammingExercises
|
Pig-Latin/pl_converter.py
|
Python
|
mit
| 528
|
# -*- coding: utf-8 -*-
#
"""
test_abnf.py
websocket - WebSocket client library for Python
Copyright 2021 engn33r
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import websocket as ws
from websocket._abnf import *
import sys
import unittest
sys.path[0:0] = [""]
class ABNFTest(unittest.TestCase):
    """Unit tests for the websocket ABNF frame class and frame_buffer."""

    def testInit(self):
        # Construction stores the header bits and opcode verbatim;
        # no validation happens at init time.
        a = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_PING)
        self.assertEqual(a.fin, 0)
        self.assertEqual(a.rsv1, 0)
        self.assertEqual(a.rsv2, 0)
        self.assertEqual(a.rsv3, 0)
        self.assertEqual(a.opcode, 9)
        self.assertEqual(a.data, '')
        # Even an invalid opcode/rsv combination is accepted here.
        a_bad = ABNF(0, 1, 0, 0, opcode=77)
        self.assertEqual(a_bad.rsv1, 1)
        self.assertEqual(a_bad.opcode, 77)

    def testValidate(self):
        # validate() must reject bad rsv bits, unknown opcodes and
        # malformed close-frame payloads.
        a_invalid_ping = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_PING)
        self.assertRaises(ws._exceptions.WebSocketProtocolException, a_invalid_ping.validate, skip_utf8_validation=False)
        a_bad_rsv_value = ABNF(0, 1, 0, 0, opcode=ABNF.OPCODE_TEXT)
        self.assertRaises(ws._exceptions.WebSocketProtocolException, a_bad_rsv_value.validate, skip_utf8_validation=False)
        a_bad_opcode = ABNF(0, 0, 0, 0, opcode=77)
        self.assertRaises(ws._exceptions.WebSocketProtocolException, a_bad_opcode.validate, skip_utf8_validation=False)
        a_bad_close_frame = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_CLOSE, data=b'\x01')
        self.assertRaises(ws._exceptions.WebSocketProtocolException, a_bad_close_frame.validate, skip_utf8_validation=False)
        a_bad_close_frame_2 = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_CLOSE, data=b'\x01\x8a\xaa\xff\xdd')
        self.assertRaises(ws._exceptions.WebSocketProtocolException, a_bad_close_frame_2.validate, skip_utf8_validation=False)
        a_bad_close_frame_3 = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_CLOSE, data=b'\x03\xe7')
        self.assertRaises(ws._exceptions.WebSocketProtocolException, a_bad_close_frame_3.validate, skip_utf8_validation=True)

    def testMask(self):
        # With data=None, masking is a pass-through.
        abnf_none_data = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_PING, mask=1, data=None)
        bytes_val = bytes("aaaa", 'utf-8')
        self.assertEqual(abnf_none_data._get_masked(bytes_val), bytes_val)
        abnf_str_data = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_PING, mask=1, data="a")
        self.assertEqual(abnf_str_data._get_masked(bytes_val), b'aaaa\x00')

    def testFormat(self):
        # Bad rsv bits / opcodes must be rejected when formatting.
        abnf_bad_rsv_bits = ABNF(2, 0, 0, 0, opcode=ABNF.OPCODE_TEXT)
        self.assertRaises(ValueError, abnf_bad_rsv_bits.format)
        abnf_bad_opcode = ABNF(0, 0, 0, 0, opcode=5)
        self.assertRaises(ValueError, abnf_bad_opcode.format)
        # First two header bytes encode opcode and masked payload length.
        abnf_length_10 = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_TEXT, data="abcdefghij")
        self.assertEqual(b'\x01', abnf_length_10.format()[0].to_bytes(1, 'big'))
        self.assertEqual(b'\x8a', abnf_length_10.format()[1].to_bytes(1, 'big'))
        self.assertEqual("fin=0 opcode=1 data=abcdefghij", abnf_length_10.__str__())
        abnf_length_20 = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_BINARY, data="abcdefghijabcdefghij")
        self.assertEqual(b'\x02', abnf_length_20.format()[0].to_bytes(1, 'big'))
        self.assertEqual(b'\x94', abnf_length_20.format()[1].to_bytes(1, 'big'))
        abnf_no_mask = ABNF(0, 0, 0, 0, opcode=ABNF.OPCODE_TEXT, mask=0, data=b'\x01\x8a\xcc')
        self.assertEqual(b'\x01\x03\x01\x8a\xcc', abnf_no_mask.format())

    def testFrameBuffer(self):
        fb = frame_buffer(0, True)
        self.assertEqual(fb.recv, 0)
        self.assertEqual(fb.skip_utf8_validation, True)
        # BUG FIX: the original wrote `fb.clear` — a bare attribute access
        # that never invoked the method — instead of calling it.
        fb.clear()
        self.assertEqual(fb.header, None)
        self.assertEqual(fb.length, None)
        self.assertEqual(fb.mask, None)
        self.assertEqual(fb.has_mask(), False)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
JonnyWong16/plexpy
|
lib/websocket/tests/test_abnf.py
|
Python
|
gpl-3.0
| 4,232
|
# CubiCal: a radio interferometric calibration suite
# (c) 2017 Rhodes University & Jonathan S. Kenyon
# http://github.com/ratt-ru/CubiCal
# This code is distributed under the terms of GPLv2, see LICENSE.md for details
"""
Handles the interface between measurement sets, CubiCal and Montblanc.
"""
from __future__ import print_function
from builtins import range
import numpy as np
import montblanc
import montblanc.util as mbu
import montblanc.impl.rime.tensorflow.ms.ms_manager as MS
from montblanc.impl.rime.tensorflow.sources import SourceProvider
from montblanc.impl.rime.tensorflow.sinks import SinkProvider
import datetime as dt
import pyrap.quanta as pq
from cubical.tools import logger, ModColor
log = logger.getLogger("MBSourceProvider")
class MSSourceProvider(SourceProvider):
"""
Handles interface between CubiCal tiles and Montblanc simulation.
"""
def __init__(self, tile, time_col, antea, anteb, ddid_col, uvw, freqs, sort_ind, nrows, do_pa_rotation=True):
"""
Initialises this source provider.
Args:
tile (:obj:`~cubical.data_handler.Tile`):
Tile object containing information about current data selection.
uvw (np.darray):
(n_row, 3) array of UVW coordinates.
freqs (np.darray):
(n_ddid,n_chan) array of frequencies.
sort_ind (np.ndarray):
Indices which will produce sorted data. Montblanc expects data to be ordered in a specific way.
nrows (int):
Number of rows in the UNPADDED data. This is necessary for the revised uvw code.
"""
self._ms = tile.dh.ms
self._ms_name = tile.dh.ms_name
self._name = "Measurement set '{ms}'".format(ms=self._ms_name)
self._antpos = tile.dh.antpos
self._phadir = tile.dh.phadir
self._ntime = len(np.unique(time_col))
self._nchan = freqs.shape[1]
self._nants = tile.dh.nants
self._ncorr = tile.dh.ncorr
self._nbl = (self._nants*(self._nants - 1))/2
self._freqs = freqs
self._times = time_col
self._antea = antea
self._anteb = anteb
self._ddids = ddid_col
self._nddid = freqs.shape[0]
self._uvwco = uvw # data['uvwco']
self._nrows = nrows
self.sort_ind = sort_ind
self.do_pa_rotation = do_pa_rotation
    def name(self):
        """ Returns name of associated source provider. """
        # Fixed at construction time from the measurement set path.
        return self._name
    def updated_dimensions(self):
        """ Inform Montblanc of the dimensions assosciated with this source provider. """
        # All sizes were computed in __init__ from the tile selection.
        # 'nchan' is the total channel count across all DDIDs.
        return [('ntime', self._ntime),
                ('nbl', self._nbl),
                ('na', self._nants),
                ('nchan', self._nchan*self._nddid),
                ('nbands', self._nddid),
                ('npol', 4),
                ('npolchan', 4*self._nchan)]
    def frequency(self, context):
        """ Provides Montblanc with an array of frequencies. """
        # Reshape the (n_ddid, n_chan) grid to whatever shape the
        # context requests, cast to its dtype.
        return self._freqs.reshape(context.shape).astype(context.dtype)
def uvw(self, context):
""" Provides Montblanc with an array of uvw coordinates. """
# Figure out our extents in the time dimension and our global antenna and baseline sizes.
(t_low, t_high) = context.dim_extents('ntime')
# Figure out chunks in time (may be repetitious, but needed an easy fix).
_, counts = np.unique(self._times[:self._nrows], return_counts=True)
chunks = np.asarray(counts)
# Compute per antenna uvw coordinates. Data must be ordered by time.
# Per antenna uvw coordinates fail on data where time!=time_centroid.
ant_uvw = mbu.antenna_uvw(self._uvwco[:self._nrows],
self._antea[:self._nrows],
self._anteb[:self._nrows],
chunks,
self._nants,
check_missing=False,
check_decomposition=False,
max_err=100)
return ant_uvw[t_low:t_high, ...].astype(context.dtype)
def antenna1(self, context):
""" Provides Montblanc with an array of antenna1 values. """
lrow, urow = MS.uvw_row_extents(context)
antenna1 = self._antea[self.sort_ind][lrow:urow]
return antenna1.reshape(context.shape).astype(context.dtype)
def antenna2(self, context):
""" Provides Montblanc with an array of antenna2 values. """
lrow, urow = MS.uvw_row_extents(context)
antenna2 = self._anteb[self.sort_ind][lrow:urow]
return antenna2.reshape(context.shape).astype(context.dtype)
def parallactic_angles(self, context):
""" Provides Montblanc with an array of parallactic angles. """
# Time and antenna extents
(lt, ut), (la, ua) = context.dim_extents('ntime', 'na')
if not self.do_pa_rotation:
return np.zeros(context.shape, dtype=context.dtype)
def __mjd2dt(utc_timestamp):
"""
Converts array of UTC timestamps to list of datetime objects for human readable printing
"""
return [dt.datetime.utcfromtimestamp(pq.quantity(t, "s").to_unix_time()) for t in utc_timestamp]
utc_times = np.unique(self._times[self.sort_ind])[lt:ut]
dt_start = __mjd2dt([np.min(utc_times)])[0].strftime('%Y/%m/%d %H:%M:%S')
dt_end = __mjd2dt([np.max(utc_times)])[0].strftime('%Y/%m/%d %H:%M:%S')
log(2).print("Computing parallactic angles for times between %s and %s UTC" % (dt_start, dt_end))
return mbu.parallactic_angles(
np.unique(self._times[self.sort_ind])[lt:ut],
self._antpos[la:ua],
self._phadir).reshape(context.shape).astype(context.dtype)
def feed_angles(self, context):
""" Provides Montblanc with an array of feed angles. """
(la, ua) = context.dim_extents('na')
# TODO(osmirnov)
# Please fill me in
return np.zeros(ua-la, dtype=context.dtype)
def __enter__(self):
return self
def __exit__(self, etype, evalue, etraceback):
self.close()
def __str__(self):
return self.__class__.__name__
class ColumnSinkProvider(SinkProvider):
    """
    Handles Montblanc output and makes it consistent with the measurement set.
    """
    def __init__(self, dh, freqshape, model, sort_ind):
        """
        Initialises this sink provider.

        Args:
            dh (:obj:`~cubical.data_handler.MSDataHandler`):
                Data handler object.
            freqshape (tuple):
                Shape of frequency array, i.e. (nddid, nchan)
            model (np.ndarray):
                Array of model visibilities into which output will be written.
            sort_ind (np.ndarray):
                Indices which will produce sorted data. Montblanc expects data to be ordered.
        """
        self._model = model
        self._ncorr = dh.ncorr
        self._name = "Measurement Set '{ms}'".format(ms=dh.ms_name)
        # Direction currently being written; updated via set_direction().
        self._dir = 0
        self.sort_ind = sort_ind
        self._nddid, self._chan_per_ddid = freqshape
    def name(self):
        """ Returns name of associated sink provider. """
        return self._name
    def set_direction(self, idir):
        """Sets current direction being simulated.

        Args:
            idir (int):
                Direction number, from 0 to n_dir-1
        """
        self._dir = idir
    def model_vis(self, context):
        """ Tells Montblanc how to handle the model visibility output. """
        # NOTE(review): the dim_extents values below are effectively unused —
        # lc/uc are overwritten per DDID in the loop, and row bounds come
        # from MS.row_extents instead.
        (lt, ut), (lbl, ubl), (lc, uc) = context.dim_extents('ntime', 'nbl', 'nchan')
        lower, upper = MS.row_extents(context, ("ntime", "nbl"))
        ntime, nbl, nchan = context.dim_global_size('ntime', 'nbl', 'nchan')
        rows_per_ddid = ntime*nbl
        # Correlation selection on the last axis of Montblanc's output:
        # 1 corr -> element 0; 2 corrs -> elements 0 and 3; else all four.
        if self._ncorr == 1:
            sel = 0
        elif self._ncorr == 2:
            sel = slice(None, None, 3)
        else:
            sel = slice(None)
        # Accumulate (+=) each DDID's chunk into the model array at the
        # row/channel offsets corresponding to that DDID.
        for ddid_ind in range(self._nddid):
            offset = ddid_ind*rows_per_ddid
            lr = lower + offset
            ur = upper + offset
            lc = ddid_ind*self._chan_per_ddid
            uc = (ddid_ind+1)*self._chan_per_ddid
            self._model[self._dir, 0, lr:ur, :, :] += \
                context.data[:,:,lc:uc,sel].reshape(-1, self._chan_per_ddid, self._ncorr)
    def __str__(self):
        return self.__class__.__name__
# Module-level singleton Montblanc solver: created lazily by simulate()
# and closed at interpreter exit (see _shutdown_mb_slvr below).
_mb_slvr = None
def simulate(src_provs, snk_provs, polarisation_type, opts):
    """
    Convenience function which creates and executes a Montblanc solver for the given source and
    sink providers.

    Args:
        src_provs (list):
            List of :obj:`~montblanc.impl.rime.tensorflow.sources.SourceProvider` objects. See
            Montblanc's documentation.
        snk_provs (list):
            List of :obj:`~montblanc.impl.rime.tensorflow.sinks.SinkProvider` objects. See
            Montblanc's documentation.
        polarisation_type (str):
            Polarisation basis passed to the Montblanc solver configuration.
        opts (dict):
            Montblanc simulation options (see [montblanc] section in DefaultParset.cfg).
    """
    global _mb_slvr
    if _mb_slvr is None:
        # Build the solver once; subsequent calls reuse the cached instance.
        slvr_cfg = montblanc.rime_solver_cfg(
            mem_budget=opts["mem-budget"]*1024*1024,
            dtype=opts["dtype"],
            polarisation_type=polarisation_type,
            device_type=opts["device-type"])
        _mb_slvr = montblanc.rime_solver(slvr_cfg)
    _mb_slvr.solve(source_providers=src_provs, sink_providers=snk_provs)
import atexit
def _shutdown_mb_slvr():
    """ Closes the cached Montblanc solver (if one was created) at exit. """
    global _mb_slvr
    if _mb_slvr is not None:
        _mb_slvr.close()
atexit.register(_shutdown_mb_slvr)
|
ratt-ru/CubiCal
|
cubical/data_handler/MBTiggerSim.py
|
Python
|
gpl-2.0
| 9,846
|
# Original SourceCode
# https://github.com/blackducksoftware/ohloh_api/blob/master/examples/account_sample.py
"""
The MIT License (MIT)
Copyright (c) 2013 Thijs Triemstra
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
"""
This is an example of using the Ohloh API from Python.
Detailed information can be found on GitHub:
https://github.com/blackducksoftware/ohloh_api
This example uses the ElementTree library for XML parsing
(included in Python 2.5 and newer):
http://effbot.org/zone/element-index.htm
This example retrieves basic Ohloh account information
and outputs it as simple name: value pairs.
Pass your Ohloh API key as the first parameter to this script.
Ohloh API keys are free. If you do not have one, you can obtain one
at the Ohloh website:
https://www.openhub.net/accounts/<your_login>/api_keys/new
Pass the email address of the account as the second parameter to this script.
"""
'''
Changes Inclusion Update
Copyright (c) 2016
Author: Sai Uday Shankar Korlimarla
Email: skorlimarla@unomaha.edu
'''
import sys
import urllib
import hashlib
from os.path import expanduser
from pymongo import MongoClient
# import ElementTree based on the python version
'''
# Hailmary elementtree imports
# using xml.etree for now
try:
import elementtree.ElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
'''
import xml.etree.ElementTree as ET
# We pass the MD5 hash of the email address
def setOhlohBaseURL():
    """Return the base URL of the Ohloh (OpenHub) website."""
    base_url = "https://www.openhub.net/"
    return base_url
def email_to_md5(emailAddress):
    '''
    Return the MD5 hexdigest of an email address.

    Args:
        emailAddress: address (str or bytes) of a person to be found on Ohloh.

    Returns:
        32-character hexadecimal MD5 digest string.
    '''
    # Encode explicitly: on Python 2 str is already bytes, but on Python 3
    # hashlib digests require bytes and update(str) raises TypeError.
    if isinstance(emailAddress, bytes):
        data = emailAddress
    else:
        data = emailAddress.encode('utf-8')
    email = hashlib.md5()
    email.update(data)
    return str(email.hexdigest())
def setOhlohAPIkey():
    '''
    Read the Ohloh API key from the configuration file
    (first line of ~/.apeconfig/openhub.conf) and return it as a
    URL-encoded query string together with the API version.
    '''
    apiKey = ''
    home = expanduser("~")
    file = home + '/.apeconfig/openhub.conf'
    with open(file, 'r') as OhlohConf:
        apiKey = OhlohConf.readline().strip()
    # NOTE(review): urllib.urlencode exists only on Python 2; Python 3
    # would need urllib.parse.urlencode -- confirm target interpreter.
    return urllib.urlencode({'api_key': apiKey, 'v': 1})
def setOhlohPathType(path):
    '''
    Map a human-readable path choice to the Ohloh API path segment.

    Usage:
        Choose one of (people, organization, project).
        An unknown choice raises KeyError, matching a dict lookup.
    '''
    if path == "people":
        return 'accounts'
    if path == 'organization':
        return 'org'
    if path == 'project':
        return 'p'
    raise KeyError(path)
def preParse(OhlohBaseURL, OhlohPath, searchAttr, params):
    '''
    Construct a URL for the Ohloh API.

    Args: OhlohBaseURL, path, search attributes and params (API Key).
    Warning: if something other than email is used for the accounts
    path, an MD5 hexdigest is still produced.
    '''
    # Account lookups are keyed on the MD5 hash of the email address.
    if OhlohPath == 'accounts':
        searchAttr = email_to_md5(searchAttr)
    return "{0}{1}/{2}.xml?{3}".format(OhlohBaseURL, OhlohPath, searchAttr, params)
def xmlDocTree(OhlohFormedURL):
    '''
    Fetch a URL and return the root XML element of the Ohloh response.

    Args:
        OhlohFormedURL: URL used to access the Ohloh API.

    Exits the process with status -1 if Ohloh reports an error element.
    '''
    f = urllib.urlopen(OhlohFormedURL)
    # Parse the response into a structured XML object and return
    tree = ET.parse(f)
    elementTree = tree.getroot()
    error = elementTree.find("error")
    # An Element with no children is falsy, so the previous plain truth
    # test silently missed an empty <error/> tag; compare against None.
    if error is not None:
        print('Ohloh returned:', ET.tostring(error))
        sys.exit(-1)
    return elementTree
def parse_people(elementTree):
    '''
    Print account details when the path choice is people.

    Args:
        elementTree: root element returned by xmlDocTree.
    '''
    for node in elementTree.find("result/account"):
        if node.tag == "kudo_score":
            print("%s:" % node.tag)
            # BUG FIX: the original iterated over the undefined name 'elem'
            # (NameError); iterate the tree that was actually passed in.
            for score in elementTree.find("result/account/kudo_score"):
                print("\t%s:\t%s" % (score.tag, score.text))
        else:
            print("%s:\t%s" % (node.tag, node.text))
def parse_project(elementTree):
    '''
    Parse and pick content if path choice is project.

    Args:
        elementTree: root element returned by xmlDocTree.

    Returns:
        dict: project metadata (id, name, tags, factoids, languages, logo,
        lines of code, commit count, main language, activity index,
        description, url, timestamps and rating info). Fields missing
        from the response default to '' / empty containers.
    '''
    def _text(path):
        # Text of the first element at *path*, or '' when absent.
        node = elementTree.find(path)
        return node.text if node is not None else ''

    tags = []
    if elementTree.find("result/project/tags") is not None:
        for node in elementTree.find("result/project/tags"):
            tags.append(node.text.strip())

    facts = {}
    if elementTree.find("result/project/analysis/factoids") is not None:
        for node in elementTree.find("result/project/analysis/factoids"):
            facts[node.attrib['type'].strip()] = node.text.strip()

    languages = {}
    if elementTree.find("result/project/analysis/languages") is not None:
        for node in elementTree.find("result/project/analysis/languages"):
            languages[node.text.strip()] = node.attrib['percentage'].strip()

    activity_index = {}
    # BUG FIX: compare against None explicitly. Truth-testing an Element is
    # deprecated and False for a childless element, so the original check
    # was inconsistent with every other lookup in this function.
    if elementTree.find("result/project/project_activity_index") is not None:
        for node in elementTree.find("result/project/project_activity_index"):
            activity_index[node.tag] = node.text

    openhubData = {}
    openhubData['project_id'] = _text("result/project/id")
    openhubData['project_name'] = _text("result/project/name")
    openhubData['facts'] = facts
    openhubData['tags'] = tags
    openhubData['languages'] = languages
    openhubData['logo'] = _text("result/project/small_logo_url")
    openhubData['loc'] = _text("result/project/analysis/total_code_lines")
    openhubData['commitcount'] = _text("result/project/analysis/total_commit_count")
    openhubData['main_lang'] = _text("result/project/analysis/main_language_name")
    openhubData['activity_index'] = activity_index
    openhubData['description'] = _text("result/project/description")
    openhubData['url'] = _text("result/project/url")
    openhubData['created_at'] = _text("result/project/created_at")
    openhubData['updated_at'] = _text("result/project/updated_at")
    openhubData['rating'] = _text("result/project/average_rating")
    openhubData['rating_count'] = _text("result/project/rating_count")
    openhubData['review_count'] = _text("result/project/review_count")
    return openhubData
def parse_organization(elementTree):
    '''
    Parse and pick content if path choice is organization.

    *** NOT YET IMPLEMENTED ***

    Args:
        elementTree: root element returned by xmlDocTree.
    '''
    raise NotImplementedError
def insert_into_database(openhubData, upload_id):
    """
    Store the scraped OpenHub data in the local MongoDB and return its
    document id.

    Also stamps the project's logo URL onto the matching 'uploads'
    document. If an identical openhub document already exists, its
    existing _id is returned instead of inserting a duplicate.
    """
    mongo_url = 'mongodb://localhost:27017/'
    client = MongoClient(mongo_url)
    ape_db = client['apedb']
    openhub = ape_db.openhub
    uploads = ape_db.uploads
    # NOTE(review): Collection.update is deprecated in modern pymongo
    # (update_one is the replacement) -- confirm pymongo version in use.
    uploads.update({'_id': upload_id},{'$set': {'logo': openhubData['logo']}})
    if openhub.find_one(openhubData) is None:
        post_id = openhub.insert_one(openhubData).inserted_id
        return post_id
    else:
        existing = ''
        existing = openhub.find_one(openhubData)
        return existing['_id']
if __name__ == '__main__':
    # Command line: openhub.py <path-type> <search-attr> <upload-id>
    # where path-type is one of 'people', 'organization', 'project'.
    requstedPath = sys.argv[1]
    # NOTE: the duplicate `searchAttr = sys.argv[2]` assignment was removed.
    searchAttr = sys.argv[2]
    upload_id = sys.argv[3]
    OhlohPath = setOhlohPathType(requstedPath)
    params = setOhlohAPIkey()
    baseURL = setOhlohBaseURL()
    OhlohAbsURL = preParse(OhlohBaseURL=baseURL, OhlohPath=OhlohPath,
                           searchAttr=searchAttr, params=params)
    elementTree = xmlDocTree(OhlohAbsURL)
    if requstedPath == 'people':
        parse_people(elementTree=elementTree)
    elif requstedPath == 'organization':
        parse_organization(elementTree=elementTree)
    elif requstedPath == 'project':
        openhubData = parse_project(elementTree=elementTree)
        mid = insert_into_database(openhubData, upload_id)
        # Emit the Mongo document id so the calling process can capture it.
        sys.stdout.write(str(mid))
|
UShan89/ape
|
scanners/openhub.py
|
Python
|
gpl-3.0
| 10,601
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Midokura Japan K.K.
# Copyright (C) 2013 Midokura PTE LTD
# Copyright (C) 2014 Midokura SARL.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Takaaki Suzuki, Midokura Japan KK
# @author: Tomoe Sugihara, Midokura Japan KK
# @author: Ryu Ishimoto, Midokura Japan KK
# @author: Rossella Sblendido, Midokura Japan KK
# @author: Duarte Nunes, Midokura Japan KK
from oslo.config import cfg
from midonetclient import client
from midonet.neutron import api
from midonet.neutron.common import config # noqa
from midonet.neutron.common import util
from midonet.neutron.db import task
from midonet.neutron import extensions
from sqlalchemy import exc as sa_exc
from neutron.api import extensions as neutron_extensions
from neutron.api.rpc.handlers import dhcp_rpc
from neutron.common import constants as n_const
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron.db import agents_db
from neutron.db import agentschedulers_db
from neutron.db import api as db
from neutron.db import db_base_plugin_v2
from neutron.db import external_net_db
from neutron.db import l3_gwmode_db
from neutron.db.loadbalancer import loadbalancer_db
from neutron.db import portbindings_db
from neutron.db import routedserviceinsertion_db as rsi_db
from neutron.db import securitygroups_db
from neutron.extensions import portbindings
from neutron.extensions import routedserviceinsertion as rsi
from neutron.extensions import securitygroup as ext_sg
from neutron.openstack.common import excutils
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.plugins.common import constants
LOG = logging.getLogger(__name__)
class MidonetPluginV2(db_base_plugin_v2.NeutronDbPluginV2,
                      portbindings_db.PortBindingMixin,
                      external_net_db.External_net_db_mixin,
                      l3_gwmode_db.L3_NAT_db_mixin,
                      agentschedulers_db.DhcpAgentSchedulerDbMixin,
                      securitygroups_db.SecurityGroupDbMixin,
                      rsi_db.RoutedServiceInsertionDbMixin,
                      loadbalancer_db.LoadBalancerPluginDb,
                      api.MidoNetApiMixin,
                      task.MidoClusterMixin):
    """Neutron core plugin for MidoNet.

    Persists resources in the Neutron DB via the mixin base classes and
    mirrors each operation to the MidoNet API through ``self.api_cli``.
    """

    # Neutron extension aliases implemented by this plugin.
    supported_extension_aliases = ['agent',
                                   'binding',
                                   'bgp',
                                   'cluster',
                                   'chain-rule',
                                   'dhcp_agent_scheduler',
                                   'external-net',
                                   'ip-addr-group',
                                   'license',
                                   'midonet-subnet',
                                   'router',
                                   'host',
                                   'bridge',
                                   'midonet-port',
                                   'midonet-router',
                                   'port-group',
                                   'quotas',
                                   'security-group',
                                   'system',
                                   'routed-service-insertion',
                                   'routing-table',
                                   'vtep',
                                   'lbaas',
                                   'tunnelzone']
    # Enables Neutron's native bulk create operations for this plugin.
    __native_bulk_support = True
    def __init__(self):
        """Set up the MidoNet API client, RPC consumers, the quotas table
        workaround and the default port-binding dictionary."""
        super(MidonetPluginV2, self).__init__()
        # Instantiate MidoNet API client
        conf = cfg.CONF.MIDONET
        neutron_extensions.append_api_extensions_path(extensions.__path__)
        self.api_cli = client.MidonetClient(conf.midonet_uri, conf.username,
                                            conf.password,
                                            project_id=conf.project_id)
        self.setup_rpc()
        self.repair_quotas_table()
        # Defaults applied to every port binding created by this plugin.
        self.base_binding_dict = {
            portbindings.VIF_TYPE: portbindings.VIF_TYPE_MIDONET,
            portbindings.VNIC_TYPE: portbindings.VNIC_NORMAL,
            portbindings.VIF_DETAILS: {
                # TODO(rkukura): Replace with new VIF security details
                portbindings.CAP_PORT_FILTER:
                'security-group' in self.supported_extension_aliases}}
        self.network_scheduler = importutils.import_object(
            cfg.CONF.network_scheduler_driver
        )
    def setup_rpc(self):
        """Create RPC consumers for DHCP and agent callbacks on the plugin
        topic."""
        # RPC support
        self.topic = topics.PLUGIN
        self.conn = n_rpc.create_connection(new=True)
        self.endpoints = [dhcp_rpc.DhcpRpcCallback(),
                          agents_db.AgentExtRpcCallback()]
        self.conn.create_consumer(self.topic, self.endpoints,
                                  fanout=False)
        # Consume from all consumers in a thread
        self.conn.consume_in_threads()
    def repair_quotas_table(self):
        """Create the `quotas` table with raw MySQL DDL, treating the
        error raised when the table already exists as success."""
        query = ("CREATE TABLE `quotas` ( `id` varchar(36) NOT NULL, "
                 "`tenant_id` varchar(255) DEFAULT NULL, "
                 "`resource` varchar(255) DEFAULT NULL, "
                 "`limit` int(11) DEFAULT NULL, "
                 "PRIMARY KEY (`id`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8;")
        session = db.get_session()
        try:
            session.execute(query)
        except sa_exc.OperationalError:
            # If the table already exists, then this is expected.
            pass
    def _process_create_network(self, context, network):
        """Create the network in the Neutron DB (including external-net
        processing) inside a single transaction and return it. The MidoNet
        side is handled by the caller (create_network)."""
        net_data = network['network']
        tenant_id = self._get_tenant_id_for_create(context, net_data)
        net_data['tenant_id'] = tenant_id
        self._ensure_default_security_group(context, tenant_id)
        with context.session.begin(subtransactions=True):
            net = super(MidonetPluginV2, self).create_network(context, network)
            self._process_l3_create(context, net, net_data)
        return net
    @util.handle_api_error
    def create_network(self, context, network):
        """Create Neutron network.

        Create a new Neutron network and its corresponding MidoNet bridge.
        The Neutron record is rolled back if the MidoNet call fails.
        """
        LOG.info(_('MidonetPluginV2.create_network called: network=%r'),
                 network)
        net = self._process_create_network(context, network)
        try:
            self.api_cli.create_network(net)
        except Exception as ex:
            LOG.error(_("Failed to create a network %(net_id)s in Midonet:"
                        "%(err)s"), {"net_id": net["id"], "err": ex})
            # Undo the Neutron-side creation before re-raising.
            with excutils.save_and_reraise_exception():
                super(MidonetPluginV2, self).delete_network(context, net['id'])
        LOG.info(_("MidonetPluginV2.create_network exiting: net=%r"), net)
        return net
    @util.handle_api_error
    def update_network(self, context, id, network):
        """Update Neutron network.

        Update an existing Neutron network and its corresponding MidoNet
        bridge. The MidoNet call is made inside the DB transaction so a
        failure rolls back the Neutron update as well.
        """
        LOG.info(_("MidonetPluginV2.update_network called: id=%(id)r, "
                   "network=%(network)r"), {'id': id, 'network': network})
        with context.session.begin(subtransactions=True):
            net = super(MidonetPluginV2, self).update_network(
                context, id, network)
            self._process_l3_update(context, net, network['network'])
            self.api_cli.update_network(id, net)
        LOG.info(_("MidonetPluginV2.update_network exiting: net=%r"), net)
        return net
    @util.handle_api_error
    @utils.synchronized('midonet-critical-section', external=True)
    def delete_network(self, context, id):
        """Delete a network and its corresponding MidoNet bridge."""
        LOG.info(_("MidonetPluginV2.delete_network called: id=%r"), id)
        with context.session.begin(subtransactions=True):
            self._process_l3_delete(context, id)
            super(MidonetPluginV2, self).delete_network(context, id)
            # MidoNet call inside the transaction: failure rolls back
            # the Neutron deletion too.
            self.api_cli.delete_network(id)
        LOG.info(_("MidonetPluginV2.delete_network exiting: id=%r"), id)
    @util.handle_api_error
    def create_subnet(self, context, subnet):
        """Create Neutron subnet.

        Creates a Neutron subnet and a DHCP entry in MidoNet bridge.
        The Neutron record is removed again if the MidoNet call fails.
        """
        LOG.info(_("MidonetPluginV2.create_subnet called: subnet=%r"), subnet)
        sn_entry = super(MidonetPluginV2, self).create_subnet(context, subnet)
        try:
            self.api_cli.create_subnet(sn_entry)
        except Exception as ex:
            LOG.error(_("Failed to create a subnet %(s_id)s in Midonet:"
                        "%(err)s"), {"s_id": sn_entry["id"], "err": ex})
            # Roll back the Neutron-side subnet before re-raising.
            with excutils.save_and_reraise_exception():
                super(MidonetPluginV2, self).delete_subnet(context,
                                                           sn_entry['id'])
        LOG.info(_("MidonetPluginV2.create_subnet exiting: sn_entry=%r"),
                 sn_entry)
        return sn_entry
    @util.handle_api_error
    def delete_subnet(self, context, id):
        """Delete Neutron subnet.

        Delete neutron network and its corresponding MidoNet bridge.
        The MidoNet call is made inside the DB transaction so a failure
        rolls back the Neutron deletion as well.
        """
        LOG.info(_("MidonetPluginV2.delete_subnet called: id=%s"), id)
        with context.session.begin(subtransactions=True):
            super(MidonetPluginV2, self).delete_subnet(context, id)
            self.api_cli.delete_subnet(id)
        LOG.info(_("MidonetPluginV2.delete_subnet exiting"))
    @util.handle_api_error
    def update_subnet(self, context, id, subnet):
        """Update the subnet with new info, mirroring the change to
        MidoNet inside the same DB transaction.
        """
        LOG.info(_("MidonetPluginV2.update_subnet called: id=%s"), id)
        with context.session.begin(subtransactions=True):
            s = super(MidonetPluginV2, self).update_subnet(context, id, subnet)
            self.api_cli.update_subnet(id, s)
        return s
    def _process_create_port(self, context, port):
        """Create a L2 port in the Neutron DB only; the MidoNet side is
        handled by the caller (create_port)."""
        port_data = port['port']
        with context.session.begin(subtransactions=True):
            # Create a Neutron port
            new_port = super(MidonetPluginV2, self).create_port(context, port)
            # Make sure that the port created is valid
            if "id" not in new_port:
                raise n_exc.BadRequest(resource='port',
                                       msg="Invalid port created")
            # Update fields
            port_data.update(new_port)
            # Bind security groups to the port
            self._ensure_default_security_group_on_port(context, port)
            sg_ids = self._get_security_groups_on_port(context, port)
            self._process_port_create_security_group(context, new_port, sg_ids)
            self._process_portbindings_create_and_update(context, port_data,
                                                         new_port)
        return new_port
    @util.handle_api_error
    @utils.synchronized('midonet-critical-section', external=True)
    def create_port(self, context, port):
        """Create a L2 port in Neutron/MidoNet. The Neutron record is
        rolled back if the MidoNet call fails."""
        LOG.info(_("MidonetPluginV2.create_port called: port=%r"), port)
        new_port = self._process_create_port(context, port)
        try:
            self.api_cli.create_port(new_port)
        except Exception as ex:
            LOG.error(_("Failed to create a port %(new_port)s: %(err)s"),
                      {"new_port": new_port, "err": ex})
            # Undo the Neutron-side port before re-raising.
            with excutils.save_and_reraise_exception():
                super(MidonetPluginV2, self).delete_port(context,
                                                         new_port['id'])
        LOG.info(_("MidonetPluginV2.create_port exiting: port=%r"), new_port)
        return new_port
    @util.handle_api_error
    @utils.synchronized('midonet-critical-section', external=True)
    def delete_port(self, context, id, l3_port_check=True):
        """Delete a neutron port and corresponding MidoNet bridge port."""
        LOG.info(_("MidonetPluginV2.delete_port called: id=%(id)s "
                   "l3_port_check=%(l3_port_check)r"),
                 {'id': id, 'l3_port_check': l3_port_check})
        # if needed, check to see if this is a port owned by
        # an l3-router. If so, we should prevent deletion.
        if l3_port_check:
            self.prevent_l3_port_deletion(context, id)
        with context.session.begin(subtransactions=True):
            # Detach floating IPs first (without notifying), then delete
            # the port and mirror the deletion to MidoNet in-transaction.
            super(MidonetPluginV2, self).disassociate_floatingips(
                context, id, do_notify=False)
            super(MidonetPluginV2, self).delete_port(context, id)
            self.api_cli.delete_port(id)
        LOG.info(_("MidonetPluginV2.delete_port exiting: id=%r"), id)
    def _process_port_update(self, context, id, in_port, out_port):
        """Refresh the port's security-group bindings when the update adds
        or removes security groups."""
        has_sg = self._check_update_has_security_groups(in_port)
        delete_sg = self._check_update_deletes_security_groups(in_port)
        if delete_sg or has_sg:
            # delete the port binding and recreate it with the new rules.
            self._delete_port_security_group_bindings(context, id)
            sg_ids = self._get_security_groups_on_port(context, in_port)
            self._process_port_create_security_group(context, out_port, sg_ids)
    @util.handle_api_error
    def update_port(self, context, id, port):
        """Handle port update, including security groups and fixed IPs.

        The MidoNet call is made inside the DB transaction so a failure
        rolls back the Neutron update as well.
        """
        LOG.info(_("MidonetPluginV2.update_port called: id=%(id)s "
                   "port=%(port)r"), {'id': id, 'port': port})
        with context.session.begin(subtransactions=True):
            # update the port DB
            p = super(MidonetPluginV2, self).update_port(context, id, port)
            self._process_port_update(context, id, port, p)
            self._process_portbindings_create_and_update(context,
                                                         port['port'], p)
            self.api_cli.update_port(id, p)
        LOG.info(_("MidonetPluginV2.update_port exiting: p=%r"), p)
        return p
    @util.handle_api_error
    def create_router(self, context, router):
        """Handle router creation.

        When a new Neutron router is created, its corresponding MidoNet router
        is also created. In MidoNet, this router is initialized with chains
        for inbound and outbound traffic, which will be used to hold other
        chains that include various rules, such as NAT.

        The Neutron record is rolled back if the MidoNet call fails.

        :param router: Router information provided to create a new router.
        """
        LOG.info(_("MidonetPluginV2.create_router called: router=%(router)s"),
                 {"router": router})
        r = super(MidonetPluginV2, self).create_router(context, router)
        try:
            self.api_cli.create_router(r)
        except Exception as ex:
            LOG.error(_("Failed to create a router %(r_id)s in Midonet:"
                        "%(err)s"), {"r_id": r["id"], "err": ex})
            # Undo the Neutron-side router before re-raising.
            with excutils.save_and_reraise_exception():
                super(MidonetPluginV2, self).delete_router(context, r['id'])
        LOG.info(_("MidonetPluginV2.create_router exiting: "
                   "router=%(router)s."), {"router": r})
        return r
    @util.handle_api_error
    def update_router(self, context, id, router):
        """Handle router updates, mirroring the change to MidoNet inside
        the same DB transaction."""
        LOG.info(_("MidonetPluginV2.update_router called: id=%(id)s "
                   "router=%(router)r"), {"id": id, "router": router})
        with context.session.begin(subtransactions=True):
            r = super(MidonetPluginV2, self).update_router(context, id, router)
            self.api_cli.update_router(id, r)
        LOG.info(_("MidonetPluginV2.update_router exiting: router=%r"), r)
        return r
    @util.handle_api_error
    def delete_router(self, context, id):
        """Handler for router deletion.

        Deleting a router on Neutron simply means deleting its corresponding
        router in MidoNet. The MidoNet call is made inside the DB
        transaction so a failure rolls back the Neutron deletion too.

        :param id: router ID to remove
        """
        LOG.info(_("MidonetPluginV2.delete_router called: id=%s"), id)
        with context.session.begin(subtransactions=True):
            super(MidonetPluginV2, self).delete_router(context, id)
            self.api_cli.delete_router(id)
        LOG.info(_("MidonetPluginV2.delete_router exiting: id=%s"), id)
    @util.handle_api_error
    def add_router_interface(self, context, router_id, interface_info):
        """Handle router linking with network.

        Rolls back the Neutron-side interface (via remove_router_interface)
        if the MidoNet call fails.
        """
        LOG.info(_("MidonetPluginV2.add_router_interface called: "
                   "router_id=%(router_id)s "
                   "interface_info=%(interface_info)r"),
                 {'router_id': router_id, 'interface_info': interface_info})
        info = super(MidonetPluginV2, self).add_router_interface(
            context, router_id, interface_info)
        try:
            self.api_cli.add_router_interface(router_id, info)
        except Exception:
            LOG.error(_("Failed to create MidoNet resources to add router "
                        "interface. info=%(info)s, router_id=%(router_id)s"),
                      {"info": info, "router_id": router_id})
            with excutils.save_and_reraise_exception():
                self.remove_router_interface(context, router_id, info)
        LOG.info(_("MidonetPluginV2.add_router_interface exiting: info=%r"),
                 info)
        return info
    @util.handle_api_error
    def remove_router_interface(self, context, router_id, interface_info):
        """Handle router un-linking with network, mirroring the removal to
        MidoNet inside the same DB transaction."""
        LOG.info(_("MidonetPluginV2.remove_router_interface called: "
                   "router_id=%(router_id)s "
                   "interface_info=%(interface_info)r"),
                 {'router_id': router_id, 'interface_info': interface_info})
        with context.session.begin(subtransactions=True):
            info = super(MidonetPluginV2, self).remove_router_interface(
                context, router_id, interface_info)
            self.api_cli.remove_router_interface(router_id, interface_info)
        LOG.info(_("MidonetPluginV2.remove_router_interface exiting: "
                   "info=%r"), info)
        return info
    @util.handle_api_error
    def create_floatingip(self, context, floatingip):
        """Handle floating IP creation. The Neutron record is removed
        again if the MidoNet call fails."""
        LOG.info(_("MidonetPluginV2.create_floatingip called: ip=%r"),
                 floatingip)
        fip = super(MidonetPluginV2, self).create_floatingip(context,
                                                             floatingip)
        try:
            self.api_cli.create_floating_ip(fip)
        except Exception as ex:
            LOG.error(_("Failed to create floating ip %(fip)s: %(err)s"),
                      {"fip": fip, "err": ex})
            with excutils.save_and_reraise_exception():
                # Try removing the fip
                self.delete_floatingip(context, fip['id'])
        LOG.info(_("MidonetPluginV2.create_floatingip exiting: fip=%r"),
                 fip)
        return fip
    @util.handle_api_error
    def delete_floatingip(self, context, id):
        """Handle floating IP deletion, mirroring the removal to MidoNet
        inside the same DB transaction."""
        LOG.info(_("MidonetPluginV2.delete_floatingip called: id=%s"), id)
        with context.session.begin(subtransactions=True):
            super(MidonetPluginV2, self).delete_floatingip(context, id)
            self.api_cli.delete_floating_ip(id)
        LOG.info(_("MidonetPluginV2.delete_floatingip exiting: id=%r"), id)
    @util.handle_api_error
    def update_floatingip(self, context, id, floatingip):
        """Handle floating IP association and disassociation.

        The floating IP status is set ACTIVE when associated with a port
        and DOWN otherwise; the MidoNet update happens in-transaction.
        """
        LOG.info(_("MidonetPluginV2.update_floatingip called: id=%(id)s "
                   "floatingip=%(floatingip)s "),
                 {'id': id, 'floatingip': floatingip})
        with context.session.begin(subtransactions=True):
            fip = super(MidonetPluginV2, self).update_floatingip(context, id,
                                                                 floatingip)
            # Update status based on association
            if fip.get('port_id') is None:
                fip['status'] = n_const.FLOATINGIP_STATUS_DOWN
            else:
                fip['status'] = n_const.FLOATINGIP_STATUS_ACTIVE
            self.update_floatingip_status(context, id, fip['status'])
            self.api_cli.update_floating_ip(id, fip)
        LOG.info(_("MidonetPluginV2.update_floating_ip exiting: fip=%s"), fip)
        return fip
    @util.handle_api_error
    def create_security_group(self, context, security_group, default_sg=False):
        """Create security group.

        Create a new security group, including the default security group.
        In MidoNet, this means creating a pair of chains, inbound and outbound,
        as well as a new port group. The Neutron record is rolled back if
        the MidoNet call fails.
        """
        LOG.info(_("MidonetPluginV2.create_security_group called: "
                   "security_group=%(security_group)s "
                   "default_sg=%(default_sg)s "),
                 {'security_group': security_group, 'default_sg': default_sg})
        sg = security_group.get('security_group')
        tenant_id = self._get_tenant_id_for_create(context, sg)
        if not default_sg:
            self._ensure_default_security_group(context, tenant_id)
        # Create the Neutron sg first
        sg = super(MidonetPluginV2, self).create_security_group(
            context, security_group, default_sg)
        try:
            # Process the MidoNet side
            self.api_cli.create_security_group(sg)
        except Exception:
            LOG.error(_("Failed to create MidoNet resources for sg %(sg)r"),
                      {"sg": sg})
            with excutils.save_and_reraise_exception():
                super(MidonetPluginV2, self).delete_security_group(context,
                                                                   sg['id'])
        LOG.info(_("MidonetPluginV2.create_security_group exiting: sg=%r"), sg)
        return sg
    @util.handle_api_error
    def delete_security_group(self, context, id):
        """Delete chains for Neutron security group.

        :param context: neutron request context
        :param id: UUID of the security group to delete
        :raises SecurityGroupNotFound: if no such group exists
        :raises SecurityGroupCannotRemoveDefault: when a non-admin attempts
            to delete the 'default' group
        """
        LOG.info(_("MidonetPluginV2.delete_security_group called: id=%s"), id)
        sg = super(MidonetPluginV2, self).get_security_group(context, id)
        if not sg:
            raise ext_sg.SecurityGroupNotFound(id=id)
        # Only admins may remove a tenant's 'default' security group.
        if sg["name"] == 'default' and not context.is_admin:
            raise ext_sg.SecurityGroupCannotRemoveDefault()
        with context.session.begin(subtransactions=True):
            super(MidonetPluginV2, self).delete_security_group(context, id)
            self.api_cli.delete_security_group(id)
        LOG.info(_("MidonetPluginV2.delete_security_group exiting: id=%r"), id)
    @util.handle_api_error
    def create_security_group_rule(self, context, security_group_rule):
        """Create a security group rule

        Create a security group rule in the Neutron DB and corresponding
        MidoNet resources in its data store.

        :param context: neutron request context
        :param security_group_rule: request body for the rule to create
        :returns: the created security group rule dict
        """
        LOG.info(_("MidonetPluginV2.create_security_group_rule called: "
                   "security_group_rule=%(security_group_rule)r"),
                 {'security_group_rule': security_group_rule})
        rule = super(MidonetPluginV2, self).create_security_group_rule(
            context, security_group_rule)
        try:
            self.api_cli.create_security_group_rule(rule)
        except Exception as ex:
            LOG.error(_('Failed to create security group rule %(sg)s,'
                        'error: %(err)s'), {'sg': rule, 'err': ex})
            with excutils.save_and_reraise_exception():
                # Compensate: remove the Neutron row since the backend failed.
                super(MidonetPluginV2, self).delete_security_group_rule(
                    context, rule['id'])
        LOG.info(_("MidonetPluginV2.create_security_group_rule exiting: "
                   "rule=%r"), rule)
        return rule
    @util.handle_api_error
    def create_security_group_rule_bulk(self, context, security_group_rules):
        """Create multiple security group rules

        Create multiple security group rules in the Neutron DB and
        corresponding MidoNet resources in its data store.

        :param context: neutron request context
        :param security_group_rules: request body with the list of rules
        :returns: list of the created security group rule dicts
        """
        LOG.info(_("MidonetPluginV2.create_security_group_rule_bulk called: "
                   "security_group_rules=%(security_group_rules)r"),
                 {'security_group_rules': security_group_rules})
        rules = super(
            MidonetPluginV2, self).create_security_group_rule_bulk_native(
                context, security_group_rules)
        try:
            self.api_cli.create_security_group_rule_bulk(rules)
        except Exception as ex:
            LOG.error(_("Failed to create bulk security group rules %(sg)s, "
                        "error: %(err)s"), {"sg": rules, "err": ex})
            with excutils.save_and_reraise_exception():
                # Compensate: roll back every Neutron row created above.
                for rule in rules:
                    super(MidonetPluginV2, self).delete_security_group_rule(
                        context, rule['id'])
        LOG.info(_("MidonetPluginV2.create_security_group_rule_bulk exiting: "
                   "rules=%r"), rules)
        return rules
@util.handle_api_error
def delete_security_group_rule(self, context, sg_rule_id):
"""Delete a security group rule
Delete a security group rule from the Neutron DB and corresponding
MidoNet resources from its data store.
"""
LOG.info(_("MidonetPluginV2.delete_security_group_rule called: "
"sg_rule_id=%s"), sg_rule_id)
with context.session.begin(subtransactions=True):
super(MidonetPluginV2, self).delete_security_group_rule(context,
sg_rule_id)
self.api_cli.delete_security_group_rule(sg_rule_id)
LOG.info(_("MidonetPluginV2.delete_security_group_rule exiting: "
"id=%r"), id)
    @util.handle_api_error
    def create_vip(self, context, vip):
        """Create a load balancer VIP in Neutron and in the MidoNet backend.

        :param context: neutron request context
        :param vip: request body for the VIP to create
        :returns: the created VIP dict, with 'status' set to ACTIVE
        """
        LOG.debug("MidonetPluginV2.create_vip called: %(vip)r",
                  {'vip': vip})
        with context.session.begin(subtransactions=True):
            v = super(MidonetPluginV2, self).create_vip(context, vip)
            self.api_cli.create_vip(v)
            # Backend creation succeeded, so mark the VIP ACTIVE immediately.
            v['status'] = constants.ACTIVE
            self.update_status(context, loadbalancer_db.Vip, v['id'],
                               v['status'])
        LOG.debug("MidonetPluginV2.create_vip exiting: id=%r", v['id'])
        return v
@util.handle_api_error
def delete_vip(self, context, id):
LOG.debug("MidonetPluginV2.delete_vip called: id=%(id)r",
{'id': id})
with context.session.begin(subtransactions=True):
super(MidonetPluginV2, self).delete_vip(context, id)
self.api_cli.delete_vip(id)
LOG.debug("MidonetPluginV2.delete_vip existing: id=%(id)r",
{'id': id})
    @util.handle_api_error
    def update_vip(self, context, id, vip):
        """Update a load balancer VIP in Neutron and in the MidoNet backend.

        :param context: neutron request context
        :param id: UUID of the VIP to update
        :param vip: request body with the new VIP attributes
        :returns: the updated VIP dict
        """
        LOG.debug("MidonetPluginV2.update_vip called: id=%(id)r, "
                  "vip=%(vip)r", {'id': id, 'vip': vip})
        with context.session.begin(subtransactions=True):
            v = super(MidonetPluginV2, self).update_vip(context, id, vip)
            self.api_cli.update_vip(id, v)
        LOG.debug("MidonetPluginV2.update_vip exiting: id=%(id)r, "
                  "vip=%(vip)r", {'id': id, 'vip': v})
        return v
    @util.handle_api_error
    def create_pool(self, context, pool):
        """Create a load balancer pool bound to a router.

        :param context: neutron request context
        :param pool: request body; its 'pool' dict must carry a router_id
        :returns: the created pool dict, with 'status' set to ACTIVE
        :raises BadRequest: if router_id is missing or the router already
            has an associated pool
        """
        LOG.debug("MidonetPluginV2.create_pool called: %(pool)r",
                  {'pool': pool})
        router_id = pool['pool'].get(rsi.ROUTER_ID)
        if not router_id:
            msg = _("router_id is required for pool")
            raise n_exc.BadRequest(resource='router', msg=msg)
        # Enforce at most one pool per router.
        if self._get_resource_router_id_binding(context, loadbalancer_db.Pool,
                                                router_id=router_id):
            msg = _("A pool is already associated with the router")
            raise n_exc.BadRequest(resource='router', msg=msg)
        with context.session.begin(subtransactions=True):
            p = super(MidonetPluginV2, self).create_pool(context, pool)
            # Record the pool <-> router binding alongside the pool itself.
            res = {
                'id': p['id'],
                rsi.ROUTER_ID: router_id
            }
            self._process_create_resource_router_id(context, res,
                                                    loadbalancer_db.Pool)
            p[rsi.ROUTER_ID] = router_id
            self.api_cli.create_pool(p)
            # Backend creation succeeded, so mark the pool ACTIVE.
            p['status'] = constants.ACTIVE
            self.update_status(context, loadbalancer_db.Pool, p['id'],
                               p['status'])
        LOG.debug("MidonetPluginV2.create_pool exiting: %(pool)r",
                  {'pool': p})
        return p
@util.handle_api_error
def update_pool(self, context, id, pool):
LOG.debug("MidonetPluginV2.update_pool called: id=%(id)r, "
"pool=%(pool)r", {'id': id, 'pool': pool})
with context.session.begin(subtransactions=True):
p = super(MidonetPluginV2, self).update_pool(context, id, pool)
self.api_cli.update_pool(id, p)
LOG.debug("MidonetPluginV2.update_pool exiting: id=%(id)r, "
"pool=%(pool)r", {'id': id, 'pool': pool})
return p
    @util.handle_api_error
    def delete_pool(self, context, id):
        """Delete a load balancer pool and its router binding.

        :param context: neutron request context
        :param id: UUID of the pool to delete
        """
        LOG.debug("MidonetPluginV2.delete_pool called: %(id)r", {'id': id})
        with context.session.begin(subtransactions=True):
            # Drop the pool <-> router binding created in create_pool first.
            self._delete_resource_router_id_binding(context, id,
                                                    loadbalancer_db.Pool)
            super(MidonetPluginV2, self).delete_pool(context, id)
            self.api_cli.delete_pool(id)
        LOG.debug("MidonetPluginV2.delete_pool exiting: %(id)r", {'id': id})
    @util.handle_api_error
    def create_member(self, context, member):
        """Create a load balancer member in Neutron and the MidoNet backend.

        :param context: neutron request context
        :param member: request body for the member to create
        :returns: the created member dict, with 'status' set to ACTIVE
        """
        LOG.debug("MidonetPluginV2.create_member called: %(member)r",
                  {'member': member})
        with context.session.begin(subtransactions=True):
            m = super(MidonetPluginV2, self).create_member(context, member)
            self.api_cli.create_member(m)
            # Backend creation succeeded, so mark the member ACTIVE.
            m['status'] = constants.ACTIVE
            self.update_status(context, loadbalancer_db.Member, m['id'],
                               m['status'])
        LOG.debug("MidonetPluginV2.create_member exiting: %(member)r",
                  {'member': m})
        return m
@util.handle_api_error
def update_member(self, context, id, member):
LOG.debug("MidonetPluginV2.update_member called: id=%(id)r, "
"member=%(member)r", {'id': id, 'member': member})
with context.session.begin(subtransactions=True):
m = super(MidonetPluginV2, self).update_member(context, id, member)
self.api_cli.update_member(id, m)
LOG.debug("MidonetPluginV2.update_member exiting: id=%(id)r, "
"member=%(member)r", {'id': id, 'member': m})
return m
@util.handle_api_error
def delete_member(self, context, id):
LOG.debug("MidonetPluginV2.delete_member called: %(id)r",
{'id': id})
with context.session.begin(subtransactions=True):
super(MidonetPluginV2, self).delete_member(context, id)
self.api_cli.delete_member(id)
LOG.debug("MidonetPluginV2.delete_member exiting: %(id)r",
{'id': id})
    @util.handle_api_error
    def create_health_monitor(self, context, health_monitor):
        """Create a health monitor in Neutron and the MidoNet backend.

        :param context: neutron request context
        :param health_monitor: request body for the monitor to create
        :returns: the created health monitor dict
        """
        LOG.debug("MidonetPluginV2.create_health_monitor called: "
                  " %(health_monitor)r", {'health_monitor': health_monitor})
        with context.session.begin(subtransactions=True):
            hm = super(MidonetPluginV2, self).create_health_monitor(
                context, health_monitor)
            self.api_cli.create_health_monitor(hm)
        LOG.debug("MidonetPluginV2.create_health_monitor exiting: "
                  "%(health_monitor)r", {'health_monitor': hm})
        return hm
    @util.handle_api_error
    def update_health_monitor(self, context, id, health_monitor):
        """Update a health monitor in Neutron and the MidoNet backend.

        :param context: neutron request context
        :param id: UUID of the health monitor to update
        :param health_monitor: request body with the new monitor attributes
        :returns: the updated health monitor dict
        """
        LOG.debug("MidonetPluginV2.update_health_monitor called: id=%(id)r, "
                  "health_monitor=%(health_monitor)r",
                  {'id': id, 'health_monitor': health_monitor})
        with context.session.begin(subtransactions=True):
            hm = super(MidonetPluginV2, self).update_health_monitor(
                context, id, health_monitor)
            self.api_cli.update_health_monitor(id, hm)
        LOG.debug("MidonetPluginV2.update_health_monitor exiting: id=%(id)r, "
                  "health_monitor=%(health_monitor)r",
                  {'id': id, 'health_monitor': hm})
        return hm
    @util.handle_api_error
    def delete_health_monitor(self, context, id):
        """Delete a health monitor from Neutron and the MidoNet backend.

        :param context: neutron request context
        :param id: UUID of the health monitor to delete
        """
        LOG.debug("MidonetPluginV2.delete_health_monitor called: %(id)r",
                  {'id': id})
        with context.session.begin(subtransactions=True):
            super(MidonetPluginV2, self).delete_health_monitor(context, id)
            self.api_cli.delete_health_monitor(id)
        LOG.debug("MidonetPluginV2.delete_health_monitor exiting: %(id)r",
                  {'id': id})
    @util.handle_api_error
    def create_pool_health_monitor(self, context, health_monitor, pool_id):
        """Associate a health monitor with a pool.

        :param context: neutron request context
        :param health_monitor: request body wrapping a 'health_monitor' dict
        :param pool_id: UUID of the pool to attach the monitor to
        :returns: the association record(s) created by the base plugin
        :raises BadRequest: if the pool already has a monitor attached
        """
        LOG.debug("MidonetPluginV2.create_pool_health_monitor called: "
                  "hm=%(health_monitor)r, pool_id=%(pool_id)r",
                  {'health_monitor': health_monitor, 'pool_id': pool_id})
        pool = self.get_pool(context, pool_id)
        # MidoNet limitation: at most one monitor per pool.
        monitors = pool.get('health_monitors')
        if len(monitors) > 0:
            msg = _("MidoNet right now can only support one monitor per pool")
            raise n_exc.BadRequest(resource='pool_health_monitor', msg=msg)
        hm = health_monitor['health_monitor']
        with context.session.begin(subtransactions=True):
            monitors = super(MidonetPluginV2, self).create_pool_health_monitor(
                context, health_monitor, pool_id)
            self.api_cli.create_pool_health_monitor(hm, pool_id)
        LOG.debug("MidonetPluginV2.create_pool_health_monitor exiting: "
                  "%(health_monitor)r, %(pool_id)r",
                  {'health_monitor': health_monitor, 'pool_id': pool_id})
        return monitors
    @util.handle_api_error
    def delete_pool_health_monitor(self, context, id, pool_id):
        """Dissociate a health monitor from a pool.

        :param context: neutron request context
        :param id: UUID of the health monitor
        :param pool_id: UUID of the pool the monitor is attached to
        """
        LOG.debug("MidonetPluginV2.delete_pool_health_monitor called: "
                  "id=%(id)r, pool_id=%(pool_id)r",
                  {'id': id, 'pool_id': pool_id})
        with context.session.begin(subtransactions=True):
            super(MidonetPluginV2, self).delete_pool_health_monitor(
                context, id, pool_id)
            self.api_cli.delete_pool_health_monitor(id, pool_id)
        LOG.debug("MidonetPluginV2.delete_pool_health_monitor exiting: "
                  "%(id)r, %(pool_id)r", {'id': id, 'pool_id': pool_id})
|
midokura/python-neutron-plugin-midonet
|
midonet/neutron/plugin.py
|
Python
|
apache-2.0
| 35,809
|
# TuxTruck_OBDlog_GPS.py
#
# Time-stamp: "2009-08-13 01:04:06 jantman"
#
# +----------------------------------------------------------------------+
# | TuxTruck Project http://tuxtruck.jasonantman.com |
# +----------------------------------------------------------------------+
# | Copyright (c) 2009 Jason Antman. |
# | |
# | This program is free software; you can redistribute it and/or modify |
# | it under the terms of the GNU General Public License as published by |
# | the Free Software Foundation; either version 3 of the License, or |
# | (at your option) any later version. |
# | |
# | This program is distributed in the hope that it will be useful, |
# | but WITHOUT ANY WARRANTY; without even the implied warranty of |
# | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
# | GNU General Public License for more details. |
# | |
# | You should have received a copy of the GNU General Public License |
# | along with this program; if not, write to: |
# | |
# | Free Software Foundation, Inc. |
# | 59 Temple Place - Suite 330 |
# | Boston, MA 02111-1307, USA. |
# +----------------------------------------------------------------------+
# |Please use the above URL for bug reports and feature/support requests.|
# +----------------------------------------------------------------------+
# | Authors: Jason Antman <jason@jasonantman.com> |
# +----------------------------------------------------------------------+
# | $LastChangedRevision:: $ |
# | $HeadURL:: $ |
# +----------------------------------------------------------------------+
import threading
import time
class TuxTruck_OBDlog_GPS(threading.Thread):
    """
    Thread that polls GPS data and appends CSV-formatted lines to a
    shared queue until the parent signals shutdown via its KILLED flag.
    (The actual GPSd read is currently stubbed with a fixed sample line.)
    """

    FILE = None    # reserved for the GPSd/serial file handle (unused stub)
    Q = ""         # shared buffer the parent drains; replaced in __init__
    PARENT = None  # owning object; its KILLED flag stops the run loop

    def __init__(self, parent, q):
        """
        Store the parent reference and the shared queue, then initialize
        the thread. (The port open is currently stubbed out.)

        :param parent: owner object exposing a boolean KILLED attribute
        :param q: list-like buffer that receives "GPS,..." lines
        """
        self.Q = q
        self.PARENT = parent
        threading.Thread.__init__(self)

    def run(self):
        """
        Poll loop: append one "GPS,..." CSV line to the queue every 0.1s
        until the parent's KILLED flag becomes true.
        """
        # Idiom fix: `while not killed` replaces the redundant
        # `while True and self.PARENT.KILLED == False`.
        while not self.PARENT.KILLED:
            #line = self.FILE.readline()
            # "GPS",lat,long,heading,speed
            line = "GPS,lat,long,heading,speed"
            self.Q.append(line)
            time.sleep(0.1)
|
jantman/tuxtruck
|
obdlog/TuxTruck_OBDlog_GPS.py
|
Python
|
gpl-3.0
| 2,930
|
from django.urls import path
from tickets import views
# Ticket-shop URL map. Order matters: the '<str:ev>/' catch-all must stay
# after the literal prefixes ('event/', 'ticket/', 'seats/').
urlpatterns = [
    # Landing page: show the most recent event.
    path('', views.last_event, name='last_event'),
    path('event/<str:ev>/', views.event, name='event'),
    path('event/<str:ev>/<str:space>/<str:session>/register/', views.register, name='register'),
    # Ticket purchase flow: payment -> confirmation callbacks -> thanks.
    path('ticket/<str:order>/payment/', views.payment, name='payment'),
    path('ticket/<str:order>/thanks/', views.thanks, name='thanks'),
    path('ticket/confirm/', views.confirm, name='confirm'),
    path('ticket/confirm/paypal/', views.confirm_paypal, name='confirm_paypal'),
    path('ticket/<str:order>/confirm/stripe/', views.confirm_stripe, name='confirm_stripe'),
    # Admin previews of ticket / confirmation-email templates.
    path('ticket/template/<int:id>/preview/', views.template_preview, name='template_preview'),
    path('ticket/email-confirm/<int:id>/preview/', views.email_confirm_preview, name='email_confirm_preview'),
    # Catch-all event slug for multi-ticket purchase.
    path('<str:ev>/', views.multipurchase, name='multipurchase'),
    # Seat-map helpers (AJAX endpoints).
    path('seats/<int:session>/<int:layout>/', views.ajax_layout, name='ajax_layout'),
    path('seats/view/<int:map>/', views.seats, name='seats'),
    path('seats/auto/', views.autoseats, name='autoseats'),
    path('seats/bystr/', views.seats_by_str, name='seats_by_str'),
]
|
wadobo/congressus
|
congressus/tickets/urls.py
|
Python
|
agpl-3.0
| 1,208
|
# -*- coding: utf-8 -*-
"""Copyright 2015 Roger R Labbe Jr.
Code supporting the book
Kalman and Bayesian Filters in Python
https://github.com/rlabbe/Kalman-and-Bayesian-Filters-in-Python
This is licensed under an MIT license. See the LICENSE.txt file
for more information.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from matplotlib import animation
import matplotlib.pyplot as plt
def animate(filename, func, frames, interval, fig=None, figsize=(6.5, 6.5)):
    r""" Creates an animated GIF of a matplotlib.

    Renders the animation to an intermediate .mp4 with ffmpeg, converts it
    to a .gif, then deletes the .mp4.

    Parameters
    ----------
    filename : string
        name of the file. E.g 'foo.GIF' or r'\home\monty\parrots\fjords.gif'
    func : function
        function that will be called once per frame. Must have signature of
        def fun_name(frame_num)
    frames : int
        number of frames to animate. The current frame number will be passed
        into func at each call.
    interval : float
        Milliseconds to pause on each frame in the animation. E.g. 500 for half
        a second.
    fig : matplotlib.figure.Figure, optional
        figure to animate; a new one is created when omitted.
    figsize : (float, float) optional
        size of the figure in inches. Defaults to 6.5" by 6.5"
    """
    # Note: the docstring is now a raw string — the path example previously
    # contained invalid escapes ('\h', '\p') and a form feed ('\f').

    def init_func():
        """ This draws the 'blank' frame of the video. To work around a bug
        in matplotlib 1.5.1 (#5399) you must supply an empty init function
        for the save to work."""
        pass

    if fig is None:
        fig = plt.figure(figsize=figsize)
    anim = animation.FuncAnimation(fig, func, init_func=init_func,
                                   frames=frames, interval=interval)

    import os
    import subprocess
    basename = os.path.splitext(filename)[0]
    anim.save(basename + '.mp4', writer='ffmpeg')
    # Use an argument list instead of a formatted shell string so filenames
    # with spaces or shell metacharacters are handled safely, and raise if
    # the conversion fails instead of silently deleting the intermediate.
    subprocess.check_call(['ffmpeg', '-y', '-i', basename + '.mp4',
                           basename + '.gif'])
    os.remove(basename + '.mp4')
|
zaqwes8811/micro-apps
|
self_driving/deps/Kalman_and_Bayesian_Filters_in_Python_master/kf_book/gif_animate.py
|
Python
|
mit
| 1,828
|
"""
This is a DIRAC WMS administrator interface.
It exposes the following methods:
Site mask related methods:
setMask(<site mask>)
getMask()
Access to the pilot data:
getWMSStats()
"""
__RCSID__ = "$Id$"
from types import DictType, ListType, IntType, LongType, StringTypes, StringType, FloatType
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC import gConfig, gLogger, S_OK, S_ERROR
from DIRAC.WorkloadManagementSystem.DB.JobDB import JobDB
from DIRAC.FrameworkSystem.Client.ProxyManagerClient import gProxyManager
from DIRAC.WorkloadManagementSystem.DB.PilotAgentsDB import PilotAgentsDB
from DIRAC.WorkloadManagementSystem.DB.TaskQueueDB import TaskQueueDB
from DIRAC.WorkloadManagementSystem.Service.WMSUtilities import getPilotLoggingInfo, getPilotOutput
from DIRAC.Resources.Computing.ComputingElementFactory import ComputingElementFactory
import DIRAC.Core.Utilities.Time as Time
from DIRAC.ConfigurationSystem.Client.Helpers.Registry import getGroupOption, getUsernameForDN
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getQueue
# This is a global instance of the database classes
# (populated by initializeWMSAdministratorHandler at service startup;
# False means "not yet initialized")
jobDB = False
pilotDB = False
taskQueueDB = False
# Pilot states treated as terminal: their counters are restricted to the
# last day in export_getCurrentPilotCounters
FINAL_STATES = ['Done','Aborted','Cleared','Deleted','Stalled']
def initializeWMSAdministratorHandler( serviceInfo ):
  """ WMS AdministratorService initialization

      Instantiates the module-level JobDB, PilotAgentsDB and TaskQueueDB
      singletons used by all the service methods.

      :param serviceInfo: DISET service information dict (unused here)
      :return: S_OK
  """
  global jobDB
  global pilotDB
  global taskQueueDB
  jobDB = JobDB()
  pilotDB = PilotAgentsDB()
  taskQueueDB = TaskQueueDB()
  return S_OK()
class WMSAdministratorHandler(RequestHandler):
  ###########################################################################
  # RPC signature: a list of site names
  types_setSiteMask = [ListType]
  def export_setSiteMask( self, siteList ):
    """ Set the site mask for matching. The mask is given in a form of Classad string.

        :param siteList: list of site names; each is paired with 'Active'
        :return: return structure of JobDB.setSiteMask
    """
    result = self.getRemoteCredentials()
    dn = result['DN']
    maskList = [ (site,'Active') for site in siteList ]
    # The caller's DN is recorded as the author of the mask change.
    result = jobDB.setSiteMask( maskList, dn, 'No comment' )
    return result
  ##############################################################################
  types_getSiteMask = []
  def export_getSiteMask(self):
    """ Get the site mask

        :return: result of JobDB.getSiteMask for the 'Active' state
    """
    return jobDB.getSiteMask( 'Active' )
##############################################################################
types_banSite = [StringTypes]
def export_banSite(self, site,comment='No comment'):
""" Ban the given site in the site mask
"""
result = self.getRemoteCredentials()
dn = result['DN']
result = getUsernameForDN(dn)
if result['OK']:
author = result['Value']
else:
author = dn
result = jobDB.banSiteInMask(site,author,comment)
return result
##############################################################################
types_allowSite = [StringTypes]
def export_allowSite(self,site,comment='No comment'):
""" Allow the given site in the site mask
"""
result = self.getRemoteCredentials()
dn = result['DN']
result = getUsernameForDN(dn)
if result['OK']:
author = result['Value']
else:
author = dn
result = jobDB.allowSiteInMask(site,author,comment)
return result
  ##############################################################################
  types_clearMask = []
  def export_clearMask(self):
    """ Clear up the entire site mask

        :return: result of JobDB.removeSiteFromMask(None)
    """
    # Passing None removes every site from the mask.
    return jobDB.removeSiteFromMask( None )
  ##############################################################################
  # Accepts either a single site name (any string type) or a list of names
  types_getSiteMaskLogging = [ list( StringTypes ) + [ListType] ]
  def export_getSiteMaskLogging( self, sites ):
    """ Get the site mask logging history

        :param sites: a site name or a list of site names
    """
    # Normalise a single site name into a one-element list
    if type(sites) in StringTypes:
      sites = [sites]
    return jobDB.getSiteMaskLogging( sites )
##############################################################################
types_getSiteMaskSummary = [ ]
def export_getSiteMaskSummary(self):
""" Get the mask status for all the configured sites
"""
# Get all the configured site names
result = gConfig.getSections('/Resources/Sites')
if not result['OK']:
return result
grids = result['Value']
sites = []
for grid in grids:
result = gConfig.getSections('/Resources/Sites/%s' % grid)
if not result['OK']:
return result
sites += result['Value']
# Get the current mask status
result = jobDB.getSiteMaskStatus()
siteDict = result['Value']
for site in sites:
if site not in siteDict:
siteDict[site] = 'Unknown'
return S_OK(siteDict)
##############################################################################
types_getCurrentPilotCounters = [ DictType ]
def export_getCurrentPilotCounters( self, attrDict={}):
""" Get pilot counters per Status with attrDict selection. Final statuses are given for
the last day.
"""
result = pilotDB.getCounters( 'PilotAgents',['Status'], attrDict, timeStamp='LastUpdateTime')
if not result['OK']:
return result
last_update = Time.dateTime() - Time.day
resultDay = pilotDB.getCounters( 'PilotAgents',['Status'], attrDict, newer=last_update,
timeStamp='LastUpdateTime')
if not resultDay['OK']:
return resultDay
resultDict = {}
for statusDict, count in result['Value']:
status = statusDict['Status']
resultDict[status] = count
if status in FINAL_STATES:
resultDict[status] = 0
for statusDayDict,ccount in resultDay['Value']:
if status == statusDayDict['Status']:
resultDict[status] = ccount
break
return S_OK(resultDict)
  ##########################################################################################
  types_addPilotTQReference = [ ListType, [IntType, LongType], StringTypes, StringTypes ]
  def export_addPilotTQReference( self, pilotRef, taskQueueID, ownerDN, ownerGroup, broker='Unknown',
                                  gridType='DIRAC', requirements='Unknown',pilotStampDict={}):
    """ Add a new pilot job reference

        :param pilotRef: list of pilot references to register
        :param taskQueueID: id of the task queue the pilots serve
        :param ownerDN: DN of the pilot owner
        :param ownerGroup: DIRAC group of the pilot owner
    """
    # NOTE(review): pilotStampDict={} is a shared mutable default argument;
    # it is only passed through here, but consider defaulting to None.
    return pilotDB.addPilotTQReference(pilotRef, taskQueueID,
                                       ownerDN, ownerGroup,
                                       broker, gridType, requirements,pilotStampDict)
  ##############################################################################
  types_getPilotOutput = [StringTypes]
  def export_getPilotOutput(self,pilotReference):
    """ Get the pilot job standard output and standard error files for the Grid
        job reference

        :param pilotReference: grid reference of the pilot
    """
    # Shared retrieval logic lives in __getGridJobOutput.
    return self.__getGridJobOutput(pilotReference)
  ##############################################################################
  # Accepts a single pilot reference or a list of references
  types_getPilotInfo = [ list(StringTypes)+[ListType] ]
  def export_getPilotInfo(self,pilotReference):
    """ Get the info about a given pilot job reference

        :param pilotReference: pilot reference or list of references
    """
    return pilotDB.getPilotInfo(pilotReference)
  ##############################################################################
  types_selectPilots = [ DictType ]
  def export_selectPilots(self,condDict):
    """ Select pilots given the selection conditions

        :param condDict: attribute selection dict passed to PilotAgentsDB
    """
    return pilotDB.selectPilots(condDict)
  ##############################################################################
  types_storePilotOutput = [ StringTypes,StringTypes,StringTypes ]
  def export_storePilotOutput(self,pilotReference,output,error):
    """ Store the pilot output and error

        :param pilotReference: grid reference of the pilot
        :param output: pilot standard output contents
        :param error: pilot standard error contents
    """
    return pilotDB.storePilotOutput(pilotReference,output,error)
  ##############################################################################
  types_getPilotLoggingInfo = [StringTypes]
  def export_getPilotLoggingInfo(self,pilotReference):
    """ Get the pilot logging info for the Grid job reference

        :param pilotReference: grid reference of the pilot
        :return: S_OK with the middleware logging info, or S_ERROR
    """
    result = pilotDB.getPilotInfo(pilotReference)
    if not result['OK'] or not result[ 'Value' ]:
      return S_ERROR('Failed to determine owner for pilot ' + pilotReference)

    pilotDict = result['Value'][pilotReference]
    owner = pilotDict['OwnerDN']
    group = pilotDict['OwnerGroup']
    # Map the DIRAC group to its VOMS role (if configured) before asking
    # for the owner's pilot proxy.
    group = getGroupOption(group,'VOMSRole',group)
    ret = gProxyManager.getPilotProxyFromVOMSGroup( owner, group )
    if not ret['OK']:
      gLogger.error( ret['Message'] )
      gLogger.error( 'Could not get proxy:', 'User "%s", Group "%s"' % ( owner, group ) )
      return S_ERROR("Failed to get the pilot's owner proxy")
    proxy = ret['Value']

    gridType = pilotDict['GridType']

    return getPilotLoggingInfo( proxy, gridType, pilotReference )
  ##############################################################################
  types_getJobPilotOutput = [[StringType, IntType, LongType]]
  def export_getJobPilotOutput(self,jobID):
    """ Get the pilot job standard output and standard error files for the DIRAC
        job reference

        :param jobID: DIRAC job id (string or integer)
        :return: S_OK with the pilot output dict, or S_ERROR when no pilot
                 reference can be found for the job
    """
    pilotReference = ''
    # Get the pilot grid reference first from the job parameters
    result = jobDB.getJobParameter( int( jobID ), 'Pilot_Reference' )
    if result['OK']:
      pilotReference = result['Value']

    if not pilotReference:
      # Failed to get the pilot reference, try to look in the attic parameters
      result = jobDB.getAtticJobParameters( int( jobID ), ['Pilot_Reference'] )
      if result['OK']:
        c = -1
        # Get the pilot reference for the last rescheduling cycle
        for cycle in result['Value']:
          if cycle > c:
            pilotReference = result['Value'][cycle]['Pilot_Reference']
            c = cycle

    if pilotReference:
      return self.__getGridJobOutput(pilotReference)
    else:
      return S_ERROR('No pilot job reference found')
  ##############################################################################
  def __getGridJobOutput(self,pilotReference):
    """ Get the pilot job standard output and standard error files for the Grid
        job reference

        Tries the PilotAgentsDB cache first, then falls back to retrieving
        the output from the middleware (grid types) or directly from the
        Computing Element (other types), caching it on success.

        :param pilotReference: grid reference of the pilot
        :return: S_OK with a dict holding StdOut, StdErr, OwnerDN,
                 OwnerGroup and FileList, or S_ERROR
    """
    result = pilotDB.getPilotInfo(pilotReference)
    if not result['OK'] or not result[ 'Value' ]:
      return S_ERROR('Failed to get info for pilot ' + pilotReference)

    pilotDict = result['Value'][pilotReference]
    owner = pilotDict['OwnerDN']
    group = pilotDict['OwnerGroup']

    # FIXME: What if the OutputSandBox is not StdOut and StdErr, what do we do with other files?
    # First look for output already cached in the PilotAgentsDB.
    result = pilotDB.getPilotOutput(pilotReference)
    if result['OK']:
      stdout = result['Value']['StdOut']
      error = result['Value']['StdErr']
      if stdout or error:
        resultDict = {}
        resultDict['StdOut'] = stdout
        resultDict['StdErr'] = error
        resultDict['OwnerDN'] = owner
        resultDict['OwnerGroup'] = group
        resultDict['FileList'] = []
        return S_OK(resultDict)
      else:
        gLogger.warn( 'Empty pilot output found for %s' % pilotReference )

    gridType = pilotDict['GridType']
    if gridType in ["LCG","gLite","CREAM","ARC"]:
      # Grid middleware path: retrieve with the owner's pilot proxy.
      group = getGroupOption(group,'VOMSRole',group)
      ret = gProxyManager.getPilotProxyFromVOMSGroup( owner, group )
      if not ret['OK']:
        gLogger.error( ret['Message'] )
        gLogger.error( 'Could not get proxy:', 'User "%s", Group "%s"' % ( owner, group ) )
        return S_ERROR("Failed to get the pilot's owner proxy")
      proxy = ret['Value']

      pilotStamp = pilotDict['PilotStamp']
      result = getPilotOutput( proxy, gridType, pilotReference, pilotStamp )
      if not result['OK']:
        return S_ERROR('Failed to get pilot output: '+result['Message'])
      # FIXME: What if the OutputSandBox is not StdOut and StdErr, what do we do with other files?
      stdout = result['StdOut']
      error = result['StdErr']
      fileList = result['FileList']
      if stdout:
        # Cache the freshly retrieved output for subsequent requests.
        result = pilotDB.storePilotOutput(pilotReference,stdout,error)
        if not result['OK']:
          gLogger.error('Failed to store pilot output:',result['Message'])

      resultDict = {}
      resultDict['StdOut'] = stdout
      resultDict['StdErr'] = error
      resultDict['OwnerDN'] = owner
      resultDict['OwnerGroup'] = group
      resultDict['FileList'] = fileList
      return S_OK(resultDict)
    else:
      # Instantiate the appropriate CE
      ceFactory = ComputingElementFactory()
      result = getQueue( pilotDict['GridSite'], pilotDict['DestinationSite'], pilotDict['Queue'] )
      if not result['OK']:
        return result
      queueDict = result['Value']
      result = ceFactory.getCE( gridType, pilotDict['DestinationSite'], queueDict )
      if not result['OK']:
        return result
      ce = result['Value']
      pilotStamp = pilotDict['PilotStamp']
      pRef = pilotReference
      if pilotStamp:
        # The CE expects the stamp appended to the reference.
        pRef = pRef + ':::' + pilotStamp
      result = ce.getJobOutput( pRef )
      if not result['OK']:
        return result
      stdout,error = result['Value']
      if stdout:
        result = pilotDB.storePilotOutput(pilotReference,stdout,error)
        if not result['OK']:
          gLogger.error('Failed to store pilot output:',result['Message'])

      resultDict = {}
      resultDict['StdOut'] = stdout
      resultDict['StdErr'] = error
      resultDict['OwnerDN'] = owner
      resultDict['OwnerGroup'] = group
      resultDict['FileList'] = []
      return S_OK( resultDict )
##############################################################################
types_getPilotSummary = []
def export_getPilotSummary(self,startdate='',enddate=''):
""" Get summary of the status of the LCG Pilot Jobs
"""
result = pilotDB.getPilotSummary(startdate,enddate)
return result
##############################################################################
types_getPilotMonitorWeb = [DictType, ListType, IntType, IntType]
def export_getPilotMonitorWeb(self, selectDict, sortList, startItem, maxItems):
""" Get the summary of the pilot information for a given page in the
pilot monitor in a generic format
"""
result = pilotDB.getPilotMonitorWeb(selectDict, sortList, startItem, maxItems)
return result
  ##############################################################################
  types_getPilotMonitorSelectors = []
  def export_getPilotMonitorSelectors(self):
    """ Get all the distinct selector values for the Pilot Monitor web portal page
    """
    result = pilotDB.getPilotMonitorSelectors()
    return result
  ##############################################################################
  types_getPilotSummaryWeb = [DictType, ListType, IntType, IntType]
  def export_getPilotSummaryWeb(self, selectDict, sortList, startItem, maxItems):
    """ Get the summary of the pilot information for a given page in the
        pilot monitor in a generic format

        :param selectDict: selection criteria
        :param sortList: sorting specification
        :param startItem: index of the first item of the page
        :param maxItems: page size
    """
    result = pilotDB.getPilotSummaryWeb(selectDict, sortList, startItem, maxItems)
    return result
  ##############################################################################
  types_getSiteSummaryWeb = [DictType, ListType, IntType, IntType]
  def export_getSiteSummaryWeb(self, selectDict, sortList, startItem, maxItems):
    """ Get the summary of the jobs running on sites in a generic format

        :param selectDict: selection criteria
        :param sortList: sorting specification
        :param startItem: index of the first item of the page
        :param maxItems: page size
    """
    result = jobDB.getSiteSummaryWeb(selectDict, sortList, startItem, maxItems)
    return result
##############################################################################
types_getSiteSummarySelectors = []
def export_getSiteSummarySelectors(self):
""" Get all the distinct selector values for the site summary web portal page
"""
resultDict = {}
statusList = ['Good','Fair','Poor','Bad','Idle']
resultDict['Status'] = statusList
maskStatus = ['Active','Banned','NoMask','Reduced']
resultDict['MaskStatus'] = maskStatus
gridTypes = []
result = gConfig.getSections('Resources/Sites/',[])
if result['OK']:
gridTypes = result['Value']
resultDict['GridType'] = gridTypes
siteList = []
for grid in gridTypes:
result = gConfig.getSections('Resources/Sites/%s' % grid,[])
if result['OK']:
siteList += result['Value']
countryList = []
for site in siteList:
if site.find('.') != -1:
country = site.split('.')[2].lower()
if country not in countryList:
countryList.append(country)
countryList.sort()
resultDict['Country'] = countryList
siteList.sort()
resultDict['Site'] = siteList
return S_OK(resultDict)
  ##############################################################################
  types_getPilots = [[StringType, IntType, LongType]]
  def export_getPilots(self,jobID):
    """ Get pilot references and their states for :

      - those pilots submitted for the TQ where job is sitting
      - (or) the pilots executing/having executed the Job

        :param jobID: DIRAC job id (string or integer)
        :return: S_OK with the pilot info dict, or S_ERROR
    """
    pilots = []
    result = pilotDB.getPilotsForJobID( int( jobID ) )
    if not result['OK']:
      if result['Message'].find('not found') == -1:
        return S_ERROR('Failed to get pilot: '+result['Message'])
    else:
      pilots += result['Value']
    if not pilots:
      # Pilots were not found try to look in the Task Queue
      taskQueueID = 0
      result = taskQueueDB.getTaskQueueForJob( int( jobID ) )
      if result['OK'] and result['Value']:
        taskQueueID = result['Value']
      if taskQueueID:
        # Limit to 10 pilots to keep the reply bounded.
        result = pilotDB.getPilotsForTaskQueue( taskQueueID, limit=10 )
        if not result['OK']:
          return S_ERROR('Failed to get pilot: '+result['Message'])
        pilots += result['Value']
    if not pilots:
      return S_ERROR( 'Failed to get pilot for Job %d' % int( jobID ) )

    return pilotDB.getPilotInfo(pilotID=pilots)
  ##############################################################################
  # Accepts a single pilot reference or a list of references
  types_killPilot = [ list(StringTypes)+[ListType] ]
  def export_killPilot(self, pilotRefList ):
    """ Kill the specified pilots

        Pilots are grouped per (owner, group, site, CE, queue) so each CE is
        instantiated once per group; middleware CEs additionally get the
        owner's pilot proxy set before the kill.

        :param pilotRefList: pilot reference or list of references
        :return: S_OK, or S_ERROR if any kill failed
    """
    # Make a list if it is not yet
    pilotRefs = list( pilotRefList )
    if type( pilotRefList ) in StringTypes:
      pilotRefs = [pilotRefList]

    # Regroup pilots per site and per owner
    pilotRefDict = {}
    for pilotReference in pilotRefs:
      result = pilotDB.getPilotInfo(pilotReference)
      if not result['OK'] or not result[ 'Value' ]:
        return S_ERROR('Failed to get info for pilot ' + pilotReference)

      pilotDict = result['Value'][pilotReference]
      owner = pilotDict['OwnerDN']
      group = pilotDict['OwnerGroup']
      # The grouping key packs owner/group/site/CE/queue into one string.
      queue = '@@@'.join( [owner, group, pilotDict['GridSite'], pilotDict['DestinationSite'], pilotDict['Queue']] )
      gridType = pilotDict['GridType']
      pilotRefDict.setdefault( queue, {} )
      pilotRefDict[queue].setdefault( 'PilotList', [] )
      pilotRefDict[queue]['PilotList'].append( pilotReference )
      pilotRefDict[queue]['GridType'] = gridType

    # Do the work now queue by queue
    ceFactory = ComputingElementFactory()
    failed = []
    for key, pilotDict in pilotRefDict.items():

      owner,group,site,ce,queue = key.split( '@@@' )
      result = getQueue( site, ce, queue )
      if not result['OK']:
        return result
      queueDict = result['Value']
      gridType = pilotDict['GridType']
      result = ceFactory.getCE( gridType, ce, queueDict )
      if not result['OK']:
        return result
      ce = result['Value']

      # FIXME: quite hacky. Should be either removed, or based on some flag
      if gridType in ["LCG", "gLite", "CREAM", 'ARC']:
        group = getGroupOption(group,'VOMSRole',group)
        ret = gProxyManager.getPilotProxyFromVOMSGroup( owner, group )
        if not ret['OK']:
          gLogger.error( ret['Message'] )
          gLogger.error( 'Could not get proxy:', 'User "%s", Group "%s"' % ( owner, group ) )
          return S_ERROR("Failed to get the pilot's owner proxy")
        proxy = ret['Value']
        ce.setProxy( proxy )

      pilotList = pilotDict['PilotList']
      result = ce.killJob( pilotList )
      if not result['OK']:
        # Remember the whole group and keep going with the other queues.
        failed.extend( pilotList )

    if failed:
      return S_ERROR('Failed to kill at least some pilots')

    return S_OK()
##############################################################################
types_setJobForPilot = [[StringType, IntType, LongType], StringTypes]
def export_setJobForPilot(self, jobID, pilotRef, destination=None):
    """ Report the DIRAC job ID which is executed by the given pilot job

        :param jobID: DIRAC job ID
        :param pilotRef: pilot job reference
        :param destination: optional destination site recorded for the pilot
        :return: result of the last DB call (S_OK/S_ERROR)
    """
    result = pilotDB.setJobForPilot(int(jobID), pilotRef)
    if not result['OK']:
        return result
    result = pilotDB.setCurrentJobID(pilotRef, int(jobID))
    if not result['OK']:
        return result
    if destination:
        result = pilotDB.setPilotDestinationSite(pilotRef, destination)
    return result
##########################################################################################
types_setPilotBenchmark = [StringTypes, FloatType]
def export_setPilotBenchmark(self, pilotRef, mark):
    """ Set the pilot agent benchmark

        :param pilotRef: pilot job reference
        :param mark: benchmark value (float)
        :return: S_OK/S_ERROR from the DB layer
    """
    result = pilotDB.setPilotBenchmark(pilotRef, mark)
    return result
##########################################################################################
types_setAccountingFlag = [StringTypes]
def export_setAccountingFlag(self, pilotRef, mark='True'):
    """ Set the pilot AccountingSent flag

        :param pilotRef: pilot job reference
        :param mark: flag value, stored as a string (default 'True')
        :return: S_OK/S_ERROR from the DB layer
    """
    result = pilotDB.setAccountingFlag(pilotRef, mark)
    return result
##########################################################################################
types_setPilotStatus = [StringTypes, StringTypes]
def export_setPilotStatus(self, pilotRef, status, destination=None, reason=None, gridSite=None, queue=None):
    """ Set the pilot agent status

        :param pilotRef: pilot job reference
        :param status: new pilot status
        :param destination: optional destination site
        :param reason: optional human-readable status reason
        :param gridSite: optional grid site name
        :param queue: optional queue name
        :return: S_OK/S_ERROR from the DB layer
    """
    result = pilotDB.setPilotStatus(pilotRef, status, destination=destination,
                                    statusReason=reason, gridSite=gridSite, queue=queue)
    return result
##########################################################################################
types_countPilots = [DictType]
def export_countPilots(self, condDict, older=None, newer=None, timeStamp='SubmissionTime'):
    """ Count pilots matching the given selection.

        (Docstring corrected: it previously said "Set the pilot agent status",
        a copy/paste from another method.)

        :param condDict: selection conditions
        :param older: optional upper time bound applied to timeStamp
        :param newer: optional lower time bound applied to timeStamp
        :param timeStamp: column used for the time bounds
        :return: S_OK/S_ERROR from the DB layer
    """
    result = pilotDB.countPilots(condDict, older, newer, timeStamp)
    return result
##########################################################################################
types_getCounters = [StringTypes, ListType, DictType]
def export_getCounters(self, table, keys, condDict, newer=None, timeStamp='SubmissionTime'):
    """ Get grouped counters from the given pilot DB table.

        (Docstring corrected: it previously said "Set the pilot agent status",
        a copy/paste from another method.)

        :param table: DB table name
        :param keys: list of columns to group by
        :param condDict: selection conditions
        :param newer: optional lower time bound applied to timeStamp
        :param timeStamp: column used for the time bound
        :return: S_OK/S_ERROR from the DB layer
    """
    result = pilotDB.getCounters(table, keys, condDict, newer=newer, timeStamp=timeStamp)
    return result
##############################################################################
types_getPilotStatistics = [StringTypes, DictType]
@staticmethod
def export_getPilotStatistics(attribute, selectDict):
    """ Get pilot statistics distribution per attribute value with a given selection

        :param attribute: PilotAgents column to aggregate on (e.g. 'Status', 'OwnerDN')
        :param selectDict: selection conditions; 'FromDate'/'LastUpdate'/'ToDate'
                           entries are popped out and used as time bounds
        :return: S_OK({attribute_value: count}) or the getCounters S_ERROR
    """
    startDate = selectDict.get('FromDate', None)
    if startDate:
        del selectDict['FromDate']
    if startDate is None:
        # Fall back to the legacy 'LastUpdate' key for the lower bound.
        startDate = selectDict.get('LastUpdate', None)
        if startDate:
            del selectDict['LastUpdate']
    endDate = selectDict.get('ToDate', None)
    if endDate:
        del selectDict['ToDate']
    result = pilotDB.getCounters('PilotAgents', [attribute], selectDict,
                                 newer=startDate,
                                 older=endDate,
                                 timeStamp='LastUpdateTime')
    statistics = {}
    if result['OK']:
        for status, count in result['Value']:
            # Translate DNs into readable user names when grouping on OwnerDN.
            if "OwnerDN" in status:
                userName = getUsernameForDN(status['OwnerDN'])
                if userName['OK']:
                    status['OwnerDN'] = userName['Value']
            # Bug fix: key the result on the requested attribute. The previous
            # code used status['OwnerDN'] unconditionally, which broke the
            # statistics for every attribute other than 'OwnerDN'.
            statistics[status[attribute]] = count
    return S_OK(statistics)
|
marcelovilaca/DIRAC
|
WorkloadManagementSystem/Service/WMSAdministratorHandler.py
|
Python
|
gpl-3.0
| 23,672
|
# To treat the current directory as a module.
|
swvist/Ocrn
|
ocrn/__init__.py
|
Python
|
mit
| 44
|
from psycopg2._psycopg import adapt
class StringUtils(object):
    """String helpers for preparing values used in ORM/SQL contexts."""

    @staticmethod
    def adapt_to_str_for_orm(value):
        """Strip '%' and ':' from *value*, quote it via psycopg2's adapt(),
        and return the quoted result without its surrounding quote characters.
        """
        # Drop characters that clash with parameter-marker syntax.
        cleaned = value.replace('%', '').replace(':', '')
        quoted = adapt(cleaned).getquoted()
        # getquoted() wraps the value in quotes; trim one char off each end.
        return quoted[1:-1]
|
LittleRichard/sormtger
|
server/utils/StringUtils.py
|
Python
|
gpl-3.0
| 305
|
'''
Created on Sep 28, 2016
@author: rtorres
'''
# Closed set of feature-area names; presumably used as choices/validation
# elsewhere in the project — TODO confirm against callers.
AREAS = ('Policies', 'Billing', 'Claims', 'Reports')
|
rafasis1986/EngineeringMidLevel
|
flaskiwsapp/projects/snippets/constants.py
|
Python
|
mit
| 104
|
"""
Test lldb data formatter subsystem.
"""
import os, time
import unittest2
import lldb
from lldbtest import *
import lldbutil
class ValueObjectRecursionTestCase(TestBase):
    # Directory (relative to the test-suite root) holding this test's sources.
    mydir = os.path.join("functionalities", "recursion")

    @unittest2.skipUnless(sys.platform.startswith("darwin"), "requires Darwin")
    @dsym_test
    def test_with_dsym_and_run_command(self):
        """Test that deeply nested ValueObjects still work."""
        self.buildDsym()
        self.recursive_vo_commands()

    @dwarf_test
    def test_with_dwarf_and_run_command(self):
        """Test that deeply nested ValueObjects still work."""
        self.buildDwarf()
        self.recursive_vo_commands()

    def setUp(self):
        # Call super's setUp().
        TestBase.setUp(self)
        # Find the line number to break at.
        self.line = line_number('main.cpp', '// Set break point at this line.')

    def recursive_vo_commands(self):
        """Run to the breakpoint, then descend ~24500 levels through a
        recursive data structure via GetChildAtIndex to exercise deep
        ValueObject chains.

        (Docstring corrected: the original said 'file and class static
        variables display correctly' — a copy/paste from another test.)
        """
        self.runCmd("file a.out", CURRENT_EXECUTABLE_SET)
        lldbutil.run_break_set_by_file_and_line (self, "main.cpp", self.line, num_expected_locations=1, loc_exact=True)
        self.runCmd("run", RUN_SUCCEEDED)
        # The stop reason of the thread should be breakpoint.
        self.expect("thread list", STOPPED_DUE_TO_BREAKPOINT,
                    substrs = ['stopped',
                               'stop reason = breakpoint'])
        # This is the function to remove the custom formats in order to have a
        # clean slate for the next test case.
        def cleanup():
            self.runCmd('type format clear', check=False)
            self.runCmd('type summary clear', check=False)
        # Execute the cleanup function during test case tear down.
        self.addTearDownHook(cleanup)
        root = self.frame().FindVariable("root")
        child = root.GetChildAtIndex(1)
        if self.TraceOn():
            # Python 2 print statements — this suite predates Python 3.
            print root
            print child
        # Each GetChildAtIndex(1) follows one link deeper into the structure.
        for i in range(0,24500):
            child = child.GetChildAtIndex(1)
            if self.TraceOn():
                print child
        self.assertTrue(child.IsValid(),"could not retrieve the deep ValueObject")
        self.assertTrue(child.GetChildAtIndex(0).IsValid(),"the deep ValueObject has no value")
        self.assertTrue(child.GetChildAtIndex(0).GetValueAsUnsigned() != 0,"the deep ValueObject has a zero value")
        self.assertTrue(child.GetChildAtIndex(1).GetValueAsUnsigned() != 0, "the deep ValueObject has no next")
if __name__ == '__main__':
    import atexit
    lldb.SBDebugger.Initialize()
    # Ensure the debugger is torn down even if the test run raises.
    atexit.register(lambda: lldb.SBDebugger.Terminate())
    unittest2.main()
|
s20121035/rk3288_android5.1_repo
|
external/lldb/test/functionalities/recursion/TestValueObjectRecursion.py
|
Python
|
gpl-3.0
| 2,682
|
#!/usr/bin/python
# -*- coding: latin2 -*-
import sys
class traffic:
    """A parsed 'down:up' traffic counter pair with a derived total."""

    def __init__(self, rawStr):
        # rawStr has the form "<down>:<up>"; both halves are integers.
        down_part, up_part = rawStr.split(":")
        self.Down = int(down_part)
        self.Up = int(up_part)
        self.Total = self.Down + self.Up
def printColumns(columns):
    # Emit one tab-separated line (Python 2 print statement).
    print "\t".join([str(x) for x in columns])
# Input (argv[1]): one month per line, presumably in the form
# "traff-MM-YYYY=<d1:u1> <d2:u2> ... [downTotal:upTotal]" — confirm against
# the producing tool.
data = open(sys.argv[1]).readlines()
for monthLine in data:
    (monthHead, monthBody) = monthLine.split("=")
    (traffMark, monthNr, year) = monthHead.split("-")
    monthEntries = [ x.strip() for x in monthBody.split(" ") ]
    # The last entry, wrapped in brackets, carries the month totals.
    monthTotal = traffic(monthEntries[-1].strip("[]"))
    allDaysRaw = monthEntries[:-1]
    allDays = [ traffic(x) for x in allDaysRaw ]
    for dayNr in range(len(allDays)):
        day = allDays[dayNr]
        # Days are numbered from 1, hence the +1 offset.
        columns = (year, monthNr, 1+dayNr, day.Down, day.Up, day.Total,)
        printColumns(columns)
    # Sanity check: per-day sums must reproduce the recorded month total.
    totalDown = sum([ day.Down for day in allDays ])
    totalUp = sum([ day.Up for day in allDays ])
    if (totalDown != monthTotal.Down or totalUp != monthTotal.Up):
        toDump = (totalDown, totalUp, monthTotal.Down, monthTotal.Up,)
        dumpMessage = "Total of %d:%d does not match value at EOL: %d:%d" % toDump
        raise Exception(dumpMessage)
|
szabopeter/traff2tsv
|
traffproc.py
|
Python
|
mit
| 1,218
|
# coding: UTF-8
from __future__ import absolute_import
from unittest import TestCase
import six
if six.PY2:
from StringIO import StringIO
else:
from io import StringIO
from fixedwidthwriter import FixedWidthWriter
class TestFixedWidthWriter(TestCase):
    """Tests for FixedWidthWriter column formatting: alignment direction,
    decimal handling, and the Python 3 binary-file restriction."""

    def setUp(self):
        # In-memory sink so the formatting tests write no files.
        self.s = StringIO()
        self.objs = [
            {'ID': 0, 'NAME': 'Jack', 'BALANCE': 100.50},
            {'ID': 1, 'NAME': 'Mary', 'BALANCE': 100},
            {'ID': 2, 'NAME': u'João', 'BALANCE': 100.25},
        ]
        # Base column spec; each test appends its own BALANCE variant.
        self.cols = [
            ('ID', 2),
            ('NAME', 5),
        ]

    def test_fixed_width_writer(self):
        # Right-aligned balance, two decimal places.
        self.cols.append(('BALANCE', 10, {'direction': '>',
                                          'decimal_spaces': 2}))
        fww = FixedWidthWriter(self.s, self.cols)
        fww.writerows(self.objs)
        lines = self.s.getvalue()
        if six.PY2:
            lines = lines.decode('utf-8')
        self.assertIn(u'0 Jack 100.50', lines)
        self.assertIn(u'1 Mary 100.00', lines)
        self.assertIn(u'2 João 100.25', lines)

    def test_fixed_width_writer_direction(self):
        # Left-aligned balance: padding follows the value.
        self.cols.append(('BALANCE', 10, {'direction': '<',
                                          'decimal_spaces': 2}))
        fww = FixedWidthWriter(self.s, self.cols)
        fww.writerows(self.objs)
        lines = self.s.getvalue()
        if six.PY2:
            lines = lines.decode('utf-8')
        self.assertIn(u'0 Jack 100.50 ', lines)
        self.assertIn(u'1 Mary 100.00 ', lines)
        self.assertIn(u'2 João 100.25 ', lines)

    def test_fixed_width_writer_decimals(self):
        # decimal_spaces=0: values are rendered without forced decimals.
        self.cols.append(('BALANCE', 10, {'direction': '>',
                                          'decimal_spaces': 0}))
        fww = FixedWidthWriter(self.s, self.cols)
        fww.writerows(self.objs)
        lines = self.s.getvalue()
        if six.PY2:
            lines = lines.decode('utf-8')
        self.assertIn(u'0 Jack 100.5', lines)
        self.assertIn(u'1 Mary 100', lines)
        self.assertIn(u'2 João 100.25', lines)

    def test_py3_binary_files_not_supported(self):
        # On Python 3 the writer produces str, so a binary-mode file must fail.
        if six.PY2:
            self.skipTest('PY3 test only')
        with open('file.txt', 'wb') as fixed_file:
            fww = FixedWidthWriter(fixed_file, self.cols)
            with self.assertRaises(TypeError):
                fww.writerows(self.objs)
|
HardDiskD/py-fixedwidthwriter
|
tests/__init__.py
|
Python
|
mit
| 2,435
|
# -*- coding: utf-8 -*-
class Charset(object):
common_name = 'NotoSansMyanmar-Regular'
native_name = ''
def glyphs(self):
glyphs = []
glyphs.append(0x0136) #kinzi
glyphs.append(0x0119) #hha_khm
glyphs.append(0x0198) #lla.sub
glyphs.append(0x01C6) #kha_shn.sub2
glyphs.append(0x0039) #V
glyphs.append(0x0007) #dollar
glyphs.append(0x01FE) #fa_khm.sub2
glyphs.append(0x0207) #_uu
glyphs.append(0x0182) #pa.sub
glyphs.append(0x0017) #four
glyphs.append(0x0215) #_ii_mon
glyphs.append(0x0261) #medial_ra_wa_ha_ovl_tt.w2_5
glyphs.append(0x007F) #ga
glyphs.append(0x0174) #nna_dda
glyphs.append(0x009D) #lla
glyphs.append(0x0229) #medial_ha_u_dot
glyphs.append(0x0259) #medial_ra_wa_ha_ovl_tt
glyphs.append(0x018A) #ma.sub
glyphs.append(0x019F) #e_m.sub2
glyphs.append(0x0257) #medial_ra_wa_ha_ovl
glyphs.append(0x01A3) #sha_skt.sub2
glyphs.append(0x013A) #ra_mark
glyphs.append(0x0281) #medial_ra_wa_ha_tt.w3
glyphs.append(0x0206) #ra_atn.sub2
glyphs.append(0x011D) #za_khm
glyphs.append(0x00A8) #medial_ra
glyphs.append(0x008D) #ta
glyphs.append(0x015A) #ja.sub
glyphs.append(0x01DD) #fa_rpg.sub
glyphs.append(0x0230) #dot_below_spacing
glyphs.append(0x00FE) #three_shn
glyphs.append(0x0033) #P
glyphs.append(0x0147) #medial_ra_tt_bt.w2
glyphs.append(0x0042) #underscore
glyphs.append(0x00BA) #genitive
glyphs.append(0x0085) #jha
glyphs.append(0x0248) #ra.alt2
glyphs.append(0x0188) #bha.sub
glyphs.append(0x0115) #dha_khm
glyphs.append(0x0053) #p
glyphs.append(0x008B) #ddha
glyphs.append(0x0169) #tta.sub2
glyphs.append(0x01B7) #sha_skn.sub
glyphs.append(0x00F0) #_e_above_shn
glyphs.append(0x0162) #lla.notail
glyphs.append(0x01C4) #ka_shn.sub2
glyphs.append(0x011F) #qay_khm
glyphs.append(0x0163) #nnya.sub
glyphs.append(0x0013) #zero
glyphs.append(0x021C) #asat
glyphs.append(0x0134) #kinzi_ii
glyphs.append(0x016F) #dda.sub2
glyphs.append(0x01EB) #tta_khm.sub
glyphs.append(0x010B) #ga_khm
glyphs.append(0x027C) #medial_ra_wa_ha.w1_5
glyphs.append(0x01CF) #da_shn.sub
glyphs.append(0x0068) #section
glyphs.append(0x0265) #medial_ra_bt.w2_5
glyphs.append(0x00B3) #eight_m
glyphs.append(0x0209) #medial_wa_ha
glyphs.append(0x0173) #nna_ttha
glyphs.append(0x0009) #ampersand
glyphs.append(0x019B) #a_m.sub2
glyphs.append(0x00C0) #vocLL_skt
glyphs.append(0x010A) #exclam_shn
glyphs.append(0x018E) #ra.alt
glyphs.append(0x0070) #degree
glyphs.append(0x019C) #a_shn.sub
glyphs.append(0x00C3) #_vocL_skt
glyphs.append(0x002E) #K
glyphs.append(0x00FD) #two_shn
glyphs.append(0x01B0) #bba_mon.sub
glyphs.append(0x01E5) #ja_khm.sub
glyphs.append(0x0139) #ra_ii
glyphs.append(0x0274) #medial_ra_bt2.w1_5
glyphs.append(0x00D8) #tone5_wpk
glyphs.append(0x01C2) #ghwa_epk.sub2
glyphs.append(0x01CC) #za_shn.sub2
glyphs.append(0x0123) #one_atn
glyphs.append(0x004E) #k
glyphs.append(0x00F9) #fa_rpg
glyphs.append(0x01E3) #cha_khm.sub
glyphs.append(0x00D5) #tone2_wpk
glyphs.append(0x01D6) #fa_shn.sub2
glyphs.append(0x0242) #medial_ha_obl_u
glyphs.append(0x00BB) #sha_skt
glyphs.append(0x00F5) #tone6_shn
glyphs.append(0x007C) #threequarters
glyphs.append(0x020E) #_tall_aa
glyphs.append(0x00CC) #sha_skn
glyphs.append(0x0131) #kinzi_asat
glyphs.append(0x0061) #asciitilde
glyphs.append(0x0083) #cha
glyphs.append(0x024A) #medial_ya.alt2
glyphs.append(0x0275) #medial_ya.alt3
glyphs.append(0x01EA) #nya_khmn.sub2
glyphs.append(0x0015) #two
glyphs.append(0x00C6) #jha_mon
glyphs.append(0x0276) #medial_ra_bt2.w2_5
glyphs.append(0x00DA) #ywa_epk
glyphs.append(0x01B5) #_medialMa_mon.sub2
glyphs.append(0x01DE) #fa_rpg.sub2
glyphs.append(0x027F) #medial_ra_wa_ha_ovl_tt.w1_5
glyphs.append(0x027A) #medial_ra_tt_bt2.w1_5
glyphs.append(0x0159) #cha.sub2
glyphs.append(0x0202) #za_khm.sub2
glyphs.append(0x0156) #ca.sub
glyphs.append(0x0219) #dot_below
glyphs.append(0x0029) #F
glyphs.append(0x01F5) #na_khm.sub
glyphs.append(0x0192) #la.sub2
glyphs.append(0x003E) #bracketleft
glyphs.append(0x00AD) #two_m
glyphs.append(0x0041) #asciicircum
glyphs.append(0x00C4) #_vocLL_skt
glyphs.append(0x014C) #ka.sub
glyphs.append(0x00BF) #vocL_skt
glyphs.append(0x026A) #medial_ra_tt_bt.w3
glyphs.append(0x00FC) #one_shn
glyphs.append(0x0096) #ma
glyphs.append(0x0193) #sa.sub
glyphs.append(0x0049) #f
glyphs.append(0x0078) #ordmasculine
glyphs.append(0x0102) #seven_shn
glyphs.append(0x00DF) #ee_kyh
glyphs.append(0x0104) #nine_shn
glyphs.append(0x0279) #medial_ra_tt_bt2.w3
glyphs.append(0x0075) #paragraph
glyphs.append(0x001C) #nine
glyphs.append(0x00BE) #vocRR_skt
glyphs.append(0x0059) #v
glyphs.append(0x0122) #exclam_atn
glyphs.append(0x00D4) #tone1_wpk
glyphs.append(0x00A0) #i_m
glyphs.append(0x0260) #medial_ra_wa_ha_ovl.w2_5
glyphs.append(0x00B4) #nine_m
glyphs.append(0x0247) #_aa_ai
glyphs.append(0x028E) #_uu_dot_spacing_rsbwide.alt
glyphs.append(0x003F) #backslash
glyphs.append(0x0262) #medial_ra_wa_tt.w2_5
glyphs.append(0x0065) #currency
glyphs.append(0x0258) #medial_ra_wa_ha_ovl.w2
glyphs.append(0x00B7) #locative
glyphs.append(0x0127) #_tall_aa_asat
glyphs.append(0x01C3) #ka_shn.sub
glyphs.append(0x01A5) #ssa_dda
glyphs.append(0x012A) #asat_dup
glyphs.append(0x0114) #ddha_khm
glyphs.append(0x0019) #six
glyphs.append(0x0153) #gha.sub2
glyphs.append(0x0282) #medial_ra_wa_ovl.w3
glyphs.append(0x00B2) #seven_m
glyphs.append(0x0086) #nya
glyphs.append(0x0024) #A
glyphs.append(0x0246) #_aa_anusvara
glyphs.append(0x024E) #ta_wa_ovl.sub2
glyphs.append(0x0243) #medial_ha_obl_uu
glyphs.append(0x0034) #Q
glyphs.append(0x01DF) #ga_khm.sub
glyphs.append(0x01AF) #jha_mon.sub2
glyphs.append(0x0226) #kinzi_ii_anusvara
glyphs.append(0x0044) #a
glyphs.append(0x0110) #nya_khmn
glyphs.append(0x0121) #hm_khm
glyphs.append(0x0190) #ra.sub2
glyphs.append(0x0080) #gha
glyphs.append(0x0185) #pha.sub2
glyphs.append(0x0158) #cha.sub
glyphs.append(0x009B) #sa
glyphs.append(0x01EF) #dda_khm.sub
glyphs.append(0x014E) #kha.sub
glyphs.append(0x0168) #tta.sub
glyphs.append(0x018F) #ra.sub
glyphs.append(0x000F) #comma
glyphs.append(0x00A2) #u_m
glyphs.append(0x0088) #tta
glyphs.append(0x0249) #_ii.alt
glyphs.append(0x013D) #medial_ra_wa_tt
glyphs.append(0x0125) #ra_atn
glyphs.append(0x00F3) #tone3_shn
glyphs.append(0x016E) #dda.sub
glyphs.append(0x0099) #la
glyphs.append(0x014A) #_u
glyphs.append(0x0118) #ha_khm
glyphs.append(0x021E) #medial_wa_ovl
glyphs.append(0x023D) #_uu_dot.alt
glyphs.append(0x0210) #_i
glyphs.append(0x01BC) #pwa_wpk.sub2
glyphs.append(0x00D3) #ue_wpk
glyphs.append(0x0213) #_e
glyphs.append(0x014D) #ka.sub2
glyphs.append(0x014B) #_u_spacing
glyphs.append(0x01D1) #na_shn.sub
glyphs.append(0x0079) #guillemotright
glyphs.append(0x0284) #medial_ra_wa_ha_ovl.w3
glyphs.append(0x0021) #greater
glyphs.append(0x0172) #nna_tta
glyphs.append(0x00CF) #kepho_skn
glyphs.append(0x0097) #ya
glyphs.append(0x0002) #nonmarkingreturn
glyphs.append(0x002F) #L
glyphs.append(0x010D) #cha_khm
glyphs.append(0x0089) #ttha
glyphs.append(0x00AC) #one_m
glyphs.append(0x009F) #a_shn
glyphs.append(0x0076) #cedilla
glyphs.append(0x01B8) #sha_skn.sub2
glyphs.append(0x01FA) #ha_khm.sub2
glyphs.append(0x0154) #nga.sub
glyphs.append(0x01E0) #ga_khm.sub2
glyphs.append(0x01EC) #tta_khm.sub2
glyphs.append(0x004F) #l
glyphs.append(0x006D) #logicalnot
glyphs.append(0x0232) #ta_wa.sub
glyphs.append(0x01F1) #ddha_khm.sub
glyphs.append(0x0184) #pha.sub
glyphs.append(0x0142) #medial_ra_bt
glyphs.append(0x0062) #exclamdown
glyphs.append(0x021F) #medial_ha
glyphs.append(0x016C) #dda_dda
glyphs.append(0x00AA) #great_sa
glyphs.append(0x0283) #medial_ra_wa_ovl_tt.w3
glyphs.append(0x0254) #medial_ra_wa_ovl_tt.w2
glyphs.append(0x00E6) #da_shn
glyphs.append(0x01BF) #ywa_epk.sub
glyphs.append(0x01C5) #kha_shn.sub
glyphs.append(0x0116) #na_khm
glyphs.append(0x00F1) #_f_above_shn
glyphs.append(0x0171) #ddha.sub2
glyphs.append(0x0277) #medial_ra_tt_bt2.w2_5
glyphs.append(0x01FC) #hha_khm.sub2
glyphs.append(0x0003) #space
glyphs.append(0x00C7) #bba_mon
glyphs.append(0x0195) #ha.sub
glyphs.append(0x0161) #nnya.notail
glyphs.append(0x0237) #medial_ra_wa_ha_tt.w2
glyphs.append(0x0250) #medial_ra_tt.w2_5
glyphs.append(0x0072) #twosuperior
glyphs.append(0x0098) #ra
glyphs.append(0x0266) #medial_ra_tt_bt.w2_5
glyphs.append(0x0175) #nna.sub
glyphs.append(0x01F2) #ddha_khm.sub2
glyphs.append(0x002A) #G
glyphs.append(0x0113) #dda_khm
glyphs.append(0x0140) #medial_ra_tt
glyphs.append(0x01DC) #ha_shn.sub2
glyphs.append(0x0124) #two_atn
glyphs.append(0x01F8) #sa_khm.sub2
glyphs.append(0x003A) #W
glyphs.append(0x01A1) #great_sa.sub2
glyphs.append(0x01AE) #jha_mon.sub
glyphs.append(0x0240) #_uu_dot_spacing.alt
glyphs.append(0x01D3) #pha_shn.sub
glyphs.append(0x00A5) #e_mon
glyphs.append(0x0164) #nnya.sub2
glyphs.append(0x004A) #g
glyphs.append(0x00EC) #ha_shn
glyphs.append(0x0289) #_u_spacing_rsbwide
glyphs.append(0x01FF) #xa_khm.sub
glyphs.append(0x019E) #e_m.sub
glyphs.append(0x0252) #medial_ra_wa_ovl.w2
glyphs.append(0x0000) #.notdef
glyphs.append(0x005A) #w
glyphs.append(0x0225) #_ii_anusvara
glyphs.append(0x00AB) #zero_m
glyphs.append(0x0186) #ba.sub
glyphs.append(0x026B) #medial_ra.w1_5
glyphs.append(0x01F7) #sa_khm.sub
glyphs.append(0x0145) #medial_ra_bt2.w2
glyphs.append(0x0278) #medial_ra_bt2.w3
glyphs.append(0x012B) #medial_ha_u
glyphs.append(0x0150) #ga.sub
glyphs.append(0x0014) #one
glyphs.append(0x00E8) #pha_shn
glyphs.append(0x01B6) #_medialLa_mon.sub2
glyphs.append(0x0165) #tta_tta
glyphs.append(0x0212) #medial_ha.sub2
glyphs.append(0x01CA) #ca_shn.sub2
glyphs.append(0x0287) #ra_rsbwide220
glyphs.append(0x023A) #afii301
glyphs.append(0x0189) #bha.sub2
glyphs.append(0x025E) #medial_ya_wa_ha_ovl
glyphs.append(0x01AD) #nga_mon.sub2
glyphs.append(0x0025) #B
glyphs.append(0x00DE) #u_kyh
glyphs.append(0x00B8) #completed
glyphs.append(0x00E7) #na_shn
glyphs.append(0x0035) #R
glyphs.append(0x00AE) #three_m
glyphs.append(0x026E) #medial_ra_tt_bt.w1_5
glyphs.append(0x0045) #b
glyphs.append(0x010E) #ja_khm
glyphs.append(0x0234) #medial_ra_wa_ha
glyphs.append(0x0271) #medial_ra_wa_ha.w2_5
glyphs.append(0x0256) #medial_wa_ha_dot_ovl
glyphs.append(0x01BA) #tha_wpk.sub2
glyphs.append(0x015C) #jha.sub
glyphs.append(0x01C0) #ywa_epk.sub2
glyphs.append(0x0197) #lla_lla
glyphs.append(0x0055) #r
glyphs.append(0x0239) #afii61664
glyphs.append(0x01E8) #jha_khm.sub2
glyphs.append(0x011C) #xa_khm
glyphs.append(0x0095) #bha
glyphs.append(0x017F) #na.alt
glyphs.append(0x00F2) #tone2_shn
glyphs.append(0x0233) #dot_below_pr
glyphs.append(0x00C5) #nga_mon
glyphs.append(0x0001) #null
glyphs.append(0x0155) #nga.sub2
glyphs.append(0x0241) #medial_ha_obl
glyphs.append(0x0054) #q
glyphs.append(0x0224) #ha_tail.alt
glyphs.append(0x01ED) #ttha_khm.sub
glyphs.append(0x014F) #kha.sub2
glyphs.append(0x00BC) #ssa_skt
glyphs.append(0x025D) #medial_ra_wa_ovl_tt.w2_5
glyphs.append(0x0176) #nna.sub2
glyphs.append(0x0196) #ha.sub2
glyphs.append(0x0130) #kinzi_anusvara
glyphs.append(0x00F6) #ctone2_shn
glyphs.append(0x0005) #quotedbl
glyphs.append(0x022A) #medial_ha_uu_dot
glyphs.append(0x007B) #onehalf
glyphs.append(0x0006) #numbersign
glyphs.append(0x0030) #M
glyphs.append(0x01A7) #ssa_skt.sub2
glyphs.append(0x0235) #medial_ra_wa_ha.w2
glyphs.append(0x0273) #medial_ra_wa_ha.w3
glyphs.append(0x001B) #eight
glyphs.append(0x00B1) #six_m
glyphs.append(0x0043) #grave
glyphs.append(0x017A) #tha.sub2
glyphs.append(0x0269) #medial_ra_bt.w3
glyphs.append(0x007D) #ka
glyphs.append(0x0223) #uni25CC
glyphs.append(0x00B5) #little_section
glyphs.append(0x0221) #_i.alt
glyphs.append(0x0050) #m
glyphs.append(0x0111) #tta_khm
glyphs.append(0x00CE) #hathi_skn
glyphs.append(0x01C8) #ga_shn.sub2
glyphs.append(0x0129) #ai_dup
glyphs.append(0x024B) #medial_ha_obl_u_dot
glyphs.append(0x00D6) #tone3_wpk
glyphs.append(0x01E2) #ca_khm.sub2
glyphs.append(0x0205) #ra_atn.sub
glyphs.append(0x01E4) #cha_khm.sub2
glyphs.append(0x0191) #la.sub
glyphs.append(0x016D) #dda_ddha
glyphs.append(0x0151) #ga.sub2
glyphs.append(0x00CA) #_medialMa_mon
glyphs.append(0x0228) #medial_wa_dot
glyphs.append(0x01AC) #nga_mon.sub
glyphs.append(0x003D) #Z
glyphs.append(0x0081) #nga
glyphs.append(0x006A) #copyright
glyphs.append(0x01E6) #ja_khm.sub2
glyphs.append(0x0066) #yen
glyphs.append(0x01BE) #nna_epk.sub2
glyphs.append(0x0108) #_ai_atn
glyphs.append(0x022B) #medial_ha_dot
glyphs.append(0x01B3) #bbe_mon.sub2
glyphs.append(0x002B) #H
glyphs.append(0x01A2) #sha_skt.sub
glyphs.append(0x008F) #da
glyphs.append(0x00E1) #kha_shn
glyphs.append(0x01C9) #ca_shn.sub
glyphs.append(0x00B9) #aforementioned
glyphs.append(0x0120) #qn_khm
glyphs.append(0x003B) #X
glyphs.append(0x0263) #medial_ra_wa_tt.w1_5
glyphs.append(0x0148) #medial_ra_tt_bt3
glyphs.append(0x005F) #bar
glyphs.append(0x01CE) #nya_shn.sub2
glyphs.append(0x004B) #h
glyphs.append(0x0183) #pa.sub2
glyphs.append(0x0109) #sone_shn
glyphs.append(0x0179) #tha.sub
glyphs.append(0x0204) #ra_khm.sub2
glyphs.append(0x005B) #x
glyphs.append(0x0268) #medial_ra_tt.w3
glyphs.append(0x0141) #medial_ra_tt.w2
glyphs.append(0x00A4) #e_m
glyphs.append(0x006B) #ordfeminine
glyphs.append(0x0180) #na.sub
glyphs.append(0x00DB) #ghwa_epk
glyphs.append(0x0026) #C
glyphs.append(0x022C) #_uu_dot
glyphs.append(0x0194) #sa.sub2
glyphs.append(0x01D9) #tha_shn.sub
glyphs.append(0x013B) #medial_ra_wa
glyphs.append(0x00A6) #o_m
glyphs.append(0x01A8) #vocR_skt.sub
glyphs.append(0x005E) #braceleft
glyphs.append(0x0157) #ca.sub2
glyphs.append(0x00EF) #e_shn
glyphs.append(0x025F) #medial_ya_wa_ovl
glyphs.append(0x007A) #onequarter
glyphs.append(0x011E) #ra_khm
glyphs.append(0x019D) #a_shn.sub2
glyphs.append(0x025B) #medial_ra_wa.w2_5
glyphs.append(0x0170) #ddha.sub
glyphs.append(0x011A) #fa_khm
glyphs.append(0x01D4) #pha_shn.sub2
glyphs.append(0x009A) #wa
glyphs.append(0x0101) #six_shn
glyphs.append(0x028A) #_uu_spacing_rsbwide
glyphs.append(0x0112) #ttha_khm
glyphs.append(0x0036) #S
glyphs.append(0x018B) #ma.sub2
glyphs.append(0x0208) #_uu_spacing
glyphs.append(0x0004) #exclam
glyphs.append(0x0084) #ja
glyphs.append(0x0245) #tall_aa_ai
glyphs.append(0x0167) #tta.alt
glyphs.append(0x0046) #c
glyphs.append(0x024C) #medial_ha_obl_uu_dot
glyphs.append(0x00D7) #tone4_wpk
glyphs.append(0x0178) #ta.sub2
glyphs.append(0x0020) #equal
glyphs.append(0x01CB) #za_shn.sub
glyphs.append(0x0160) #nya.notail
glyphs.append(0x0056) #s
glyphs.append(0x01F6) #na_khm.sub2
glyphs.append(0x0270) #medial_ra_wa_ha_tt.w1_5
glyphs.append(0x024F) #medial_ra.w2_5
glyphs.append(0x017B) #da.sub
glyphs.append(0x0082) #ca
glyphs.append(0x0011) #period
glyphs.append(0x01F3) #dha_khm.sub
glyphs.append(0x001D) #colon
glyphs.append(0x0251) #medial_ra_wa_ovl
glyphs.append(0x00C2) #_vocRR_skt
glyphs.append(0x0280) #medial_ra_wa_tt.w3
glyphs.append(0x0201) #za_khm.sub
glyphs.append(0x015E) #nya.sub
glyphs.append(0x0016) #three
glyphs.append(0x0063) #cent
glyphs.append(0x01B2) #bbe_mon.sub
glyphs.append(0x01B1) #bba_mon.sub2
glyphs.append(0x0092) #pa
glyphs.append(0x0133) #kinzi_i
glyphs.append(0x006E) #uni00AD
glyphs.append(0x023B) #medial_wa_dot_ovl
glyphs.append(0x023C) #medial_ha_dot_obl
glyphs.append(0x01A0) #great_sa.sub
glyphs.append(0x0031) #N
glyphs.append(0x011B) #redup_khm
glyphs.append(0x00E5) #nya_shn
glyphs.append(0x01A6) #ssa_skt.sub
glyphs.append(0x01F9) #ha_khm.sub
glyphs.append(0x00C1) #_vocR_skt
glyphs.append(0x00BD) #vocR_skt
glyphs.append(0x0128) #asat_anusvara
glyphs.append(0x006C) #guillemotleft
glyphs.append(0x01EE) #ttha_khm.sub2
glyphs.append(0x022D) #_u_dot
glyphs.append(0x01E7) #jha_khm.sub
glyphs.append(0x0105) #tone1_khm
glyphs.append(0x0218) #anusvara
glyphs.append(0x00A9) #medial_wa
glyphs.append(0x0051) #n
glyphs.append(0x00F4) #tone5_shn
glyphs.append(0x00A7) #au
glyphs.append(0x01AA) #vocRR_skt.sub
glyphs.append(0x0060) #braceright
glyphs.append(0x0103) #eight_shn
glyphs.append(0x001A) #seven
glyphs.append(0x0187) #ba.sub2
glyphs.append(0x0067) #brokenbar
glyphs.append(0x020C) #medial_ya_wa_ha
glyphs.append(0x00ED) #medial_wa_shn
glyphs.append(0x0166) #tta_ttha
glyphs.append(0x00A1) #ii
glyphs.append(0x0126) #tone_pak
glyphs.append(0x027B) #medial_ra_wa.w1_5
glyphs.append(0x01B4) #_medialNa_mon.sub2
glyphs.append(0x0087) #nnya
glyphs.append(0x01D7) #ba_shn.sub
glyphs.append(0x00EA) #ba_shn
glyphs.append(0x00B0) #five_m
glyphs.append(0x00FA) #tone5_rpg
glyphs.append(0x00C8) #bbe_mon
glyphs.append(0x0090) #dha
glyphs.append(0x018C) #ya.sub
glyphs.append(0x0152) #gha.sub
glyphs.append(0x01FD) #fa_khm.sub
glyphs.append(0x00D0) #tha_wpk
glyphs.append(0x022E) #_u_dot_spacing
glyphs.append(0x00E4) #za_shn
glyphs.append(0x0064) #sterling
glyphs.append(0x015D) #nya.narrow
glyphs.append(0x0138) #ra_i
glyphs.append(0x00DD) #oe_kyh
glyphs.append(0x002C) #I
glyphs.append(0x0093) #pha
glyphs.append(0x021D) #medial_ya
glyphs.append(0x01CD) #nya_shn.sub
glyphs.append(0x0149) #medial_ra_tt_bt2.w2
glyphs.append(0x01BD) #nna_epk.sub
glyphs.append(0x01AB) #vocRR_skt.sub2
glyphs.append(0x020F) #_aa
glyphs.append(0x018D) #ya.sub2
glyphs.append(0x003C) #Y
glyphs.append(0x0253) #medial_ra_wa_ovl_tt
glyphs.append(0x010C) #ca_khm
glyphs.append(0x0214) #_ai
glyphs.append(0x0288) #ra_rsbwide150.alt2
glyphs.append(0x004C) #i
glyphs.append(0x00E9) #fa_shn
glyphs.append(0x0220) #medial_ha_obl.sub2
glyphs.append(0x0106) #tone3_khm
glyphs.append(0x00EB) #tha_shn
glyphs.append(0x00DC) #i_gkn
glyphs.append(0x00D9) #nna_epk
glyphs.append(0x005C) #y
glyphs.append(0x008E) #tha
glyphs.append(0x012C) #medial_ha_uu
glyphs.append(0x0071) #plusminus
glyphs.append(0x001F) #less
glyphs.append(0x0203) #ra_khm.sub
glyphs.append(0x0200) #xa_khm.sub2
glyphs.append(0x023E) #_u_dot.alt
glyphs.append(0x0264) #medial_ra_wa_ha_tt.w2_5
glyphs.append(0x00C9) #_medialNa_mon
glyphs.append(0x0231) #ta_wa.sub2
glyphs.append(0x020A) #medial_ya_ha
glyphs.append(0x00EE) #aa_shn
glyphs.append(0x01D8) #ba_shn.sub2
glyphs.append(0x00FB) #zero_shn
glyphs.append(0x0117) #sa_khm
glyphs.append(0x0222) #_ai.alt
glyphs.append(0x025C) #medial_ra_wa_ovl.w2_5
glyphs.append(0x013E) #medial_ra_wa_tt.w2
glyphs.append(0x01DB) #ha_shn.sub
glyphs.append(0x01E9) #nya_khmn.sub
glyphs.append(0x0010) #hyphen
glyphs.append(0x016A) #ttha.sub
glyphs.append(0x0027) #D
glyphs.append(0x01C7) #ga_shn.sub
glyphs.append(0x0238) #uni200B
glyphs.append(0x027E) #medial_ra_wa_ha_ovl.w1_5
glyphs.append(0x0018) #five
glyphs.append(0x0037) #T
glyphs.append(0x0135) #kinzi_ii_mon
glyphs.append(0x01D5) #fa_shn.sub
glyphs.append(0x009E) #a_m
glyphs.append(0x0255) #medial_wa_ha_ovl
glyphs.append(0x00CB) #_medialLa_mon
glyphs.append(0x028C) #_uu_dot_spacing_rsbwide
glyphs.append(0x0074) #acute
glyphs.append(0x0047) #d
glyphs.append(0x00CD) #eu_skn
glyphs.append(0x01FB) #hha_khm.sub
glyphs.append(0x0057) #t
glyphs.append(0x01BB) #pwa_wpk.sub
glyphs.append(0x0286) #medial_ra_wa_ovl_tt.w1_5
glyphs.append(0x000C) #parenright
glyphs.append(0x00AF) #four_m
glyphs.append(0x0216) #_o_mon
glyphs.append(0x0100) #five_shn
glyphs.append(0x025A) #medial_ra_wa_ha_ovl_tt.w2
glyphs.append(0x0285) #medial_ra_wa_ha_ovl_tt.w3
glyphs.append(0x00E0) #ka_shn
glyphs.append(0x000A) #quotesingle
glyphs.append(0x0244) #tall_aa_anusvara
glyphs.append(0x026F) #medial_ra_bt3
glyphs.append(0x012E) #_i_anusvara
glyphs.append(0x0012) #slash
glyphs.append(0x008A) #dda
glyphs.append(0x012F) #kinzi_ai
glyphs.append(0x0137) #ra_asat
glyphs.append(0x01A4) #ssa_ttha
glyphs.append(0x0094) #ba
glyphs.append(0x020B) #medial_ya.alt1
glyphs.append(0x01A9) #vocR_skt.sub2
glyphs.append(0x026C) #medial_ra_tt.w1_5
glyphs.append(0x0132) #kinzi_i_anusvara
glyphs.append(0x0032) #O
glyphs.append(0x01C1) #ghwa_epk.sub
glyphs.append(0x017E) #dha.sub2
glyphs.append(0x0199) #lla.sub2
glyphs.append(0x017C) #da.sub2
glyphs.append(0x019A) #a_m.sub
glyphs.append(0x00E3) #ca_shn
glyphs.append(0x00F8) #etone_shn
glyphs.append(0x0052) #o
glyphs.append(0x028D) #_u_dot_spacing_rsbwide.alt
glyphs.append(0x000E) #plus
glyphs.append(0x0144) #medial_ra_bt3.w2
glyphs.append(0x0073) #threesuperior
glyphs.append(0x016B) #ttha.sub2
glyphs.append(0x027D) #medial_ra_wa_ovl.w1_5
glyphs.append(0x0107) #_a_atn
glyphs.append(0x00D2) #eu_wpk
glyphs.append(0x000B) #parenleft
glyphs.append(0x0272) #medial_ra_wa.w3
glyphs.append(0x013C) #medial_ra_wa.w2
glyphs.append(0x01DA) #tha_shn.sub2
glyphs.append(0x0022) #question
glyphs.append(0x022F) #_uu_dot_spacing
glyphs.append(0x0177) #ta.sub
glyphs.append(0x028B) #_u_dot_spacing_rsbwide
glyphs.append(0x026D) #medial_ra_bt.w1_5
glyphs.append(0x023F) #_u_dot_spacing.alt
glyphs.append(0x028F) #dot_below_spacing_rsbwide100
glyphs.append(0x021B) #virama
glyphs.append(0x006F) #registered
glyphs.append(0x002D) #J
glyphs.append(0x017D) #dha.sub
glyphs.append(0x0069) #dieresis
glyphs.append(0x0143) #medial_ra_bt.w2
glyphs.append(0x0227) #medial_wa_ha_dot
glyphs.append(0x0181) #na.sub2
glyphs.append(0x00B6) #big_section
glyphs.append(0x015F) #nya.sub2
glyphs.append(0x009C) #ha
glyphs.append(0x01F0) #dda_khm.sub2
glyphs.append(0x024D) #ta_wa_ovl.sub
glyphs.append(0x0077) #onesuperior
glyphs.append(0x00A3) #uu
glyphs.append(0x004D) #j
glyphs.append(0x01E1) #ca_khm.sub
glyphs.append(0x005D) #z
glyphs.append(0x01D0) #da_shn.sub2
glyphs.append(0x001E) #semicolon
glyphs.append(0x008C) #nna
glyphs.append(0x00FF) #four_shn
glyphs.append(0x0023) #at
glyphs.append(0x01D2) #na_shn.sub2
glyphs.append(0x007E) #kha
glyphs.append(0x00E2) #ga_shn
glyphs.append(0x00F7) #ctone3_shn
glyphs.append(0x0091) #na
glyphs.append(0x0008) #percent
glyphs.append(0x01F4) #dha_khm.sub2
glyphs.append(0x020D) #medial_ya_wa
glyphs.append(0x015B) #ja.sub2
glyphs.append(0x0040) #bracketright
glyphs.append(0x0028) #E
glyphs.append(0x0211) #_ii
glyphs.append(0x000D) #asterisk
glyphs.append(0x0290) #uA92E
glyphs.append(0x00D1) #pwa_wpk
glyphs.append(0x0038) #U
glyphs.append(0x0146) #medial_ra_tt_bt
glyphs.append(0x0267) #medial_ra.w3
glyphs.append(0x013F) #medial_ra.w2
glyphs.append(0x021A) #visarga
glyphs.append(0x0048) #e
glyphs.append(0x0217) #_e_above
glyphs.append(0x01B9) #tha_wpk.sub
glyphs.append(0x010F) #jha_khm
glyphs.append(0x0058) #u
glyphs.append(0x0236) #medial_ra_wa_ha_tt
glyphs.append(0x012D) #_i_ai
return glyphs
|
davelab6/pyfontaine
|
fontaine/charsets/noto_glyphs/notosansmyanmar_regular.py
|
Python
|
gpl-3.0
| 27,865
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from __future__ import (unicode_literals, absolute_import,
division, print_function)
import logging
from django.core.management.base import BaseCommand
from snisi_core.models.Periods import MonthPeriod
from snisi_core.models.Reporting import ExpectedReporting
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """One-shot cleanup of ExpectedReporting rows for Dec 2014 - Jan 2015.

    Deletes rows whose completion_status is empty, then deletes MISSING
    rows that duplicate an existing COMPLETE/MATCHING row with identical
    parameters (the satisfied row is kept).
    """

    def handle(self, *args, **options):
        start = MonthPeriod.from_url_str('12-2014').start_on
        end = MonthPeriod.from_url_str('01-2015').end_on

        # Rows with an empty completion_status carry no information.
        bads = ExpectedReporting.objects.filter(
            period__start_on__gte=start,
            period__end_on__lte=end,
            completion_status='')
        logger.info("DELETING {} bads".format(bads.count()))
        bads.delete()

        for exp in ExpectedReporting.objects.filter(
                period__start_on__gte=start,
                period__end_on__lte=end,
                completion_status=ExpectedReporting.COMPLETION_MISSING):

            params = {
                'report_class': exp.report_class,
                'reporting_role': exp.reporting_role,
                'period': exp.period,
                'within_period': exp.within_period,
                'entity': exp.entity,
                'within_entity': exp.within_entity,
                'reporting_period': exp.reporting_period,
                'extended_reporting_period': exp.extended_reporting_period,
                'amount_expected': exp.amount_expected,
                'completion_status__in': (
                    ExpectedReporting.COMPLETION_COMPLETE,
                    ExpectedReporting.COMPLETION_MATCHING),
            }
            # renamed from `filter`, which shadowed the builtin
            duplicates = ExpectedReporting.objects.filter(**params)
            if not duplicates.exists():
                # no duplicates on this one.
                logger.info("... No duplicate")
                continue

            # NOTE(review): .get() raises MultipleObjectsReturned if more
            # than one satisfied duplicate exists -- confirm that crashing
            # is acceptable for this one-off maintenance command.
            good = duplicates.get()
            if not exp.satisfied and not exp.arrived_reports.count():
                logger.info(". DELETING exp: {}".format(exp))
                exp.delete()
            else:
                logger.info("CAN'T DELETE EXP as satisfied: {}".format(exp))
                logger.info(good)
|
yeleman/snisi
|
snisi_maint/management/commands/fix_double_expected.py
|
Python
|
mit
| 2,305
|
from django.conf import settings
from django import template
# Depending on your django version, `reverse` and `NoReverseMatch` has been moved.
# From django 2.0 they've been moved to `django.urls`
try:
from django.urls import reverse, NoReverseMatch
except ImportError:
from django.core.urlresolvers import reverse, NoReverseMatch
register = template.Library()
# From django 1.9 `assignment_tag` is deprecated in favour of `simple_tag`
try:
simple_tag = register.simple_tag
except AttributeError:
simple_tag = register.assignment_tag
MAX_LENGTH_BOOTSTRAP_COLUMN = 12
def css_classes_for_field(field, custom_classes):
    """Combine the widget's own CSS classes, *custom_classes* and a
    'required' marker, then pass the result through field.css_classes()."""
    widget_attrs = field.field.widget.attrs
    parts = [
        widget_attrs.get('class', ''),
        custom_classes,
        'required' if field.field.required else '',
    ]
    return field.css_classes(' '.join(parts))
@register.filter()
def get_label(field, custom_classes=''):
    """Render the field's <label> tag carrying the computed CSS classes."""
    return field.label_tag(
        attrs={'class': css_classes_for_field(field, custom_classes)},
        label_suffix='',
    )
@register.filter()
def add_class(field, custom_classes=''):
    """Attach the computed CSS classes to the field's widget and return it."""
    classes = css_classes_for_field(field, custom_classes)
    try:
        # Wrapped widgets (e.g. SelectMultiple inside an admin wrapper)
        # keep the real widget one attribute level deeper.
        field.field.widget.widget.attrs.update({'class': classes})
    except AttributeError:
        # bare except replaced: only a missing .widget should fall through
        field.field.widget.attrs.update({'class': classes})
    return field
@register.filter()
def widget_type(field):
    """Return the lower-cased class name of the field's widget.

    Admin readonly rows arrive as plain dicts and are reported specially.
    """
    if isinstance(field, dict):
        return 'adminreadonlyfield'
    try:
        # Wrapped widgets (e.g. SelectMultiple inside an admin wrapper)
        # keep the real widget one attribute level deeper.
        widget = field.field.widget.widget
    except AttributeError:
        # bare except replaced: only a missing .widget should fall through
        widget = field.field.widget
    # local renamed so it no longer shadows the function name
    return widget.__class__.__name__.lower()
@register.filter()
def placeholder(field, placeholder=''):
    """Set the widget's HTML placeholder text and return the field."""
    widget = field.field.widget
    widget.attrs.update(placeholder=placeholder)
    return field
def sidebar_menu_setting():
    """Read the BOOTSTRAP_ADMIN_SIDEBAR_MENU setting (enabled by default)."""
    enabled = getattr(settings, 'BOOTSTRAP_ADMIN_SIDEBAR_MENU', True)
    return enabled
@simple_tag
def display_sidebar_menu(has_filters=False):
    """Tell templates whether the sidebar menu should be rendered."""
    # change_list pages with filters always show the menu
    return True if has_filters else sidebar_menu_setting()
@register.filter()
def class_for_field_boxes(line):
    """Bootstrap column class that evenly splits a row among its fields."""
    width = MAX_LENGTH_BOOTSTRAP_COLUMN // len(line.fields)
    # more than 12 fields would yield 0; clamp to the minimum column width
    return 'col-sm-{0}'.format(width if width else 1)
|
django-admin-bootstrap/django-admin-bootstrap
|
bootstrap_admin/templatetags/bootstrap_admin_template_tags.py
|
Python
|
bsd-3-clause
| 2,390
|
# -*- coding: UTF-8 -*-
#
# Copyright 2011-2019 by Dirk Gorissen, Stephen Rauch and Contributors
# All rights reserved.
# This file is part of the Pycel Library, Licensed under GPLv3 (the 'License')
# You may not use this work except in compliance with the License.
# You may obtain a copy of the Licence at:
# https://www.gnu.org/licenses/gpl-3.0.en.html
"""
Python equivalents of various excel functions
"""
import math
import sys
from decimal import Decimal, ROUND_DOWN, ROUND_HALF_UP, ROUND_UP
import numpy as np
from pycel.excelutil import (
coerce_to_number,
DIV0,
ERROR_CODES,
flatten,
handle_ifs,
is_array_arg,
is_number,
list_like,
NA_ERROR,
NUM_ERROR,
VALUE_ERROR,
)
from pycel.lib.function_helpers import (
excel_helper,
excel_math_func,
)
# math.prod is only available from Python 3.8; fall back to numpy before that.
if sys.version_info >= (3, 8):  # pragma: no cover
    prod = math.prod
else:  # pragma: no cover
    # ::TODO:: remove when Python 3.7 is obsolete
    def prod(values):
        # np.prod needs a realized sequence, not a generator
        return np.prod(list(values))
def _numerics(*args, keep_bools=False, to_number=lambda x: x):
    """Flatten *args* to a tuple of numeric values, ignoring non-numeric
    cells.  Returns the first Excel error code found instead, if any.
    Booleans are dropped unless *keep_bools* is set.
    """
    flat = tuple(flatten(args))
    for item in flat:
        if item in ERROR_CODES:
            # propagate the first error in the list
            return item
    converted = (to_number(v) for v in flat
                 if keep_bools or not isinstance(v, bool))
    return tuple(v for v in converted if isinstance(v, (int, float)))
@excel_math_func
def abs_(value1):
    """ABS: the absolute value of *value1*.

    Excel reference: https://support.microsoft.com/en-us/office/
      ABS-function-3420200F-5628-4E8C-99DA-C99D7C87713C
    """
    result = abs(value1)
    return result
@excel_math_func
def atan2_(x_num, y_num):
    """ATAN2: arctangent of the point (x_num, y_num).

    Excel's argument order is (x, y) while math.atan2 expects (y, x),
    so the arguments are swapped before the call.
    """
    y, x = y_num, x_num
    return math.atan2(y, x)
@excel_math_func
def ceiling(number, significance):
    """CEILING: round *number* away from zero to a multiple of *significance*.

    Excel reference: https://support.microsoft.com/en-us/office/
      CEILING-function-0A5CD7C8-0720-4F0A-BD2C-C943E510899F
    """
    if significance < 0 < number:
        # positive number with negative significance is a #NUM! error
        return NUM_ERROR
    if 0 in (number, significance):
        return 0
    if number < 0 < significance:
        # negative number, positive significance: truncate toward zero
        return significance * int(number / significance)
    return significance * math.ceil(number / significance)
@excel_math_func
def ceiling_math(number, significance=1, mode=0):
    """CEILING.MATH: round up to a multiple of *significance*; with a
    non-zero *mode*, negative numbers round away from zero instead.

    Excel reference: https://support.microsoft.com/en-us/office/
      ceiling-math-function-80f95d2f-b499-4eee-9f16-f795a8e306c8
    """
    if not significance:
        return 0
    step = abs(significance)
    if mode and number < 0:
        step = -step
    return step * math.ceil(number / step)
@excel_math_func
def ceiling_precise(number, significance=1):
    """CEILING.PRECISE: round up (toward +inf) to a multiple of
    abs(*significance*), regardless of sign.

    Excel reference: https://support.microsoft.com/en-us/office/
      ceiling-precise-function-f366a774-527a-4c92-ba49-af0a196e66cb
    """
    if not significance:
        return 0
    step = abs(significance)
    return step * math.ceil(number / step)
def conditional_format_ids(*args):
    """Return the dxf ids of the conditions that fired, honouring the
    per-condition stop-if-true flag.

    Each arg is a (condition, dxf_id, stop_if_true) triple.
    """
    matched = []
    for condition, dxf_id, stop_if_true in args:
        if not condition:
            continue
        matched.append(dxf_id)
        if stop_if_true:
            # later conditions are not evaluated once stop-if-true fires
            break
    return tuple(matched)
@excel_math_func
def even(value):
    """EVEN: round away from zero to the nearest even integer.

    Excel reference: https://support.microsoft.com/en-us/office/
      even-function-197b5f06-c795-4c1e-8696-3c3b8a646cf9
    """
    magnitude = math.ceil(abs(value) / 2) * 2
    return math.copysign(magnitude, value)
@excel_math_func
def fact(value):
    """FACT: factorial of the integer part of *value*; negative input is
    a #NUM! error.

    Excel reference: https://support.microsoft.com/en-us/office/
      fact-function-ca8588c2-15f2-41c0-8e8c-c11bd471a4f3
    """
    if not (value >= 0):
        return NUM_ERROR
    return math.factorial(int(value))
@excel_helper(cse_params=-1)
def factdouble(value):
    """FACTDOUBLE: the double factorial n!! of the integer part of *value*."""
    # NOTE(review): the URL below is the FACT article and looks copy-pasted;
    # FACTDOUBLE has its own reference page.
    # Excel reference: https://support.microsoft.com/en-us/office/
    #   fact-function-ca8588c2-15f2-41c0-8e8c-c11bd471a4f3
    if isinstance(value, bool):
        # booleans are rejected outright with #VALUE!
        return VALUE_ERROR
    value = coerce_to_number(value, convert_all=True)
    if isinstance(value, str):
        # coercion failed: still a string -> #VALUE!
        return VALUE_ERROR
    if value < 0:
        return NUM_ERROR
    # product of value, value-2, ... down to 1 or 2; np.prod of the empty
    # range (value < 2) is 1.0, which covers FACTDOUBLE(0) == 1
    return np.sum(np.prod(range(int(value), 0, -2), axis=0))
@excel_math_func
def floor(number, significance):
    """FLOOR: round *number* toward zero to a multiple of *significance*.

    Excel reference: https://support.microsoft.com/en-us/office/
      FLOOR-function-14BB497C-24F2-4E04-B327-B0B4DE5A8886
    """
    if significance < 0 < number:
        # positive number with negative significance is a #NUM! error
        return NUM_ERROR
    if not number:
        return 0
    if not significance:
        # Excel reports #DIV/0! for a zero significance
        return DIV0
    return significance * math.floor(number / significance)
@excel_math_func
def floor_math(number, significance=1, mode=0):
    """FLOOR.MATH: round down to a multiple of *significance*; with a
    non-zero *mode*, negative numbers round toward zero instead.

    Excel reference: https://support.microsoft.com/en-us/office/
      floor-math-function-c302b599-fbdb-4177-ba19-2c2b1249a2f5
    """
    if not significance:
        return 0
    step = -abs(significance) if mode and number < 0 else abs(significance)
    return step * math.floor(number / step)
@excel_math_func
def floor_precise(number, significance=1):
    """FLOOR.PRECISE: round down (toward -inf) to a multiple of
    abs(*significance*), regardless of sign.

    Excel reference: https://support.microsoft.com/en-us/office/
      floor-precise-function-f769b468-1452-4617-8dc3-02f842a0702e
    """
    if not significance:
        return 0
    step = abs(significance)
    return step * math.floor(number / step)
@excel_math_func
def int_(value1):
    """INT: round *value1* down to the nearest integer (toward -inf).

    Excel reference: https://support.microsoft.com/en-us/office/
      INT-function-A6C4AF9E-356D-4369-AB6A-CB1FD9D343EF
    """
    floored = math.floor(value1)
    return floored
@excel_math_func
def ln(arg):
    """LN: natural (base-e) logarithm of *arg*.

    Excel reference: https://support.microsoft.com/en-us/office/
      LN-function-81FE1ED7-DAC9-4ACD-BA1D-07A142C6118F
    """
    result = math.log(arg)
    return result
@excel_math_func
def log(number, base=10):
    """LOG: logarithm of *number* to *base* (default 10, as in Excel).

    Excel reference: https://support.microsoft.com/en-us/office/
      LOG-function-4E82F196-1CA9-4747-8FB0-6C4A3ABB3280
    """
    result = math.log(number, base)
    return result
@excel_math_func
def mod(number, divisor):
    """MOD: remainder of number/divisor, carrying the sign of *divisor*
    (Python's % already matches Excel here); zero divisor -> #DIV/0!.

    Excel reference: https://support.microsoft.com/en-us/office/
      MOD-function-9b6cd169-b6ee-406a-a97b-edf2a9dc24f3
    """
    if not divisor:
        return DIV0
    return number % divisor
@excel_helper(cse_params=None, err_str_params=-1, number_params=0)
def npv(rate, *args):
    """NPV: net present value of the cash flows in *args*, discounted at
    *rate* per period (the first flow is one period in the future).

    Excel reference: https://support.microsoft.com/en-us/office/
      NPV-function-8672CB67-2576-4D07-B67B-AC28ACF2A568
    """
    factor = rate + 1
    flows = [v for v in flatten(args, coerce=coerce_to_number)
             if is_number(v) and not isinstance(v, bool)]
    return sum(flow * factor ** -period
               for period, flow in enumerate(flows, start=1))
@excel_math_func
def odd(value):
    """ODD: round away from zero to the nearest odd integer.

    Excel reference: https://support.microsoft.com/en-us/office/
      odd-function-deae64eb-e08a-4c88-8b40-6d0b42575c98
    """
    magnitude = math.ceil((abs(value) - 1) / 2) * 2 + 1
    return math.copysign(magnitude, value)
@excel_math_func
def power(number, power):
    """POWER: *number* raised to *power*.

    Excel reference: https://support.microsoft.com/en-us/office/
      POWER-function-D3F2908B-56F4-4C3F-895A-07FB519C362A
    """
    if number == 0 and power == 0:
        # Excel defines 0 ** 0 as #N/A
        return NA_ERROR
    try:
        return number ** power
    except ZeroDivisionError:
        # zero raised to a negative power
        return DIV0
@excel_math_func
def pv(rate, nper, pmt, fv=0, type_=0):
    """PV: present value of an annuity of *nper* payments of *pmt* at
    interest *rate*, plus an optional future value *fv*.

    Excel reference: https://support.microsoft.com/en-us/office/
      pv-function-23879d31-0e02-4321-be01-da16e8168cbd
    """
    if rate == 0:
        # no discounting: just the negated summed payments
        return -fv - pmt * nper
    annuity = pmt * (1 + rate * type_) * ((1 + rate) ** nper - 1) / rate
    return 1 / (1 + rate) ** nper * (-fv - annuity)
@excel_math_func
def round_(number, num_digits=0):
    """ROUND: round half away from zero (Excel), not half-to-even (Python).

    Excel reference: https://support.microsoft.com/en-us/office/
      ROUND-function-c018c5d8-40fb-4053-90b1-b3e7f61a213c
    """
    num_digits = int(num_digits)
    if num_digits < 0:
        # left of the decimal point, Python's round() is used as-is
        # see https://docs.python.org/2/library/functions.html#round
        # and https://gist.github.com/ejamesc/cedc886c5f36e2d075c5
        return round(number, num_digits)
    quant = Decimal(repr(pow(10, -num_digits)))
    # repr() avoids binary-float artifacts such as 2.675 -> 2.67499999...
    return float(Decimal(repr(number)).quantize(quant, rounding=ROUND_HALF_UP))
def _round(number, num_digits, rounding):
num_digits = int(num_digits)
quant = Decimal(f'1E{"+-"[num_digits >= 0]}{abs(num_digits)}')
return float(Decimal(repr(number)).quantize(quant, rounding=rounding))
@excel_math_func
def rounddown(number, num_digits):
    """ROUNDDOWN: round toward zero to *num_digits* decimal places.

    Excel reference: https://support.microsoft.com/en-us/office/
      ROUNDDOWN-function-2EC94C73-241F-4B01-8C6F-17E6D7968F53
    """
    return _round(number, num_digits, ROUND_DOWN)
@excel_math_func
def roundup(number, num_digits):
    """ROUNDUP: round away from zero to *num_digits* decimal places.

    Excel reference: https://support.microsoft.com/en-us/office/
      ROUNDUP-function-F8BC9B23-E795-47DB-8703-DB171D0C42A7
    """
    return _round(number, num_digits, ROUND_UP)
@excel_math_func
def sign(value):
    """SIGN: -1, 0 or 1 according to the sign of *value*.

    Excel reference: https://support.microsoft.com/en-us/office/
      sign-function-109c932d-fcdc-4023-91f1-2dd0e916a1d8
    """
    if value < 0:
        return -1
    return 1 if value else 0
def sum_(*args):
    """SUM: add the numeric cells in *args*; error codes propagate."""
    values = _numerics(*args)

    # _numerics returns an error-code string if one was present
    if isinstance(values, str):
        return values

    # if no non numeric cells, return zero (is what excel does)
    return sum(values)
def sumif(rng, criteria, sum_range=None):
    """SUMIF: sum the cells of *sum_range* whose corresponding *rng* cell
    matches *criteria*; sums *rng* itself when no sum_range is given.

    Excel reference: https://support.microsoft.com/en-us/office/
      SUMIF-function-169b8c99-c05c-4483-a712-1697a653039b

    WARNING: Excel allows sum_range to differ in size/shape from rng
    (aligned at its top-left cell); that behaviour is not implemented
    here -- both ranges must match.
    """
    target = rng if sum_range is None else sum_range
    return sumifs(target, rng, criteria)
def sumifs(sum_range, *args):
    """SUMIFS: sum cells of *sum_range* matching every (range, criteria)
    pair in *args*.

    Excel reference: https://support.microsoft.com/en-us/office/
      SUMIFS-function-C9E748F5-7EA7-455D-9406-611CEBCE642B
    """
    if not list_like(sum_range):
        # promote a scalar to a 1x1 range
        sum_range = ((sum_range, ), )
    coords = handle_ifs(args, sum_range)

    # A returned string is an error code
    if isinstance(coords, str):
        return coords

    matched = (sum_range[row][col] for row, col in coords)
    return sum(_numerics(matched, keep_bools=True))
def sumproduct(*args):
    """SUMPRODUCT: element-wise product of the argument arrays, summed."""
    # Excel reference: https://support.microsoft.com/en-us/office/
    #   SUMPRODUCT-function-16753E75-9F68-4874-94AC-4D2145A2FD2E

    # find any errors
    error = next((i for i in flatten(args) if i in ERROR_CODES), None)
    if error:
        return error

    # verify array sizes match
    sizes = set()
    for arg in args:
        if not isinstance(arg, tuple):
            if all(not isinstance(arg, tuple) for arg in args):
                # the all scalers case is valid.
                # non-numeric scalars (and bools) count as zero
                values = (
                    x if isinstance(x, (float, int, type(None))) and not isinstance(x, bool) else 0
                    for x in args
                )
                try:
                    return prod(values)
                except TypeError:
                    pass
            # mixed scalar/array arguments, or a scalar product that
            # failed, is a #VALUE! error
            return VALUE_ERROR
        assert is_array_arg(arg)
        sizes.add((len(arg), len(arg[0])))
    if len(sizes) != 1:
        # all arrays must share the same (rows, cols) shape
        return VALUE_ERROR

    # put the values into numpy vectors
    # (non-numeric cells and bools are treated as zero)
    values = np.array(tuple(tuple(
        x if isinstance(x, (float, int)) and not isinstance(x, bool) else 0
        for x in flatten(arg)) for arg in args))

    # return the sum product
    return np.sum(np.prod(values, axis=0))
@excel_math_func
def trunc(number, num_digits=0):
    """TRUNC: drop fractional digits beyond *num_digits* without rounding.

    Excel reference: https://support.microsoft.com/en-us/office/
      TRUNC-function-8B86A64C-3127-43DB-BA14-AA5CEB292721
    """
    scale = 10 ** int(num_digits)
    return int(number * scale) / scale
# Older mappings for excel functions that match Python built-in and keywords
# (kept so code importing the legacy names keeps working)
x_abs = abs_
xatan2 = atan2_
x_int = int_
x_round = round_
xsum = sum_
|
dgorissen/pycel
|
src/pycel/excellib.py
|
Python
|
gpl-3.0
| 12,274
|
"""
Measure resonators, one at a time, with the readout tone centered in the filterbank bin.
"""
from __future__ import division
import time
import numpy as np
from kid_readout.roach import analog, calculate, hardware_tools, tools
from kid_readout.measurement import acquire, basic
from kid_readout.equipment import hardware, starcryo_temps
from equipment.srs import lockin
from equipment.custom import mmwave_source
from kid_readout.settings import LOCKIN_SERIAL_PORT
acquire.show_settings()
acquire.show_git_status()
import logging
logger = acquire.get_script_logger(__file__, level=logging.DEBUG)
# Parameters
suffix = 'test'
attenuations = [0]  # DAC attenuator settings to iterate over
f_center = 1e6 * np.array([3420.5])  # resonator frequencies, in Hz
fractional_frequency_shift = 0
f_center *= (1 + fractional_frequency_shift)
df_baseband_target = 60e3  # desired baseband tone spacing, in Hz
f_sweep_span = 2e6  # The total span of the baseband tones
f_lo_spacing = 2.5e3  # This is the smallest resolution available
f_baseband_minimum = 100e6  # Keep the tones away from the LO by at least this frequency.
sweep_length_seconds = 0.01

# Hardware
temperature = starcryo_temps.Temperature()
lock = lockin.SR830(serial_device=LOCKIN_SERIAL_PORT)
lock.identification  # This seems to be necessary to wake up the lockin
mmw = mmwave_source.MMWaveSource()
mmw.set_attenuator_ticks(0, 0)
mmw.multiplier_input = 'thermal'
mmw.ttl_modulation_source = "roach_2"
mmw.waveguide_twist_angle = 0
conditioner = analog.HeterodyneMarkII()
hw = hardware.Hardware(temperature, lock, mmw, conditioner)
ri = hardware_tools.r2h14_with_mk2(initialize=True, use_config=False)
ri.set_modulation_output('high')
ri.iq_delay = -1
ri.adc_valon.set_ref_select(0)  # internal
assert np.all(ri.adc_valon.get_phase_locks())

# Calculate sweep parameters, LO and baseband sweep frequencies
ri_state = ri.state
# Pick the power-of-two tone length whose bin spacing is closest to
# df_baseband_target.
tone_sample_exponent = int(np.round(np.log2(ri_state.adc_sample_rate / df_baseband_target)))
df_baseband = ri_state.adc_sample_rate / 2 ** tone_sample_exponent
num_sweep_tones = int(f_sweep_span / df_baseband)
f_baseband = f_baseband_minimum + ri.state.adc_sample_rate / 2 ** tone_sample_exponent * np.arange(num_sweep_tones)
# One LO setting per resonator, rounded to the available LO resolution.
f_lo_center = f_lo_spacing * np.round((f_center - f_baseband.mean()) / f_lo_spacing)
logger.info("Sweep using {:d} tones spanning {:.1f} MHz with resolution {:.0f} Hz (2^{:d} samples)".format(
    num_sweep_tones, 1e-6 * f_baseband.ptp(), df_baseband, tone_sample_exponent))

# Run
npd = acquire.new_npy_directory(suffix=suffix)
tic = time.time()
try:
    for lo_index, f_lo in enumerate(f_lo_center):
        assert np.all(ri.adc_valon.get_phase_locks())
        # NOTE(review): frequencies are handed to the roach in MHz (the
        # 1e-6 factors) while this script computes in Hz -- confirm
        # against the roach API before changing units.
        tools.set_and_attempt_external_phase_lock(ri=ri, f_lo=1e-6 * f_lo, f_lo_spacing=1e-6 * f_lo_spacing)
        for attenuation_index, attenuation in enumerate(attenuations):
            ri.set_dac_attenuator(attenuation)
            ri.set_tone_baseband_freqs(freqs=1e-6 * np.array([f_baseband[0]]), nsamp=2 ** tone_sample_exponent)
            time.sleep(1)  # presumably lets the hardware settle -- TODO confirm
            npd.write(ri.get_adc_measurement())
            tools.optimize_fft_gain(ri, fraction_of_maximum=0.5)
            state = hw.state()
            state['lo_index'] = lo_index
            state['attenuation_index'] = attenuation_index
            sweep = acquire.run_sweep(ri=ri, tone_banks=1e-6 * (f_lo + f_baseband[:, np.newaxis]),
                                      num_tone_samples=2 ** tone_sample_exponent, length_seconds=sweep_length_seconds,
                                      state=state, verbose=True)[0]
            npd.write(sweep)
finally:
    # Always restore the modulation output and close the data directory,
    # even when the sweep is interrupted.
    ri.set_modulation_output('high')
    npd.close()
    print("Wrote {}".format(npd.root_path))
    print("Elapsed time {:.0f} minutes.".format((time.time() - tic) / 60))
|
ColumbiaCMB/kid_readout
|
apps/data_taking_scripts/cooldown/2017-11-23_starcryo/r2h14_test.py
|
Python
|
bsd-2-clause
| 3,665
|
# CTK: Cherokee Toolkit
#
# Authors:
# Alvaro Lopez Ortega <alvaro@alobbs.com>
#
# Copyright (C) 2009 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
from Box import Box
from Table import Table
from Widget import Widget
from RawHTML import RawHTML
from Submitter import Submitter
from Container import Container
from HiddenField import HiddenField
from util import *
# Wrapper that gives the rendered rows their common styling hook
HTML_TABLE = """
<div class="propstable">%s</div>
"""

# Template for a single row; filled with the keys id, props, title,
# widget_html and comment
HTML_ENTRY = """
<div class="entry" id="%(id)s" %(props)s>
<div class="title">%(title)s</div>
<div class="widget">%(widget_html)s</div>
<div class="comment">%(comment)s</div>
<div class="after"></div>
</div>
"""

# Stylesheet appended to render.headers by PropsAuto.Render()
HEADERS = ['<link rel="stylesheet" type="text/css" href="/CTK/css/CTK.css" />']
class PropsTableEntry (Widget):
    """Property Table Entry: one title/widget/comment row."""

    def __init__ (self, title, widget, comment, props=None):
        Widget.__init__ (self)
        self.title = title
        self.widget = widget
        self.comment = comment
        # fall back to an empty dict when no props were given
        self.props = ({}, props)[bool(props)]

        # a caller-supplied 'id' overrides the one assigned by Widget
        if 'id' in self.props:
            self.id = self.props.pop('id')

    def Render (self):
        # Render child
        if self.widget:
            w_rend = self.widget.Render()
        else:
            w_rend = Container().Render()

        # Detach the child's HTML: it is re-wrapped in HTML_ENTRY below,
        # while its js/headers/helps are merged via render addition.
        w_html = w_rend.html
        w_rend.html = ''

        # Mix both
        render = Widget.Render (self)
        render += w_rend

        props = {'id': self.id,
                 'props': props_to_str(self.props),
                 'title': self.title,
                 'widget_html': w_html,
                 'comment': self.comment}
        render.html += HTML_ENTRY %(props)
        return render
class PropsTable (Box):
    """Property Table: a styled Box that collects PropsTableEntry rows."""

    def __init__ (self, **kwargs):
        # NOTE(review): **kwargs is accepted but not forwarded to Box --
        # confirm whether that is intentional.
        Box.__init__ (self, {'class': "propstable"})

    def Add (self, title, widget, comment):
        # append one title/widget/comment row
        self += PropsTableEntry (title, widget, comment)
class PropsTableAuto (PropsTable):
    """Property Table: Adds Submitters and constants"""

    def __init__ (self, url, **kwargs):
        PropsTable.__init__ (self, **kwargs)
        self._url = url
        # hidden key/value fields submitted along with every entry
        self.constants = {}

    def AddConstant (self, key, val):
        self.constants[key] = val

    def Add (self, title, widget, comment):
        """Wrap *widget* in a Submitter (plus hidden constant fields,
        if any) before adding the row."""
        submit = Submitter (self._url)

        if self.constants:
            box = Container()
            box += widget
            for key in self.constants:
                box += HiddenField ({'name': key, 'value': self.constants[key]})
            submit += box
        else:
            submit += widget

        # PropsTable.Add() returns None; the return is kept for symmetry
        return PropsTable.Add (self, title, submit, comment)
class PropsAuto (Widget):
    """Property table rendered straight from HTML_ENTRY, optionally
    wrapping each widget in its own Submitter."""

    def __init__ (self, url, **kwargs):
        Widget.__init__ (self, **kwargs)
        self._url = url
        # hidden key/value fields bundled with every submitted entry
        self.constants = {}
        # queued (title, widget, comment, use_submitter) tuples
        self.entries = []

    def AddConstant (self, key, val):
        self.constants[key] = val

    def Add (self, title, widget, comment, use_submitter=True):
        """Queue a row; constants (if any) are attached to the widget
        as hidden fields inside a Container."""
        # No constants, just the widget
        if not self.constants:
            self.entries.append ((title, widget, comment, use_submitter))
            return

        # Wrap it
        box = Container()
        box += widget
        for key in self.constants:
            box += HiddenField ({'name': key, 'value': self.constants[key]})
        self.entries.append ((title, box, comment, use_submitter))

    def Render (self):
        render = Widget.Render(self)

        for e in self.entries:
            title, widget, comment, use_submitter = e
            # NOTE: HTML_ENTRY is filled from locals() below, so the local
            # names id, props, title, comment and widget_html must match
            # the %(...)s placeholders in the template -- do not rename.
            id = self.id
            props = ''
            if use_submitter:
                submit = Submitter (self._url)
                submit += widget
            else:
                submit = widget
            widget_r = submit.Render()
            widget_html = widget_r.html
            html = HTML_ENTRY %(locals())
            render.html += html
            render.js += widget_r.js
            render.headers += widget_r.headers
            render.helps += widget_r.helps

        render.html = HTML_TABLE %(render.html)
        render.headers += HEADERS
        return render
|
chetan/cherokee
|
admin/CTK/CTK/PropsTable.py
|
Python
|
gpl-2.0
| 4,826
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# The Qubes OS Project, http://www.qubes-os.org
#
# Copyright (C) 2015 Marek Marczykowski-Górecki <marmarek@invisiblethingslab.com>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
#
import os
import shutil
import subprocess
import tempfile
import unittest

import qubes.qubes
import qubes.tests
from qubes.qubes import QubesVmCollection
VM_PREFIX = "test-"
@unittest.skipUnless(os.path.exists('/usr/bin/rpmsign') and
                     os.path.exists('/usr/bin/rpmbuild'),
                     'rpm-sign and/or rpm-build not installed')
class TC_00_Dom0Upgrade(qubes.tests.QubesTestCase):
    """Integration tests for qubes-dom0-update.

    A throw-away GPG key signs locally built RPMs, which are served from
    a 'test' yum repository created inside the update VM.
    """

    cleanup_paths = []
    pkg_name = 'qubes-test-pkg'
    # restrict updates to the local test repo and copy packages into dom0
    dom0_update_common_opts = ['--disablerepo=*', '--enablerepo=test',
                               '--setopt=test.copy_local=1']

    @classmethod
    def generate_key(cls, keydir):
        """Create a test RSA signing key in *keydir*, export its public
        part to pubkey.asc and return the short (8 hex digit) key id."""
        gpg_opts = ['gpg', '--quiet', '--no-default-keyring',
                    '--homedir', keydir]
        p = subprocess.Popen(gpg_opts + ['--gen-key', '--batch'],
                             stdin=subprocess.PIPE,
                             stderr=open(os.devnull, 'w'))
        # batch key-generation script (no format placeholders needed; the
        # previous .format(keydir=...) call was a no-op and was removed)
        p.stdin.write('''
Key-Type: RSA
Key-Length: 1024
Key-Usage: sign
Name-Real: Qubes test
Expire-Date: 0
%commit
''')
        p.stdin.close()
        p.wait()

        subprocess.check_call(gpg_opts + ['-a', '--export',
                                          '--output',
                                          os.path.join(keydir, 'pubkey.asc')])
        p = subprocess.Popen(gpg_opts + ['--with-colons', '--list-keys'],
                             stdout=subprocess.PIPE)
        for line in p.stdout.readlines():
            fields = line.split(':')
            if fields[0] == 'pub':
                # short key id: last 8 hex digits of the key id field
                return fields[4][-8:].lower()
        raise RuntimeError

    @classmethod
    def setUpClass(cls):
        super(TC_00_Dom0Upgrade, cls).setUpClass()
        cls.tmpdir = tempfile.mkdtemp()
        cls.cleanup_paths += [cls.tmpdir]
        cls.keyid = cls.generate_key(cls.tmpdir)
        # /etc/yum.repos.d is root-owned, so write the repo file via sudo dd
        p = subprocess.Popen(['sudo', 'dd',
                              'status=none', 'of=/etc/yum.repos.d/test.repo'],
                             stdin=subprocess.PIPE)
        p.stdin.write('''
[test]
name = Test
baseurl = file:///tmp/repo
enabled = 1
''')
        p.stdin.close()
        p.wait()

    @classmethod
    def tearDownClass(cls):
        subprocess.check_call(['sudo', 'rm', '-f',
                               '/etc/yum.repos.d/test.repo'])
        # renamed from `dir`, which shadowed the builtin
        for path in cls.cleanup_paths:
            shutil.rmtree(path)
        cls.cleanup_paths = []

    def setUp(self):
        """Create a dedicated update VM, point dom0 updates at it and
        import the test public key."""
        self.qc = QubesVmCollection()
        self.qc.lock_db_for_writing()
        self.qc.load()
        self.updatevm = self.qc.add_new_vm(
            "QubesProxyVm",
            name="%supdatevm" % VM_PREFIX,
            template=self.qc.get_default_template())
        self.updatevm.create_on_disk(verbose=False)
        self.saved_updatevm = self.qc.get_updatevm_vm()
        self.qc.set_updatevm_vm(self.updatevm)
        self.qc.save()
        self.qc.unlock_db()
        # the package may be left over from a previous (failed) run
        subprocess.call(['sudo', 'rpm', '-e', self.pkg_name],
                        stderr=open(os.devnull, 'w'))
        subprocess.check_call(['sudo', 'rpm', '--import',
                               os.path.join(self.tmpdir, 'pubkey.asc')])
        self.updatevm.start()

    def remove_vms(self, vms):
        """Restore the saved update VM, then destroy every VM in *vms*
        (names or VM objects), best-effort on shutdown/removal errors."""
        self.qc.lock_db_for_writing()
        self.qc.load()
        self.qc.set_updatevm_vm(self.qc[self.saved_updatevm.qid])
        for vm in vms:
            if isinstance(vm, str):
                vm = self.qc.get_vm_by_name(vm)
            else:
                vm = self.qc[vm.qid]
            if vm.is_running():
                try:
                    vm.force_shutdown()
                except Exception:
                    # bare except narrowed; still deliberately best-effort
                    pass
            try:
                vm.remove_from_disk()
            except OSError:
                pass
            self.qc.pop(vm.qid)
        self.qc.save()
        self.qc.unlock_db()

    def tearDown(self):
        vmlist = [vm for vm in self.qc.values() if vm.name.startswith(
            VM_PREFIX)]
        self.remove_vms(vmlist)

        subprocess.call(['sudo', 'rpm', '-e', self.pkg_name],
                        stderr=open(os.devnull, 'w'))
        subprocess.call(['sudo', 'rpm', '-e', 'gpg-pubkey-{}'.format(
            self.keyid)], stderr=open(os.devnull, 'w'))

        for pkg in os.listdir(self.tmpdir):
            if pkg.endswith('.rpm'):
                # os.listdir() yields bare names: join with the directory
                # (previously this tried to unlink relative to the CWD)
                os.unlink(os.path.join(self.tmpdir, pkg))

    def create_pkg(self, dir, name, version):
        """Build and sign a minimal RPM in *dir*; return its path."""
        spec_path = os.path.join(dir, name + '.spec')
        with open(spec_path, 'w') as spec:
            spec.write(
                '''
Name: {name}
Summary: Test Package
Version: {version}
Release: 1
Vendor: Invisible Things Lab
License: GPL
Group: Qubes
URL: http://www.qubes-os.org
%description
Test package
%install
%files
'''.format(name=name, version=version)
            )
        subprocess.check_call(
            ['rpmbuild', '--quiet', '-bb', '--define', '_rpmdir {}'.format(dir),
             spec_path])
        pkg_path = os.path.join(dir, 'x86_64',
                                '{}-{}-1.x86_64.rpm'.format(name, version))
        # NOTE(review): presumably this keeps rpm/gpg from prompting on
        # the terminal during --addsign -- confirm.
        subprocess.check_call(['sudo', 'chmod', 'go-rw', '/dev/tty'])
        try:
            subprocess.check_call(
                ['rpm', '--quiet', '--define=_gpg_path {}'.format(dir),
                 '--define=_gpg_name {}'.format("Qubes test"),
                 '--addsign', pkg_path],
                stdin=open(os.devnull),
                stdout=open(os.devnull, 'w'),
                stderr=subprocess.STDOUT)
        finally:
            # always restore /dev/tty permissions, even if signing fails
            subprocess.check_call(['sudo', 'chmod', 'go+rw', '/dev/tty'])
        return pkg_path

    def send_pkg(self, filename):
        """Copy *filename* into the update VM and (re)build the repo."""
        p = self.updatevm.run('mkdir -p /tmp/repo; cat > /tmp/repo/{}'.format(
            os.path.basename(filename)), passio_popen=True)
        p.stdin.write(open(filename).read())
        p.stdin.close()
        p.wait()
        self.updatevm.run('cd /tmp/repo; createrepo .', wait=True)

    def test_000_update(self):
        """Upgrading from 1.0 to 2.0 replaces the installed package."""
        filename = self.create_pkg(self.tmpdir, self.pkg_name, '1.0')
        subprocess.check_call(['sudo', 'rpm', '-i', filename])
        filename = self.create_pkg(self.tmpdir, self.pkg_name, '2.0')
        self.send_pkg(filename)

        logpath = os.path.join(self.tmpdir, 'dom0-update-output.txt')
        try:
            subprocess.check_call(['sudo', 'qubes-dom0-update', '-y'] +
                                  self.dom0_update_common_opts,
                                  stdout=open(logpath, 'w'),
                                  stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            self.fail("qubes-dom0-update failed: " + open(
                logpath).read())

        retcode = subprocess.call(['rpm', '-q', '{}-1.0'.format(
            self.pkg_name)], stdout=open(os.devnull, 'w'))
        self.assertEqual(retcode, 1, 'Package {}-1.0 still installed after '
                                     'update'.format(self.pkg_name))
        retcode = subprocess.call(['rpm', '-q', '{}-2.0'.format(
            self.pkg_name)], stdout=open(os.devnull, 'w'))
        self.assertEqual(retcode, 0, 'Package {}-2.0 not installed after '
                                     'update'.format(self.pkg_name))

    def test_010_instal(self):
        # NOTE(review): method name is a typo for "install"; kept as-is
        # so the test id does not change in CI history.
        """A fresh, properly signed package installs cleanly."""
        filename = self.create_pkg(self.tmpdir, self.pkg_name, '1.0')
        self.send_pkg(filename)

        logpath = os.path.join(self.tmpdir, 'dom0-update-output.txt')
        try:
            subprocess.check_call(['sudo', 'qubes-dom0-update', '-y'] +
                                  self.dom0_update_common_opts +
                                  [self.pkg_name],
                                  stdout=open(logpath, 'w'),
                                  stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError:
            self.fail("qubes-dom0-update failed: " + open(
                logpath).read())

        retcode = subprocess.call(['rpm', '-q', '{}-1.0'.format(
            self.pkg_name)], stdout=open(os.devnull, 'w'))
        self.assertEqual(retcode, 0, 'Package {}-1.0 not installed'.format(
            self.pkg_name))

    def test_020_install_wrong_sign(self):
        """A package signed with an unknown key must be rejected."""
        subprocess.call(['sudo', 'rpm', '-e', 'gpg-pubkey-{}'.format(
            self.keyid)])
        filename = self.create_pkg(self.tmpdir, self.pkg_name, '1.0')
        self.send_pkg(filename)

        logpath = os.path.join(self.tmpdir, 'dom0-update-output.txt')
        try:
            subprocess.check_call(['sudo', 'qubes-dom0-update', '-y'] +
                                  self.dom0_update_common_opts +
                                  [self.pkg_name],
                                  stdout=open(logpath, 'w'),
                                  stderr=subprocess.STDOUT)
            self.fail("qubes-dom0-update unexpectedly succeeded: " + open(
                logpath).read())
        except subprocess.CalledProcessError:
            # expected: the update must fail on the bad signature
            pass

        retcode = subprocess.call(['rpm', '-q', '{}-1.0'.format(
            self.pkg_name)], stdout=open(os.devnull, 'w'))
        self.assertEqual(retcode, 1,
                         'Package {}-1.0 installed although '
                         'signature is invalid'.format(self.pkg_name))

    def test_030_install_unsigned(self):
        """An unsigned package must be rejected."""
        filename = self.create_pkg(self.tmpdir, self.pkg_name, '1.0')
        subprocess.check_call(['rpm', '--delsign', filename],
                              stdout=open(os.devnull, 'w'),
                              stderr=subprocess.STDOUT)
        self.send_pkg(filename)

        logpath = os.path.join(self.tmpdir, 'dom0-update-output.txt')
        try:
            subprocess.check_call(['sudo', 'qubes-dom0-update', '-y'] +
                                  self.dom0_update_common_opts +
                                  [self.pkg_name],
                                  stdout=open(logpath, 'w'),
                                  stderr=subprocess.STDOUT
                                  )
            self.fail("qubes-dom0-update unexpectedly succeeded: " + open(
                logpath).read())
        except subprocess.CalledProcessError:
            # expected: the update must fail on the missing signature
            pass

        retcode = subprocess.call(['rpm', '-q', '{}-1.0'.format(
            self.pkg_name)], stdout=open(os.devnull, 'w'))
        self.assertEqual(retcode, 1,
                         'UNSIGNED package {}-1.0 installed'.format(
                             self.pkg_name))
|
adrelanos/qubes-core-admin
|
tests/dom0_update.py
|
Python
|
gpl-2.0
| 11,230
|
from blaze import byteproto as proto
from . import lldescriptors, llindexers
#------------------------------------------------------------------------
# Data Descriptor
#------------------------------------------------------------------------
class DataDescriptor(object):
    """ DataDescriptors are the underlying, low-level references to data
    that is returned by manifest Indexable objects (i.e. objects backed
    by real data of some variety).

    Whereas traditional data interfaces use iterators to programmatically
    retrieve data, Blaze preserves the ability to expose data in bulk
    form at as low a level as possible.
    """

    def __init__(self, id, nbytes, datashape):
        """Record the descriptor's identifier, byte size and datashape."""
        # XXX: whatever, just something unique for now
        self.id = id
        self.nbytes = nbytes
        self.datashape = datashape

    def __str__(self):
        return self.id

    def __repr__(self):
        return self.id

    #------------------------------------------------------------------------
    # Generic adapters
    #------------------------------------------------------------------------

    # From (roughly) least to most preferable

    def asstrided(self, copy=False):
        """ Returns the buffer as a memoryview. If **copy** is False, then the
        memoryview just references the underlying data if possible. If
        **copy** is True, then the memoryview will always expose a new copy of
        the data.

        In a C level implementation of the DataDescriptor interface, this
        returns a (void*) pointer to the data.
        """
        raise NotImplementedError

    def asindex(self):
        """ Returns an indexer that can index the source with given
        coordinates.
        """
        # Fixed: this previously fell through and silently returned None;
        # raise like the other abstract adapters so subclasses must override.
        raise NotImplementedError

    def as_tile_indexer(self, copy=False):
        """ Returns the contents of the buffer as an indexer returning
        N-dimensional memoryviews.

        A 1D chunk is a degenerate tile.

        If **copy** is False, then tries to return just views of
        data if possible.
        """
        raise NotImplementedError

    def as_chunked_iterator(self, copy=False):
        """Return a ChunkIterator.
        """
        raise NotImplementedError

    # NOTE: Buffered streams can be thought of as 1D tiles.
    # TODO: Remove stream interface and expose tiling properties in graph
    # TODO: metadata

    def asstream(self):
        """ Returns a Python iterable which returns **chunksize** elements
        at a time from the buffer. This is identical to the iterable interface
        around Buffer objects.

        If **chunksize** is greater than 1, then returns a memoryview of the
        elements if they are contiguous, or a Tuple otherwise.
        """
        raise NotImplementedError

    def asstreamlist(self):
        """ Returns an iterable of Stream objects, which should be read
        sequentially (i.e. after exhausting the first stream, the second
        stream is read, etc.)
        """
        raise NotImplementedError
#------------------------------------------------------------------------
# Python Reference Implementations
#------------------------------------------------------------------------
# struct Buffer {
# int length;
# char* format;
# int* shape;
# int* strides;
# int readonly;
# }
class Buffer(object):
    """ Describes a region of memory. Implements the memoryview interface.
    """
    # Tag distinguishing descriptor flavours (cf. Stream.desctype).
    desctype = "buffer"

    # TODO: Add support for event callbacks when certain ranges are
    # written
    # TODO: Why do we want this? ~Stephen
    #callbacks = Dict(Tuple, Function)

    def tobytes(self):
        # Placeholder: not implemented yet, implicitly returns None.
        pass

    def tolist(self):
        # Placeholder: not implemented yet, implicitly returns None.
        pass
# struct Stream {
# int length;
# char* format;
# int chunksize;
# int (*next)();
# }
class Stream(DataDescriptor):
    """ Describes a data item that must be retrieved by calling a function
    to load data into memory. Represents a scan over data. The returned
    data is a copy and should be considered to be owned by the caller.
    """
    # Tag distinguishing descriptor flavours (cf. Buffer.desctype).
    desctype = "stream"
#------------------------------------------------------------------------
# Data descriptor implementations
#------------------------------------------------------------------------
# struct Chunk {
# int pointer;
# int* shape;
# int* strides;
# int itemsize;
# int readonly;
# }
class SqlDataDescriptor(DataDescriptor):
    """Descriptor whose data is produced by running a SQL query.

    Holds a DB-API connection/cursor and a query string; `asbuffer`
    executes the query and hands back the first result row.
    """

    def __init__(self, id, conn, query):
        # NOTE: intentionally does not call DataDescriptor.__init__ —
        # only the identifier and the query source are tracked here.
        self.id = id
        self.conn = conn
        self.query = query

    def asbuffer(self, copy=False):
        """Execute the stored query and return its first result row."""
        cursor = self.conn
        cursor.execute(self.query)
        return cursor.fetchone()
class CArrayDataDescriptor(DataDescriptor):
    """Descriptor wrapping a carray container.

    Caches the container's itemsize and exposes chunked iteration via
    the low-level llindexers machinery.
    """

    def __init__(self, id, nbytes, datashape, carray):
        # Explicit base-class call instead of super() — same effect.
        DataDescriptor.__init__(self, id, nbytes, datashape)
        self.carray = carray
        self.itemsize = carray.itemsize

    def as_chunked_iterator(self, copy=False):
        """Return a ChunkIterator over the wrapped carray."""
        return llindexers.CArrayChunkIterator(self.carray, self.datashape)
|
davidcoallier/blaze
|
blaze/sources/descriptors/datadescriptor.py
|
Python
|
bsd-2-clause
| 5,036
|
# -*- coding: utf-8 -*-
"""
Test ImageOverlay
-----------------
"""
from jinja2 import Template
import folium
from folium import plugins
def test_image_overlay():
    """Round-trip an ImageOverlay through map rendering and check the
    generated data URL, the emitted Leaflet script and the map bounds."""
    rgba = [[[1, 0, 0, 1], [0, 0, 0, 0], [0, 0, 0, 0]],
            [[1, 1, 0, 0.5], [0, 0, 1, 1], [0, 0, 1, 1]]]
    folium_map = folium.Map()
    overlay = plugins.ImageOverlay(rgba, [[0, -180], [90, 180]],
                                   mercator_project=True)
    overlay.add_to(folium_map)
    folium_map._repr_html_()
    rendered = folium_map._parent.render()

    # The array must have been encoded into an inline base64 PNG URL.
    expected_url = ('data:image/png;base64,'
                    'iVBORw0KGgoAAAANSUhEUgAAAAMAAAACCAYAAACddGYaAAA'
                    'AF0lEQVR42mP4z8AARFDw/z/DeiA5H4QBV60H6ABl9ZIAAAAASUVORK5CYII=')
    assert overlay.url == expected_url

    # The overlay's script block must appear verbatim in the page.
    tmpl = Template("""
            var {{this.get_name()}} = L.imageOverlay(
                '{{ this.url }}',
                {{ this.bounds }},
                {{ this.options }}
                ).addTo({{this._parent.get_name()}});
        """)
    assert tmpl.render(this=overlay) in rendered

    map_bounds = folium_map.get_bounds()
    assert map_bounds == [[0, -180], [90, 180]], map_bounds
|
talespaiva/folium
|
tests/plugins/test_image_overlay.py
|
Python
|
mit
| 1,170
|
from __future__ import absolute_import
import responses
from exam import fixture
from django.contrib.auth.models import AnonymousUser
from django.test import RequestFactory
from sentry.plugins.bases.issue2 import PluginError
from sentry.testutils import PluginTestCase
from sentry.utils import json
from social_auth.models import UserSocialAuth
from sentry_plugins.github.plugin import GitHubPlugin
class GitHubPluginTest(PluginTestCase):
    """Tests for the sentry GitHub issue plugin: configuration state,
    issue labels/URLs, and the create/link issue flows with the GitHub
    API mocked via the `responses` library."""

    @fixture
    def plugin(self):
        # Plugin under test; `exam.fixture` memoizes it per test instance.
        return GitHubPlugin()

    @fixture
    def request(self):
        # Factory for synthetic Django requests used by the issue flows.
        return RequestFactory()

    def test_conf_key(self):
        assert self.plugin.conf_key == "github"

    def test_entry_point(self):
        self.assertPluginInstalled("github", self.plugin)

    def test_get_issue_label(self):
        group = self.create_group(message="Hello world", culprit="foo.bar")
        assert self.plugin.get_issue_label(group, 1) == "GH-1"

    def test_get_issue_url(self):
        self.plugin.set_option("repo", "getsentry/sentry", self.project)
        group = self.create_group(message="Hello world", culprit="foo.bar")
        assert self.plugin.get_issue_url(group, 1) == "https://github.com/getsentry/sentry/issues/1"

    def test_is_configured(self):
        # The plugin only counts as configured once a repo option is set.
        assert self.plugin.is_configured(None, self.project) is False
        self.plugin.set_option("repo", "getsentry/sentry", self.project)
        assert self.plugin.is_configured(None, self.project) is True

    @responses.activate
    def test_create_issue(self):
        # Mock GitHub's "create issue" endpoint.
        responses.add(
            responses.POST,
            "https://api.github.com/repos/getsentry/sentry/issues",
            json={"number": 1, "title": "Hello world"},
        )

        self.plugin.set_option("repo", "getsentry/sentry", self.project)
        group = self.create_group(message="Hello world", culprit="foo.bar")

        request = self.request.get("/")
        request.user = AnonymousUser()
        form_data = {"title": "Hello", "description": "Fix this."}
        # Anonymous users must not be able to create issues.
        with self.assertRaises(PluginError):
            self.plugin.create_issue(request, group, form_data)

        request.user = self.user
        self.login_as(self.user)
        # Attach a GitHub social-auth token so the API call is authorized.
        UserSocialAuth.objects.create(
            user=self.user, provider=self.plugin.auth_provider, extra_data={"access_token": "foo"}
        )

        assert self.plugin.create_issue(request, group, form_data) == 1
        # Inspect the outgoing API call: auth header and JSON payload.
        # (`request` is rebound here from the Django request to the
        # recorded HTTP request.)
        request = responses.calls[0].request
        assert request.headers["Authorization"] == "Bearer foo"
        payload = json.loads(request.body)
        assert payload == {"title": "Hello", "body": "Fix this.", "assignee": None}

    @responses.activate
    def test_link_issue(self):
        # Mock GitHub's issue-fetch and comment endpoints.
        responses.add(
            responses.GET,
            "https://api.github.com/repos/getsentry/sentry/issues/1",
            json={"number": 1, "title": "Hello world"},
        )
        responses.add(
            responses.POST,
            "https://api.github.com/repos/getsentry/sentry/issues/1/comments",
            json={"body": "Hello"},
        )

        self.plugin.set_option("repo", "getsentry/sentry", self.project)
        group = self.create_group(message="Hello world", culprit="foo.bar")

        request = self.request.get("/")
        request.user = AnonymousUser()
        form_data = {"comment": "Hello", "issue_id": "1"}
        # Anonymous users must not be able to link issues.
        with self.assertRaises(PluginError):
            self.plugin.link_issue(request, group, form_data)

        request.user = self.user
        self.login_as(self.user)
        UserSocialAuth.objects.create(
            user=self.user, provider=self.plugin.auth_provider, extra_data={"access_token": "foo"}
        )

        assert self.plugin.link_issue(request, group, form_data) == {"title": "Hello world"}
        # The comment POST is the last recorded call.
        request = responses.calls[-1].request
        assert request.headers["Authorization"] == "Bearer foo"
        payload = json.loads(request.body)
        assert payload == {"body": "Hello"}
|
beeftornado/sentry
|
tests/sentry_plugins/github/test_plugin.py
|
Python
|
bsd-3-clause
| 3,947
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
# Copyright (C) 2022 RERO.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Invenio module that adds administration panel to the system."""
import os
from setuptools import find_packages, setup
# Long description: README plus changelog.  Use context managers so the
# file handles are closed deterministically instead of being leaked
# until garbage collection.
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('CHANGES.rst') as history_file:
    history = history_file.read()

tests_require = [
    'invenio-theme>=1.3.4',
    'pytest-invenio>=1.4.0',
]

extras_require = {
    'docs': [
        'Sphinx>=4.2.0',
    ],
    'access': [
        'invenio-access>=1.0.0',
    ],
    'tests': tests_require,
}

# 'all' aggregates every optional dependency group.  Skip 'all' itself:
# the previous loop iterated .values() after inserting the 'all' key,
# so when it reached the 'all' list it extended it with its own
# contents, duplicating every requirement.
extras_require['all'] = []
for name, reqs in list(extras_require.items()):
    if name == 'all':
        continue
    extras_require['all'].extend(reqs)

setup_requires = [
    'Babel>=2.8',
    'pytest-runner>=2.6.2',
]

install_requires = [
    'Flask-Admin>=1.5.6',
    'Flask-Menu>=0.5.0',
    'Flask-Principal>=0.4.0',
    'importlib_metadata>=4.4',
    'invenio-accounts>=1.2.1',
    'invenio-base>=1.2.5',
    'invenio-db>=1.0.9',
]

packages = find_packages()


# Get the version string. Cannot be done with import!
g = {}
with open(os.path.join('invenio_admin', 'version.py'), 'rt') as fp:
    exec(fp.read(), g)
version = g['__version__']

setup(
    name='invenio-admin',
    version=version,
    description=__doc__,
    long_description=readme + '\n\n' + history,
    keywords='invenio admin flask-admin',
    license='MIT',
    author='CERN',
    author_email='info@inveniosoftware.org',
    url='https://github.com/inveniosoftware/invenio-admin',
    packages=packages,
    zip_safe=False,
    include_package_data=True,
    platforms='any',
    entry_points={
        'invenio_base.apps': [
            'invenio_admin = invenio_admin:InvenioAdmin',
        ],
        'invenio_base.blueprints': [
            'invenio_admin = invenio_admin.views:blueprint',
        ],
        'invenio_access.actions': [
            'admin_access = invenio_admin.permissions:action_admin_access',
        ]
    },
    extras_require=extras_require,
    install_requires=install_requires,
    setup_requires=setup_requires,
    tests_require=tests_require,
    classifiers=[
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: Implementation :: CPython',
        'Development Status :: 5 - Production/Stable',
    ],
)
|
inveniosoftware/invenio-admin
|
setup.py
|
Python
|
mit
| 2,852
|
# -*- coding: utf-8 -*-
""" Plexus (c) 2015 enen92
This file contains the main menu and the addon directory tree.
All the necessary modules are present in ~/resources/plexus directory
"""
import xbmc
import xbmcgui
import xbmcplugin
import xbmcvfs
import os
import hashlib
import sys
from plexusutils.pluginxbmc import *
from plexusutils.directoryhandle import *
from plexusutils.iofile import *
def my_streams_menu():
    """List the saved streams as directory items, then append the
    "add a new stream" entry at the bottom."""
    if not os.path.exists(mystrm_folder):
        xbmcvfs.mkdir(mystrm_folder)
    saved = os.listdir(mystrm_folder)
    if saved:
        for entry in saved:
            # Each file stores 'name|url|icon'.
            fields = readfile(os.path.join(mystrm_folder, entry)).split('|')
            if not fields:
                continue
            target = fields[1]
            if 'acestream://' in target or '.acelive' in target or '.torrent' in target:
                addDir(fields[0], target, 1, fields[2], 1, False)
            elif 'sop://' in target:
                addDir(fields[0], target, 2, fields[2], 1, False)
        xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_UNSORTED)
        xbmcplugin.addSortMethod(int(sys.argv[1]), xbmcplugin.SORT_METHOD_LABEL)
    addDir('[B][COLOR maroon]'+translate(30009)+'[/COLOR][/B]',MainURL,11,os.path.join(addonpath,art,'plus-menu.png'),1,False)
def add_stream(name='',url='',iconimage=''):
    """Save a stream to the local "my streams" folder.

    Two modes:
    * without name/url: interactively ask the user for a stream URL,
      an icon and a display name via Kodi dialogs, then persist it;
    * with name and url: persist the supplied values directly.

    The entry is stored as 'name|url|icon' in a file named after the
    md5 of 'name|url', matching the lookup scheme in remove_stream().
    """
    if not name or not url:
        # Interactive mode: ask for the stream URL first.
        keyb = xbmc.Keyboard('', translate(30010))
        keyb.doModal()
        if (keyb.isConfirmed()):
            stream = keyb.getText()
            if stream == '' : sys.exit(0)
            else:
                # Only acestream/acelive/sopcast sources are accepted.
                if 'acestream://' not in stream and '.acelive' not in stream and 'sop://' not in stream:
                    mensagemok(translate(40000),translate(30011))
                    sys.exit(0)
                else:
                    #icon: let the user browse for one, else pick a default
                    yes = xbmcgui.Dialog().yesno(translate(30000), translate(30012))
                    if yes:
                        iconimage = xbmcgui.Dialog().browse(1, translate(30013),'video','.png|.jpg|.jpeg|.gif',True)
                    else:
                        if 'acestream://' in stream or '.acelive' in stream or '.torrent' in stream:
                            iconimage = os.path.join(addonpath,'resources','art','acestream-menu-item.png')
                        elif 'sop://' in stream:
                            iconimage = os.path.join(addonpath,'resources','art','sopcast-menu-item.png')
                        else:
                            iconimage = ''
                    #name: ask for a display name
                    keyb = xbmc.Keyboard('', translate(30014))
                    keyb.doModal()
                    if (keyb.isConfirmed()):
                        name = keyb.getText()
                        if name == '' : sys.exit(0)
                        else:
                            #save under an md5-derived filename, then refresh
                            # NOTE(review): hashlib.md5 of a str only works on
                            # Python 2 — confirm the target Kodi runtime.
                            content = name + '|' + stream + '|' + iconimage
                            filename = hashlib.md5(name + '|' + stream).hexdigest() + '.txt'
                            save(os.path.join(mystrm_folder,filename),content)
                            xbmc.executebuiltin("Notification(%s,%s,%i,%s)" % (translate(30000), translate(30015), 1,os.path.join(addonpath,"icon.png")))
                            xbmc.executebuiltin("Container.Refresh")
    else:
        # Non-interactive mode: persist the supplied values directly.
        content = name + '|' + url + '|' + iconimage
        filename = hashlib.md5(name + '|' + url).hexdigest() + '.txt'
        save(os.path.join(mystrm_folder,filename),content)
        xbmc.executebuiltin("Notification(%s,%s,%i,%s)" % (translate(30000), translate(30015), 1,os.path.join(addonpath,"icon.png")))
        xbmc.executebuiltin("Container.Refresh")
def remove_stream(name,url):
    """Delete a previously saved stream.

    The file name is the md5 of 'name|url' — the same scheme used by
    add_stream() — so the pair uniquely identifies the entry.
    """
    filename = hashlib.md5(name + '|' + url).hexdigest() + '.txt'
    ficheiro = os.path.join(mystrm_folder,filename)
    try:
        os.remove(ficheiro)
        xbmc.executebuiltin("Notification(%s,%s,%i,%s)" % (translate(30000), translate(30016), 1,os.path.join(addonpath,"icon.png")))
        xbmc.executebuiltin("Container.Refresh")
    except OSError:
        # File already gone or not removable; the previous bare
        # `except` also swallowed unrelated errors (e.g. typos/NameError).
        pass
|
kanote/TESTE-plexus
|
resources/plexus/mystreams.py
|
Python
|
gpl-2.0
| 3,389
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.