code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
import kronos
# Scheduled daily at midnight (cron spec '0 0 * * *').
@kronos.register('0 0 * * *')
def praise():
    print("Kronos makes it really easy to define and schedule "
          "tasks with cron!")
# Same schedule, but registered with extra command-line arguments
# (flag with no value, short option, and a list-valued option).
@kronos.register('0 0 * * *', args={"--arg1": None, "-b": "some-arg2", "--some-list": ["site1", "site2", "site3"]})
def praise_with_args():
    print("Kronos makes it really easy to define and schedule "
          "tasks with cron, even with arguments!")
# Registering with an empty args dict must behave like no args at all.
@kronos.register('0 0 * * *', args={})
def praise_with_empty_args():
    print("Kronos makes it really easy to define and schedule "
          "tasks with cron, even with arguments, that you can forget and it still work!")
|
jeanbaptistelab/django-kronos
|
kronos/tests/project/cron.py
|
Python
|
mit
| 626
|
"""
Copyright (C) 2020 Quinn D Granfor <spootdev@gmail.com>
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
version 2, as published by the Free Software Foundation.
This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License version 2 for more details.
You should have received a copy of the GNU General Public License
version 2 along with this program; if not, write to the Free
Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
MA 02110-1301, USA.
"""
# https://github.com/alexmercerind/youtube-search-python
# pip install youtube-search-python
from youtubesearchpython import SearchPlaylists
from youtubesearchpython import SearchVideos
def com_net_yt_search(search_type='video', search_text=None, offset=1,
                      mode='json', max_results=20):
    """Run a YouTube search via the youtube-search-python package.

    search_type: 'video' searches videos; any other value searches
    playlists (the value is not validated further).
    Returns the search .result() payload in the requested *mode*
    (e.g. 'json'), limited to *max_results* starting at page *offset*.
    """
    if search_type == 'video':
        return SearchVideos(search_text, offset=offset, mode=mode,
                            max_results=max_results).result()
    else:
        return SearchPlaylists(search_text, offset=offset, mode=mode,
                               max_results=max_results).result()
|
MediaKraken/MediaKraken_Deployment
|
source/common/common_network_youtube_search.py
|
Python
|
gpl-3.0
| 1,346
|
"""
Link extraction for auto scraping
"""
from scrapy.utils.misc import load_object
from .base import BaseLinkExtractor, ALLOWED_SCHEMES
from .html import HtmlLinkExtractor
from .xml import XmlLinkExtractor, RssLinkExtractor, SitemapLinkExtractor, AtomLinkExtractor
from .regex import RegexLinkExtractor
from .ecsv import CsvLinkExtractor
# Registry of built-in link extractor types.
# Each row: (type name, extractor class, ignore-value flag).  When the
# flag is True the specs' 'value' is NOT passed to the constructor
# (those extractor types take no positional expression).
_TYPE_MAP = (
    # type, class, ignore value
    ('regex', RegexLinkExtractor, False),
    ('xpath', XmlLinkExtractor, False),
    ('column', CsvLinkExtractor, False),
    ('html', HtmlLinkExtractor, True),
    ('rss', RssLinkExtractor, True),
    ('sitemap', SitemapLinkExtractor, True),
    ('atom', AtomLinkExtractor, True),
)
def create_linkextractor_from_specs(specs):
    """Return a link extractor instance from specs.

    *specs* must contain 'type' and 'value' keys; all remaining keys are
    passed to the extractor constructor as keyword arguments.  Type
    'module' loads a custom class from the dotted path in 'value'; other
    types are looked up in _TYPE_MAP.

    Raises ValueError for an unknown type (the message now names the
    offending type to ease debugging).
    """
    specs = specs.copy()  # don't mutate the caller's dict
    ltype, value = specs.pop('type'), specs.pop('value')
    if ltype == 'module':
        # 'value' holds a dotted path to a custom extractor class
        cls = load_object(value)
        return cls(**specs)
    for key, cls, ignore in _TYPE_MAP:
        if key == ltype:
            if ignore:
                # this extractor type takes no positional value
                return cls(**specs)
            return cls(value, **specs)
    raise ValueError("Invalid link extractor type specification: %r" % ltype)
|
aazen/DayOne
|
slybot/slybot/linkextractor/__init__.py
|
Python
|
bsd-3-clause
| 1,213
|
import os
import re
import collections
import json
import stat
import copy
from fnmatch import fnmatch
from pathlib import Path, PurePosixPath
from .projman_glob import *
from cudatext import *
import cudatext_cmd
from cudax_lib import get_translation
_ = get_translation(__file__) # i18n
IS_WIN = os.name == 'nt'  # True when running on Windows
PROJECT_EXTENSION = ".cuda-proj"  # extension of saved project files
PROJECT_DIALOG_FILTER = _("CudaText projects|*") + PROJECT_EXTENSION  # open/save dialog filter
PROJECT_UNSAVED_NAME = _("(Unsaved project)")  # root caption before first save
# Tree node kinds, used to filter context-menu items.
NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD = range(4)
# Info about the currently loaded project; published by
# Command.update_global_data() and read by project_variables().
global_project_info = {}
def _file_open(fn, options=''):
    """Open *fn* in the editor group of the currently focused editor."""
    gr = ed.get_prop(PROP_INDEX_GROUP)
    #print('Opening file in group %d'%gr)
    file_open(fn, group=gr, options=options)
# https://stackoverflow.com/questions/377017/test-if-executable-exists-in-python
def which(program):
    """Locate executable *program* like the Unix `which` utility.

    If *program* contains a directory part it is checked directly;
    otherwise every entry of $PATH is tried in order.  Returns the full
    path of the first executable regular file found, or None.
    """
    def _runnable(candidate):
        return os.path.isfile(candidate) and os.access(candidate, os.X_OK)

    directory, _ = os.path.split(program)
    if directory:
        if _runnable(program):
            return program
        return None
    for folder in os.environ["PATH"].split(os.pathsep):
        candidate = os.path.join(folder, program)
        if _runnable(candidate):
            return candidate
    return None
def project_variables():
    """
    gives dict with "project variables", which is ok for using from other plugins,
    e.g. ExtTools.
    add to names {} or $() if you want.
    1) predefined var ProjMainFile (defined by right-click menu in ProjMan)
    2) predefined var ProjDir (dir of .cuda-proj file)
    3) other vars are defined by user in Proj Properties dialog.
    """
    res = collections.OrderedDict()
    data = global_project_info
    # ProjDir is '' when no project file is loaded
    res['ProjDir'] = os.path.dirname(data.get('filename', ''))
    fn = data.get('mainfile', '')
    res['ProjMainFile'] = fn
    res['ProjMainFileNameOnly'] = os.path.basename(fn)
    # basename without the last extension; '' when fn has no '.' parts
    res['ProjMainFileNameNoExt'] = '.'.join(os.path.basename(fn).split('.')[0:-1])
    # user variables are stored as 'name=value' strings
    data = global_project_info.get('vars', [])
    for item in data:
        s1, s2 = item.split('=', maxsplit=1)
        res[s1] = s2
    return res
# Caption/icon pair describing one tree node.
NodeInfo = collections.namedtuple("NodeInfo", "caption image")
_homedir = os.path.expanduser('~')
def collapse_filename(fn):
    """Replace a leading home-dir prefix of *fn* with '~' for display."""
    if (fn+'/').startswith(_homedir+'/'):
        fn = fn.replace(_homedir, '~', 1)
    return fn
def nice_filename(path):
    """Format *path* as 'name.ext (collapsed/dir)' for menus and lists."""
    return os.path.basename(path) + ' ('+ collapse_filename(os.path.dirname(path)) + ')'
def is_simple_listed(name, masks):
    """Case-insensitive suffix test: does *name* end with any of the
    space-separated suffixes in *masks*?

    Fix: the original lowercased only *name*, so an upper-case suffix in
    *masks* could never match; both sides are lowercased now.
    """
    suffixes = tuple(masks.lower().split(' '))
    return name.lower().endswith(suffixes)
def is_mask_listed(s, masks):
    """Return True when *s* matches any of the ';'-separated
    fnmatch-style patterns in *masks*."""
    return any(fnmatch(s, pattern) for pattern in masks.split(';'))
# only Py 3.5 supports os.stat(s).st_file_attributes
# so this is to support Py 3.4
def is_hidden_win32(s):
    """Windows-only: check the FILE_ATTRIBUTE_HIDDEN bit of path *s*."""
    import ctypes # import here to avoid it on Unix
    try:
        attrs = ctypes.windll.kernel32.GetFileAttributesW(s)
        assert attrs != -1
        res = bool(attrs & 2)  # 2 == FILE_ATTRIBUTE_HIDDEN
    except (AttributeError, AssertionError):
        # AttributeError: not on Windows; AssertionError: API call failed
        res = False
    return res
def is_hidden(s):
    """True when path *s* should be hidden in the tree (per-OS rules)."""
    if IS_WIN:
        if s=='':
            return False
        if s.endswith(':\\'):
            # never hide drive roots
            return False
        return is_hidden_win32(s)
        #try:
        #    return bool(os.stat(s).st_file_attributes & stat.FILE_ATTRIBUTE_HIDDEN)
        #except:
        #    return True
    else:
        # Unix convention: dot-files are hidden
        return os.path.basename(s).startswith('.')
def is_win_root(s):
    """True for a Windows drive root like 'C:\\'."""
    return IS_WIN and s.endswith(':\\')
def is_locked(s):
    """True when *s* is unreadable (such nodes get the 'bad' icon)."""
    if IS_WIN:
        if s.endswith(':\\'):
            return False
        # NOTE(review): reuses the hidden-attribute check as a proxy for
        # "locked" on Windows — confirm this is intended.
        return is_hidden_win32(s)
    else:
        return not os.access(s, os.R_OK)
def _toolbar_add_btn(h_bar, hint, icon=-1, command=''):
    """Append one button to toolbar *h_bar*; hint '-' makes a separator."""
    toolbar_proc(h_bar, TOOLBAR_ADD_ITEM)
    cnt = toolbar_proc(h_bar, TOOLBAR_GET_COUNT)
    h_btn = toolbar_proc(h_bar, TOOLBAR_GET_BUTTON_HANDLE, index=cnt-1)
    if hint=='-':
        button_proc(h_btn, BTN_SET_KIND, BTNKIND_SEP_HORZ)
    else:
        button_proc(h_btn, BTN_SET_KIND, BTNKIND_ICON_ONLY)
        button_proc(h_btn, BTN_SET_HINT, hint)
        button_proc(h_btn, BTN_SET_IMAGEINDEX, icon)
        # command string is stored on the button, dispatched on click
        button_proc(h_btn, BTN_SET_DATA1, command)
class Command:
    """Project Manager plugin: side panel with project tree and actions."""
    goto_history = []  # most-recently-visited files for the Go-to-file dialog
    title ="Project" # No _() here, the translation is offered in "translation template.ini".
    # Context-menu description table.  Each row:
    # (caption, parent submenu key, node types the item applies to,
    #  command string dispatched by the app).
    menuitems = (
        # item_caption , item_parent , item_types , item_action
        (_("New project") , "proj", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_new_project"),
        (_("Open project...") , "proj", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_open_project"),
        (_("Recent projects") , "proj", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "collect_recent_projects"),
        (_("Save project as...") , "proj", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_save_project_as"),
        (_("Add folder...") , "nodes", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_add_folder"),
        (_("Add file...") , "nodes", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_add_file"),
        (_("Clear project") , "nodes", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_clear_project"),
        (_("Remove node") , "nodes", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_remove_node"),
        (_("New file...") , "dir", [NODE_DIR], "cuda_project_man.action_new_file"),
        (_("Rename...") , "dir", [NODE_DIR], "cuda_project_man.action_rename"),
        (_("Delete directory") , "dir", [NODE_DIR], "cuda_project_man.action_delete_directory"),
        (_("New directory...") , "dir", [NODE_DIR], "cuda_project_man.action_new_directory"),
        (_("Find in directory...") , "dir", [NODE_DIR], "cuda_project_man.action_find_in_directory"),
        (_("Open in default application")
        , "file", [NODE_FILE], "cuda_project_man.action_open_def"),
        (_("Focus in file manager"), "file", [NODE_FILE], "cuda_project_man.action_focus_in_fileman"),
        (_("Rename...") , "file", [NODE_FILE], "cuda_project_man.action_rename"),
        (_("Delete file") , "file", [NODE_FILE], "cuda_project_man.action_delete_file"),
        (_("Set as main file") , "file", [NODE_FILE], "cuda_project_man.action_set_as_main_file"),
        ("-" , "", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], ""),
        (_("Refresh") , "", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_refresh"),
        ("-" , "", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], ""),
        (_("Go to file...") , "", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_go_to_file"),
        (_("Project properties..."), "", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_project_properties"),
        (_("Configure Project Manager..."), "", [None, NODE_PROJECT, NODE_DIR, NODE_FILE, NODE_BAD], "cuda_project_man.action_config"),
    )
    # Default options; replaced by the contents of cuda_project_man.json
    # in __init__ when that file exists.
    options = {
        "recent_projects": [],
        "no_files": "",
        "no_dirs": ".git;.svn",
        "no_hidden": True,
        "toolbar": True,
        "preview": True,
        "d_click": False,
    }
    tree = None  # treeview handle; None until the panel is created
    h_dlg = None  # panel form handle
    h_menu = None  # tree context-menu handle
    h_menu_cfg = None  # 'configure' dropdown menu handle
    def __init__(self):
        """Load saved options (if present) and start with an empty project."""
        settings_dir = Path(app_path(APP_DIR_SETTINGS))
        self.options_filename = settings_dir / "cuda_project_man.json"
        if self.options_filename.exists():
            with self.options_filename.open(encoding='utf8') as fin:
                # NOTE(review): a corrupt JSON file will raise here and
                # abort plugin loading — confirm this is acceptable.
                self.options = json.load(fin)
        self.new_project()
    def init_form_main(self):
        """Build the panel form: toolbar with action buttons + treeview."""
        show_toolbar = self.options.get("toolbar", True)
        toolbar_theme = self.options.get("toolbar_theme", "default_16x16")
        self.h_dlg = dlg_proc(0, DLG_CREATE)
        dlg_proc(self.h_dlg, DLG_PROP_SET, {
            'keypreview': True,
            'on_key_down': self.form_key_down,
            } )
        # toolbar control, anchored to the top of the form
        n = dlg_proc(self.h_dlg, DLG_CTL_ADD, prop='toolbar')
        dlg_proc(self.h_dlg, DLG_CTL_PROP_SET, index=n, prop={
            'name':'bar',
            'a_r':('',']'), #anchor to top: l,r,t
            'vis': show_toolbar,
            'h': 24,
            'autosize': True,
            } )
        self.h_bar = dlg_proc(self.h_dlg, DLG_CTL_HANDLE, index=n)
        self.toolbar_imglist = toolbar_proc(self.h_bar, TOOLBAR_GET_IMAGELIST)
        self.set_imagelist_size(toolbar_theme, self.toolbar_imglist)
        # load themed toolbar icons from the app data dir
        dirname = os.path.join(app_path(APP_DIR_DATA), 'projtoolbaricons', toolbar_theme)
        icon_open = imagelist_proc(self.toolbar_imglist, IMAGELIST_ADD, value = os.path.join(dirname, 'open.png'))
        icon_save = imagelist_proc(self.toolbar_imglist, IMAGELIST_ADD, value = os.path.join(dirname, 'save.png'))
        icon_add_file = imagelist_proc(self.toolbar_imglist, IMAGELIST_ADD, value = os.path.join(dirname, 'add-file.png'))
        icon_add_dir = imagelist_proc(self.toolbar_imglist, IMAGELIST_ADD, value = os.path.join(dirname, 'add-dir.png'))
        icon_del = imagelist_proc(self.toolbar_imglist, IMAGELIST_ADD, value = os.path.join(dirname, 'del.png'))
        icon_cfg = imagelist_proc(self.toolbar_imglist, IMAGELIST_ADD, value = os.path.join(dirname, 'cfg.png'))
        toolbar_proc(self.h_bar, TOOLBAR_THEME)
        _toolbar_add_btn(self.h_bar, hint=_('Open project'), icon=icon_open, command='cuda_project_man.action_open_project' )
        _toolbar_add_btn(self.h_bar, hint=_('Save project as'), icon=icon_save, command='cuda_project_man.action_save_project_as' )
        _toolbar_add_btn(self.h_bar, hint='-' )
        _toolbar_add_btn(self.h_bar, hint=_('Add folder'), icon=icon_add_dir, command='cuda_project_man.action_add_folder' )
        _toolbar_add_btn(self.h_bar, hint=_('Add file'), icon=icon_add_file, command='cuda_project_man.action_add_file' )
        _toolbar_add_btn(self.h_bar, hint=_('Remove node'), icon=icon_del, command='cuda_project_man.action_remove_node' )
        _toolbar_add_btn(self.h_bar, hint='-' )
        _toolbar_add_btn(self.h_bar, hint=_('Configure'), icon=icon_cfg, command='cuda_project_man.menu_cfg')
        toolbar_proc(self.h_bar, TOOLBAR_UPDATE)
        # treeview fills the rest of the form, below the toolbar
        n = dlg_proc(self.h_dlg, DLG_CTL_ADD, prop='treeview')
        dlg_proc(self.h_dlg, DLG_CTL_PROP_SET, index=n, prop={
            'name':'tree',
            'a_t':('bar', ']'),
            'a_r':('',']'), #anchor to entire form
            'a_b':('',']'),
            'on_menu': 'cuda_project_man.tree_on_menu',
            'on_unfold': 'cuda_project_man.tree_on_unfold',
            'on_click': 'cuda_project_man.tree_on_click',
            'on_click_dbl': 'cuda_project_man.tree_on_click_dbl',
            } )
        self.tree = dlg_proc(self.h_dlg, DLG_CTL_HANDLE, index=n)
        self.tree_imglist = tree_proc(self.tree, TREE_GET_IMAGELIST)
        tree_proc(self.tree, TREE_PROP_SHOW_ROOT, text='0')
        tree_proc(self.tree, TREE_ITEM_DELETE, 0)
        # cache icon indexes for the special node kinds
        self.icon_init()
        self.ICON_ALL = self.icon_get('_')
        self.ICON_DIR = self.icon_get('_dir')
        self.ICON_PROJ = self.icon_get('_proj')
        self.ICON_BAD = self.icon_get('_bad')
        self.ICON_ZIP = self.icon_get('_zip')
        self.ICON_BIN = self.icon_get('_bin')
        self.ICON_IMG = self.icon_get('_img')
    def init_panel(self, and_activate=True):
        """Create the side panel once and register it with the app."""
        # already inited?
        if self.tree:
            return
        self.init_form_main()
        #dlg_proc(self.h_dlg, DLG_SCALE)
        tree_proc(self.tree, TREE_THEME) #TREE_THEME only after DLG_SCALE
        app_proc(PROC_SIDEPANEL_ADD_DIALOG, (self.title, self.h_dlg, 'project.png'))
        if and_activate:
            self.do_show(True)
        self.action_refresh()
        self.generate_context_menu()
    def show_panel(self):
        """Show the panel without focusing it."""
        self.do_show(False)
    def focus_panel(self):
        """Show the panel and focus it."""
        self.do_show(True)
    def do_show(self, and_focus):
        """Create the panel on first use, then activate it in the sidebar.

        NOTE(review): the *and_focus* argument is currently unused — the
        first-time path always passes True to init_panel; confirm intent.
        """
        if not self.tree:
            self.init_panel(True)
        else:
            ed.cmd(cudatext_cmd.cmd_ShowSidePanelAsIs)
        app_proc(PROC_SIDEPANEL_ACTIVATE, self.title)
    @property
    def selected(self):
        # handle of the currently selected tree item (or None)
        return tree_proc(self.tree, TREE_ITEM_GET_SELECTED)
    def add_context_menu_node(self, parent, action, name):
        """Append one item to menu *parent*; returns the new item handle."""
        return menu_proc(parent, MENU_ADD, command=action, caption=name)
    def generate_context_menu(self):
        """(Re)build the tree's right-click menu for the current selection."""
        # determine selected node's kind from its icon index
        node_type = None
        if self.selected is not None:
            n = self.get_info(self.selected).image
            if n == self.ICON_PROJ: node_type = NODE_PROJECT
            elif n == self.ICON_DIR: node_type = NODE_DIR
            elif n == self.ICON_BAD: node_type = NODE_BAD
            else: node_type = NODE_FILE
        if not self.h_menu:
            self.h_menu = menu_proc(0, MENU_CREATE)
        menu_all = self.h_menu
        menu_proc(menu_all, MENU_CLEAR)
        # submenus; "0" acts as placeholder command for parent items
        menu_proj = self.add_context_menu_node(menu_all, "0", _("Project file"))
        menu_nodes = self.add_context_menu_node(menu_all, "0", _("Root nodes"))
        if node_type == NODE_FILE:
            menu_file = self.add_context_menu_node(menu_all, "0", _("Selected file"))
        if node_type == NODE_DIR:
            menu_dir = self.add_context_menu_node(menu_all, "0", _("Selected directory"))
        for item in self.menuitems:
            item_caption = item[0]
            item_parent = item[1]
            item_types = item[2]
            item_action = item[3]
            if node_type not in item_types:
                continue
            # menu_file/menu_dir are only referenced for items whose
            # item_types filter matched the corresponding node_type above
            if item_parent == "proj":
                menu_use = menu_proj
            elif item_parent == "nodes":
                menu_use = menu_nodes
            elif item_parent == "file":
                menu_use = menu_file
            elif item_parent == "dir":
                menu_use = menu_dir
            else:
                menu_use = menu_all
            if item_action == "collect_recent_projects":
                action = ""
            else:
                action = item_action
            menu_added = self.add_context_menu_node(menu_use, action, item_caption)
            # recent projects become a generated submenu of path items
            if item_action == "collect_recent_projects":
                for path in self.options["recent_projects"]:
                    if os.sep in path:
                        action = str.format("module=cuda_project_man;cmd=action_open_project;info={};", path)
                        self.add_context_menu_node(menu_added, action, nice_filename(path))
    @staticmethod
    def node_ordering(node):
        """Sort key for node paths: dirs first, then extension, then name."""
        # sort folders first, then by extension
        path = Path(node)
        return path.is_file(), path.suffix.upper(), path.name.upper()
    @staticmethod
    def node_ordering_direntry(path):
        # node_ordering() for DirEntry
        _, suffix = os.path.splitext(path.name)
        return path.is_file(), suffix.upper(), path.name.upper()
    def add_node(self, path):
        """Add file/dir *path* as a top-level node, refresh, auto-save."""
        if path:
            if path in self.project["nodes"]:
                return
            msg_status(_("Adding to project: ") + collapse_filename(path), True)
            self.project["nodes"].append(path)
            self.project["nodes"].sort(key=Command.node_ordering)
            self.action_refresh()
            # auto-save when the project already has a filename
            if self.project_file_path:
                self.action_save_project_as(self.project_file_path)
    def new_project(self):
        """Reset to an empty unsaved project and clear app-level state."""
        self.project = dict(nodes=[])
        self.project_file_path = None
        self.update_global_data()
        self.goto_history = []
        app_proc(PROC_SET_FOLDER, '')
        app_proc(PROC_SET_PROJECT, '')
def add_recent(self, path):
recent = self.options["recent_projects"]
if path in recent:
recent.pop(recent.index(path))
self.options["recent_projects"] = ([path] + recent)[:10]
self.generate_context_menu()
    def action_new_file(self):
        """Create a new empty file in the selected directory and open it."""
        location = Path(self.get_location_by_index(self.selected))
        if location.is_file():
            location = location.parent
        result = dlg_input(_("New file:"), "")
        if not result:
            return
        # reject names containing a path separator
        if os.sep in result:
            msg_status(_("Incorrect file name"))
            return
        path = location / result
        path.touch()
        self.action_refresh()
        #open new file
        self.jump_to_filename(str(path))
        if os.path.isfile(str(path)):
            _file_open(str(path))
def action_open_def(self):
fn = str(self.get_location_by_index(self.selected))
if not os.path.isfile(fn):
return
suffix = app_proc(PROC_GET_OS_SUFFIX, '')
if suffix=='':
#Windows
os.startfile(fn)
elif suffix=='__mac':
#macOS
os.system('open "'+fn+'"')
elif suffix=='__haiku':
#Haiku
msg_status('TODO: implemenet "Open in default app" for Haiku')
else:
#other Unixes
os.system('xdg-open "'+fn+'"')
    def action_focus_in_fileman(self):
        """Reveal the selected file in the OS file manager."""
        fn = str(self.get_location_by_index(self.selected))
        if not os.path.isfile(fn):
            return
        suffix = app_proc(PROC_GET_OS_SUFFIX, '')
        if suffix=='':
            #Windows
            # NOTE(review): fn is not quoted here; paths containing spaces
            # may not be selected correctly — confirm.
            os.system('explorer.exe /select,'+fn)
        elif suffix=='__mac':
            #macOS
            fn = fn.replace(' ', '\\ ') #macOS cannot handle quoted filename
            os.system('open --new --reveal '+fn)
        elif suffix=='__haiku':
            #Haiku
            msg_status('"Focus in file manager" not implemented for this OS')
        else:
            #Linux and others
            # try well-known file managers in order of preference
            if which('nautilus'):
                os.system('nautilus "'+fn+'"')
            elif which('thunar'):
                os.system('thunar "'+os.path.dirname(fn)+'"')
            elif which('caja'):
                os.system('caja "'+os.path.dirname(fn)+'"')
            elif which('dolphin'):
                os.system('dolphin --select --new-window "'+fn+'"')
            else:
                msg_status('"Focus in file manager" does not support your file manager')
    def action_rename(self):
        """Rename the selected file/dir on disk and update the tree."""
        location = Path(self.get_location_by_index(self.selected))
        result = dlg_input(_("Rename to"), str(location.name))
        if not result:
            return
        new_location = location.parent / result
        if location == new_location:
            return
        location.replace(new_location)
        # a top-level node stores its own path: re-add it under the new name
        if location in self.top_nodes.values():
            self.action_remove_node()
            self.add_node(str(new_location))
        self.action_refresh()
        self.jump_to_filename(str(new_location))
        msg_status(_("Renamed to: ") + str(new_location.name))
    def action_delete_file(self):
        """Delete the selected file from disk (with confirmation)."""
        location = Path(self.get_location_by_index(self.selected))
        if msg_box(_("Delete file from disk:\n") + str(location), MB_OKCANCEL + MB_ICONWARNING) != ID_OK:
            return
        location.unlink()
        if location in self.top_nodes.values():
            self.action_remove_node()
        else:
            self.action_refresh()
            self.jump_to_filename(str(location.parent))
        msg_status(_("Deleted file: ") + str(location.name))
    def do_delete_dir(self, location):
        """Recursively delete directory *location* and its contents.

        NOTE(review): Path.is_dir() follows symlinks, so a symlinked
        directory is recursed into — confirm this is intended.
        """
        for path in location.glob("*"):
            if path.is_file():
                path.unlink()
            elif path.is_dir():
                self.do_delete_dir(path)
        location.rmdir()
    def action_delete_directory(self):
        """Delete the selected directory from disk (with confirmation)."""
        location = Path(self.get_location_by_index(self.selected))
        if msg_box(_("Delete directory from disk:\n") + str(location), MB_OKCANCEL + MB_ICONWARNING) != ID_OK:
            return
        self.do_delete_dir(location)
        if location in self.top_nodes.values():
            self.action_remove_node()
        else:
            self.action_refresh()
            self.jump_to_filename(str(location.parent))
        msg_status(_("Deleted dir: ") + str(location.name))
    def action_new_directory(self):
        """Create a new subdirectory under the selected node."""
        location = Path(self.get_location_by_index(self.selected))
        if location.is_file():
            location = location.parent
        result = dlg_input(_("New directory"), "")
        if not result:
            return
        location = location / result
        location.mkdir()
        self.action_refresh()
        self.jump_to_filename(str(location))
    def action_find_in_directory(self):
        """Run "Find in Files" (or "Find in Files 4") on the selected dir."""
        location = str(self.get_location_by_index(self.selected))
        try:
            import cuda_find_in_files as fif
            msg_status(_('Called "Find in Files" for "%s"') % location)
            fif.show_dlg(what="", opts={"fold": location})
        except ImportError:
            # fall back to the alternative plugin
            try:
                from cuda_find_in_files4 import show_fif4
                msg_status(_('Called "Find in Files 4" for "%s"') % location)
                show_fif4({'with': {
                    'in_what': '',
                    'wk_fold': location,
                    'wk_incl': '*'
                    }})
            except ImportError:
                msg_box(_('Plugin "Find in Files" or "Find in Files 4" is not installed, install it first'), MB_OK + MB_ICONERROR)
    def action_refresh(self, parent=None):
        """Rebuild the tree (or one dir node) with the tree locked."""
        # it was hard to add TREE_LOCK/UNLOCK directly into action_refresh_int
        tree_proc(self.tree, TREE_LOCK)
        try:
            self.action_refresh_int(parent)
        finally:
            tree_proc(self.tree, TREE_UNLOCK)
def action_refresh_int(self, parent=None):
unfold = parent is None
if parent is None:
# clear tree
tree_proc(self.tree, TREE_ITEM_DELETE, 0)
if self.project_file_path is None:
project_name = PROJECT_UNSAVED_NAME
else:
project_name = self.project_file_path.stem
parent = tree_proc(
self.tree,
TREE_ITEM_ADD,
0,
-1,
project_name,
self.ICON_PROJ,
)
#select 1st node
items_root = tree_proc(self.tree, TREE_ITEM_ENUM, 0)
tree_proc(self.tree, TREE_ITEM_SELECT, items_root[0][0])
nodes = map(Path, self.project["nodes"])
self.top_nodes = {}
else:
fn = str(self.get_location_by_index(parent)) # str() is required for old Python 3.5 for os.scandir()
if not fn: return
#print('Reading dir:', fn)
try:
nodes = sorted(os.scandir(fn), key=Command.node_ordering_direntry)
except:
tree_proc(self.tree, TREE_ITEM_SET_ICON, parent, image_index=self.ICON_BAD)
raise # good to see the error
return
for path in nodes:
# DirEntry or Path?
if isinstance(path, Path):
spath = str(path)
else:
spath = path.path
is_dir = path.is_dir()
sname = path.name
if is_win_root(spath):
sname = spath
elif self.options.get("no_hidden", True) and is_hidden(spath):
continue
elif self.is_filename_ignored(spath, is_dir):
continue
if is_locked(spath):
imageindex = self.ICON_BAD
elif is_dir:
imageindex = self.ICON_DIR
elif is_simple_listed(path.name, MASKS_IMAGES):
imageindex = self.ICON_IMG
elif is_simple_listed(path.name, MASKS_ZIP):
imageindex = self.ICON_ZIP
elif is_simple_listed(path.name, MASKS_BINARY):
imageindex = self.ICON_BIN
else:
lexname = lexer_proc(LEXER_DETECT, path.name)
if lexname:
imageindex = self.icon_get(lexname)
else:
imageindex = self.ICON_ALL
index = tree_proc(
self.tree,
TREE_ITEM_ADD,
parent,
-1,
sname,
imageindex,
data=spath
)
if nodes is self.project["nodes"]:
self.top_nodes[index] = Path(spath)
# dummy nested node for folders
if imageindex == self.ICON_DIR:
tree_proc(
self.tree,
TREE_ITEM_ADD,
index,
-1,
'dummy',
-1
)
if unfold:
tree_proc(self.tree, TREE_ITEM_UNFOLD, parent)
    def action_new_project(self):
        """Menu action: start a fresh empty project."""
        self.new_project()
        self.action_refresh()
    def action_open_project(self, info=None):
        """Open project from path *info*, or ask the user when None."""
        path = info
        if path is None:
            path = dlg_file(True, "", "", PROJECT_DIALOG_FILTER)
        if path:
            proj_dir = os.path.dirname(path)
            def expand_macros(s):
                # nodes may be stored relative to the project dir
                return s.replace('{ProjDir}', proj_dir, 1)
            if Path(path).exists():
                print(_('Loading project: ') + collapse_filename(path))
                with open(path, encoding='utf8') as fin:
                    self.project = json.load(fin)
                if 'nodes' in self.project:
                    for i in range(len(self.project['nodes'])):
                        self.project['nodes'][i] = expand_macros(self.project['nodes'][i])
                #print('Loaded project:', self.project)
                self.project_file_path = Path(path)
                self.add_recent(path)
                self.action_refresh()
                self.save_options()
                self.update_global_data()
                self.goto_history = []
                # publish the first dir node as the app's current folder
                for fn in self.project["nodes"]:
                    if os.path.isdir(fn):
                        app_proc(PROC_SET_FOLDER, fn)
                        break
                app_proc(PROC_SET_PROJECT, path)
                msg_status(_("Project opened: ") + path)
            else:
                msg_status(_("Project filename is not found: ") + path)
    def action_add_folder(self):
        """Ask for a folder and add it as a top-level node."""
        fn = dlg_dir("")
        self.add_node(fn)
    def action_add_file(self):
        """Ask for a file and add it as a top-level node."""
        fn = dlg_file(True, "", "", "")
        self.add_node(fn)
    def action_remove_node(self):
        """Remove the top-level node containing the current selection."""
        index = self.selected
        # walk up to the level-1 ancestor (a direct child of the root)
        while True:
            prop = tree_proc(self.tree, TREE_ITEM_GET_PROPS, index)
            if prop["level"] == 0:
                return
            if prop["level"] == 1:
                path = prop["data"]
                break
            index = prop["parent"]
        tree_proc(self.tree, TREE_ITEM_DELETE, index)
        if str(path) in self.project["nodes"]:
            self.project["nodes"].remove(str(path))
        if index in self.top_nodes:
            self.top_nodes.pop(index)
        # auto-save when the project has a filename
        if self.project_file_path:
            self.action_save_project_as(self.project_file_path)
    def action_clear_project(self):
        """Remove all top-level nodes."""
        self.project["nodes"].clear()
        self.action_refresh()
    def action_set_as_main_file(self):
        """Remember the selected file as the project's ProjMainFile."""
        path = self.get_location_by_index(self.selected)
        self.project["mainfile"] = str(path)
        self.update_global_data()
        if self.project_file_path:
            self.action_save_project_as(self.project_file_path)
    def action_save_project_as(self, path=None):
        """Save the project.

        path=None: ask the user for a filename and refresh afterwards;
        otherwise save silently to *path* (used by the auto-save callers).
        """
        need_refresh = path is None
        if path is None:
            if self.project_file_path:
                project_path = str(self.project_file_path.parent)
            else:
                project_path = ""
            path = dlg_file(False, "", project_path, PROJECT_DIALOG_FILTER)
        if path:
            proj_dir = os.path.dirname(path)
            def collapse_macros(s):
                # store nodes inside the project dir as '{ProjDir}...'
                fn = s
                if (fn+os.sep).startswith(proj_dir+os.sep):
                    fn = fn.replace(proj_dir, '{ProjDir}', 1)
                return fn
            path = Path(path)
            # force the project file extension
            if path.suffix != PROJECT_EXTENSION:
                path = path.parent / (path.name + PROJECT_EXTENSION)
            # pre-processing of dict before saving
            d = copy.deepcopy(self.project)
            if 'nodes' in d:
                for i in range(len(d['nodes'])):
                    d['nodes'][i] = collapse_macros(d['nodes'][i])
            self.project_file_path = path
            with path.open("w", encoding='utf8') as fout:
                json.dump(d, fout, indent=4)
            self.update_global_data()
            print(_('Saving project: ') + collapse_filename(str(path)))
            msg_status(_("Project saved"))
            if need_refresh:
                self.add_recent(str(path))
                self.action_refresh()
                self.save_options()
    def action_go_to_file(self):
        """Menu action: 'Go to file...' dialog."""
        self.menu_goto()
    def action_project_properties(self):
        """Menu action: project properties dialog."""
        self.config_proj()
    def action_config(self):
        """Menu action: plugin options dialog."""
        self.config()
    def menu_cfg(self):
        """Show the small 'configure' dropdown menu (created lazily)."""
        if self.h_menu_cfg is None:
            self.h_menu_cfg = menu_proc(0, MENU_CREATE)
            menu_proc(self.h_menu_cfg, MENU_ADD, command='cuda_project_man.action_project_properties', caption=_('Project properties...'))
            menu_proc(self.h_menu_cfg, MENU_ADD, command='cuda_project_man.action_config', caption=_('Project Manager options...'))
        menu_proc(self.h_menu_cfg, MENU_SHOW)
    def update_global_data(self):
        """Publish current project info into module-level global_project_info."""
        global global_project_info
        global_project_info['filename'] = str(self.project_file_path) if self.project_file_path else ''
        global_project_info['nodes'] = self.project['nodes']
        global_project_info['vars'] = self.project.setdefault('vars', [])
        global_project_info['mainfile'] = self.project.setdefault('mainfile', '')
    def get_info(self, index):
        """Return NodeInfo(caption, icon) for tree item *index*, or None."""
        if index is None:
            return
        info = tree_proc(self.tree, TREE_ITEM_GET_PROPS, index)
        if info:
            return NodeInfo(info['text'], info['icon'])
    def get_location_by_index(self, index):
        """Return the filesystem Path stored in the 'data' of tree item
        *index* (Path('') when no data is stored)."""
        # dead code kept for reference: the old caption-walking approach
        '''
        path = []
        while index and index not in self.top_nodes:
            path.append(self.get_info(index).caption)
            index = tree_proc(self.tree, TREE_ITEM_GET_PROPS, index)['parent']
        path.reverse()
        node = self.top_nodes.get(index, None)
        full_path = Path(node / str.join(os.sep, path)) if node else Path('')
        return full_path
        '''
        p = tree_proc(self.tree, TREE_ITEM_GET_PROPS, index)
        return Path(p.get('data', ''))
    def save_options(self):
        """Write self.options to cuda_project_man.json."""
        with self.options_filename.open(mode="w", encoding='utf8') as fout:
            json.dump(self.options, fout, indent=4)
    def menu_recents(self):
        """Show the recent-projects list and open the chosen one."""
        items = self.options["recent_projects"]
        if not items:
            return
        items_nice = [os.path.basename(fn)+'\t'+os.path.dirname(fn) for fn in items]
        res = dlg_menu(DMENU_LIST, items_nice, caption=_('Recent projects'))
        if res is None:
            return
        self.init_panel()
        self.action_open_project(items[res])
    def do_unfold_first(self):
        """unfold 1st item under root"""
        items = tree_proc(self.tree, TREE_ITEM_ENUM, 0)
        if not items:
            return
        items = tree_proc(self.tree, TREE_ITEM_ENUM, items[0][0])
        if not items:
            return
        tree_proc(self.tree, TREE_ITEM_UNFOLD, items[0][0])
        tree_proc(self.tree, TREE_ITEM_SELECT, items[0][0])
    def new_project_open_dir(self):
        """Ask for a folder, make a new project from it, and show it."""
        fn = dlg_dir("")
        if fn is None: return
        if is_locked(fn):
            print(_('Project Manager: folder is locked: ') + fn)
            return
        self.init_panel()
        self.action_new_project()
        self.add_node(fn)
        self.do_unfold_first()
        app_proc(PROC_SIDEPANEL_ACTIVATE, self.title)
    def open_dir(self, dirname, new_proj=False):
        """Add *dirname* as a project node, optionally in a new project."""
        if not os.path.isdir(dirname):
            print(_('Project Manager: folder not found: ') + dirname)
            return
        #expand "." to fully qualified name
        dirname = os.path.abspath(dirname)
        if is_locked(dirname):
            print(_('Project Manager: folder is locked: ') + dirname)
            return
        self.init_panel()
        if new_proj:
            self.action_new_project()
        self.add_node(dirname)
        if new_proj:
            self.do_unfold_first()
        app_proc(PROC_SIDEPANEL_ACTIVATE, self.title)
    def on_open_pre(self, ed_self, filename):
        """App event: intercept opening of .cuda-proj files as projects."""
        if filename.endswith(PROJECT_EXTENSION):
            self.init_panel()
            self.action_open_project(filename)
            return False #block opening
    def config(self):
        """Show the plugin options dialog; persist and apply changes."""
        from .projman_dlg import dialog_config
        if dialog_config(self.options):
            print(_('ProjectManager: saving options'))
            self.save_options()
            # apply toolbar visibility immediately
            if self.h_dlg:
                dlg_proc(self.h_dlg, DLG_CTL_PROP_SET, name='bar', prop={
                    'vis': self.options.get('toolbar', True)
                    })
            # subscribe/unsubscribe app events depending on options
            ev = []
            if self.options['on_start']:
                ev += ['on_start']
            if self.options['check_git']:
                ev += ['on_open']
            if ev:
                ini_write('plugins.ini', 'events', 'cuda_project_man', ','.join(ev))
            else:
                ini_proc(INI_DELETE_KEY, 'plugins.ini', 'events', 'cuda_project_man')
    def config_proj(self):
        """Show the project properties dialog; auto-save on change."""
        if not self.tree:
            msg_status(_('Project not loaded'))
            return
        from .projman_dlg import dialog_proj_prop
        if dialog_proj_prop(self.project):
            self.update_global_data()
            if self.project_file_path:
                self.action_save_project_as(self.project_file_path)
    def is_filename_ignored(self, fn, is_dir):
        """True when *fn* matches the configured ignore masks
        ('no_dirs' for directories, 'no_files' for files)."""
        if is_dir:
            msk = self.options.get("no_dirs", "")
        else:
            msk = self.options.get("no_files", "")
        if msk:
            return is_mask_listed(os.path.basename(fn), msk)
        else:
            return False
    def on_start(self, ed_self):
        """App event: restore the last project on startup."""
        and_activate = self.options.get("on_start_activate", False)
        self.init_panel(and_activate)
        items = self.options.get("recent_projects", [])
        if items:
            self.action_open_project(items[0])
    # The contextmenu_* methods back menu items that can fire before the
    # panel exists; each ensures the panel is created, then delegates.
    def contextmenu_add_dir(self):
        self.init_panel()
        self.action_add_folder()
    def contextmenu_add_file(self):
        self.init_panel()
        self.action_add_file()
    def contextmenu_new_proj(self):
        self.init_panel()
        self.action_new_project()
    def contextmenu_open_proj(self):
        self.init_panel()
        self.action_open_project()
    def contextmenu_save_proj_as(self):
        self.init_panel()
        self.action_save_project_as()
    def contextmenu_refresh(self):
        self.init_panel()
        self.action_refresh()
    def contextmenu_remove_node(self):
        self.init_panel()
        self.action_remove_node()
    def contextmenu_clear_proj(self):
        self.init_panel()
        self.action_clear_project()
    def contextmenu_set_as_main_file(self):
        self.init_panel()
        self.action_set_as_main_file()
    def enum_all(self, callback):
        """
        Callback for all items.
        Until callback gets false.
        """
        items = tree_proc(self.tree, TREE_ITEM_ENUM, 0)
        if items:
            return self.enum_subitems(items[0][0], callback)
    def enum_subitems(self, item, callback):
        """
        Callback for all subitems of given item.
        Until callback gets false.
        """
        items = tree_proc(self.tree, TREE_ITEM_ENUM_EX, item)
        if items:
            for i in items:
                subitem = i['id']
                fn = i.get('data', '')
                # abort the whole walk as soon as callback returns falsy
                if not callback(fn, subitem):
                    return False
                if not self.enum_subitems(subitem, callback):
                    return False
        return True
    def enum_all_fn(self, filename, and_open):
        """
        Callback for all items.
        Find 'filename', and focus its node.
        """
        items = tree_proc(self.tree, TREE_ITEM_ENUM, 0)
        if items:
            return self.enum_subitems_fn(items[0][0], filename, and_open)
    def enum_subitems_fn(self, item_src, filename, and_open):
        """
        Callback for all subitems of given item_src.
        When found 'filename', focus it and return False
        """
        def _need(dirpath):
            # descend only into dirs whose path is a prefix of the target
            return filename.startswith(dirpath+os.sep)
        prop_list = tree_proc(self.tree, TREE_ITEM_ENUM_EX, item_src) or []
        for prop in prop_list:
            fn = prop['data']
            is_dir = prop['sub_items']
            if is_dir:
                if _need(fn):
                    node = prop['id']
                    # unfold so children get loaded, then recurse
                    tree_proc(self.tree, TREE_ITEM_UNFOLD, node)
                    if not self.enum_subitems_fn(node, filename, and_open):
                        return False
            elif fn==filename:
                node = prop['id']
                tree_proc(self.tree, TREE_ITEM_SELECT, node)
                tree_proc(self.tree, TREE_ITEM_SHOW, node)
                if is_dir:
                    tree_proc(self.tree, TREE_ITEM_UNFOLD, node)
                if and_open:
                    _file_open(fn)
                return False
        return True
    def menu_goto(self):
        """ Show menu-dialog with all files in project, and jump to chosen file """
        if not self.tree:
            msg_status(_('Project not opened'))
            return
        files = self.enum_all_files()
        if not files:
            msg_status(_('Project is empty'))
            return
        files.sort()
        # recently chosen files first, then the sorted full list
        files = self.goto_history + files
        # display as 'basename<TAB>collapsed-dirname'
        files_nice = [os.path.basename(fn)+'\t'+collapse_filename(os.path.dirname(fn)) for fn in files]
        res = dlg_menu(DMENU_LIST_ALT+DMENU_NO_FULLFILTER, #fuzzy search is needed for users
            files_nice,
            caption=_('Go to file')
            )
        if res is None:
            return  # dialog was cancelled
        fn = files[res]
        # move the chosen file to the head of the history (most recent first)
        if fn in self.goto_history:
            self.goto_history.remove(fn)
        self.goto_history.insert(0, fn)
        and_open = self.options.get('goto_open', False)
        self.jump_to_filename(fn, and_open)
    def jump_to_filename(self, filename, and_open=False):
        """ Find filename in entire project and focus its tree node.
        Returns False when the file was found (the enumeration stopped early),
        truthy/None otherwise. Optionally opens the file in the editor. """
        msg_status(_('Jumping to: ') + filename)
        return self.enum_all_fn(filename, and_open)
def sync_to_ed(self):
""" Jump to active editor file, if it's in project """
if not self.tree:
msg_status(_('Project not loaded'))
return
fn = ed.get_filename()
if fn:
if self.jump_to_filename(fn): #gets False if found
msg_status(_('Cannot jump to file: ') + fn)
    def tree_on_unfold(self, id_dlg, id_ctl, data='', info=''):
        """Lazily (re)populate a directory node when the user unfolds it."""
        info = self.get_info(data)
        path = self.get_location_by_index(data)
        if not path.is_dir():
            # directory vanished on disk: drop the node and its project entry
            tree_proc(self.tree, TREE_ITEM_DELETE, data)
            if str(path) in self.project["nodes"]:
                self.project["nodes"].remove(str(path))
            return
        if info.image != self.ICON_DIR:
            return
        # delete stale children, then re-read the directory contents
        items = tree_proc(self.tree, TREE_ITEM_ENUM, data)
        if items:
            for handle, _ in items:
                tree_proc(self.tree, TREE_ITEM_DELETE, handle)
        self.action_refresh(data)
    def tree_on_menu(self, id_dlg, id_ctl, data='', info=''):
        """Right-click on the tree: rebuild and show the context menu."""
        self.generate_context_menu()
        menu_proc(self.h_menu, MENU_SHOW, command='')
    def do_open_current_file(self, options):
        """Open the selected tree node's file in the editor (if it is a plain file)."""
        info = self.get_info(self.selected)
        if not info:
            return
        path = self.get_location_by_index(self.selected)
        if not path:
            return
        # skip nodes that are not plain files: broken entries, dirs, project root
        if info.image in [self.ICON_BAD, self.ICON_DIR, self.ICON_PROJ]:
            return
        if not os.path.isfile(str(path)):
            # file disappeared from disk: mark the node with the 'broken' icon
            tree_proc(self.tree, TREE_ITEM_SET_ICON, self.selected, image_index=self.ICON_BAD)
            return
        _file_open(str(path), options=options)
def get_open_options(self):
s = '/preview' if self.options.get('preview', True) else ''
s += ' /nozip /nontext-view-text'
return s
    def tree_on_click(self, id_dlg, id_ctl, data='', info=''):
        """Single-click on tree: sync dialogs' folder; open file unless dbl-click mode is on."""
        # set folder in project as current folder for Open/Save-as dialogs
        node = self.selected
        if not node: # may be from some OnClick events
            return
        s = str(self.get_location_by_index(node))
        if s and not s.startswith('.'): # skip parasitic '.' for project root node
            if os.path.isdir(s):
                app_proc(PROC_SET_FOLDER, s)
            elif os.path.isfile(s):
                app_proc(PROC_SET_FOLDER, os.path.dirname(s))
        if self.options.get('d_click', False):
            return  # files open on double-click instead
        self.do_open_current_file(self.get_open_options())
def tree_on_click_dbl(self, id_dlg, id_ctl, data='', info=''):
if not self.options.get('d_click', False):
#turn off 'preview' tab kind on dbl-click
ed.set_prop(PROP_PREVIEW, False)
return
self.do_open_current_file(self.get_open_options())
def set_imagelist_size(self, theme_name, imglist):
res = re.match('^\S+x(\d+)$', theme_name)
if not res:
return msg_box(_('Project Manager: bad icons folder name: "%s"') % theme_name, MB_OK+MB_ICONERROR)
n = int(res.group(1))
if not 8<=n<=64:
return msg_box(_('Project Manager: bad icons size: "%s"') % theme_name, MB_OK+MB_ICONERROR)
imagelist_proc(imglist, IMAGELIST_SET_SIZE, (n, n))
def icon_init(self):
self.icon_theme = self.options.get('icon_theme', 'vscode_16x16')
self.set_imagelist_size(self.icon_theme, self.tree_imglist)
self.icon_dir = os.path.join(app_path(APP_DIR_DATA), 'filetypeicons', self.icon_theme)
if not os.path.isdir(self.icon_dir):
self.icon_dir = os.path.join(app_path(APP_DIR_DATA), 'filetypeicons', 'vscode_16x16')
self.icon_json = os.path.join(self.icon_dir, 'icons.json')
self.icon_json_dict = json.loads(open(self.icon_json).read())
self.icon_indexes = {}
def icon_get(self, key):
s = self.icon_indexes.get(key, None)
if s:
return s
fn = self.icon_json_dict.get(key, None)
if fn is None:
n = self.ICON_ALL
self.icon_indexes[key] = n
return n
fn = os.path.join(self.icon_dir, fn)
n = imagelist_proc(self.tree_imglist, IMAGELIST_ADD, value=fn)
if n is None:
print(_('ProjectManager: incorrect filetype icon:'), fn)
n = self.ICON_ALL
self.icon_indexes[key] = n
return n
def form_key_down(self, id_dlg, id_ctl, data):
if id_ctl==13: #Enter
self.do_open_current_file(self.get_open_options())
return False #block key
def add_current_file(self):
if not self.tree:
self.init_panel(False)
fn = ed.get_filename()
self.add_node(fn)
def add_opened_files(self):
if not self.tree:
self.init_panel(False)
for h in ed_handles():
e = Editor(h)
fn = e.get_filename()
self.add_node(fn)
def goto_main(self):
if not self.tree:
msg_status(_('Project not opened'))
return
fn = self.project.get('mainfile', '')
if not fn:
msg_status(_('Project main file is not set'))
return
self.jump_to_filename(fn)
def open_main(self):
fn = self.project.get('mainfile', '')
if fn:
_file_open(fn)
else:
msg_status(_('Project main file is not set'))
def enum_all_files(self):
files, dirs = [], []
for root in self.project['nodes']:
if os.path.isdir(root):
dirs.append(root)
elif os.path.isfile(root):
files.append(root)
while dirs:
try:
next_dir = dirs.pop(0)
for found in os.scandir(next_dir):
# Ignoring symlinks prevents infinite loops with cyclic directory layouts
if found.is_dir() and not found.is_symlink() and not self.is_filename_ignored(found.path, True):
dirs.append(found.path)
elif found.is_file() and not self.is_filename_ignored(found.path, False):
files.append(found.path)
except (OSError, FileNotFoundError):
pass # Permissions issue. Not much we can do
return files
    def open_all(self):
        """Open every project file in the editor (after user confirmation)."""
        if not self.tree:
            msg_status(_('Project not opened'))
            return
        files = self.enum_all_files()
        if not files:
            msg_status(_('Project is empty'))
            return
        if msg_box(_('Open all %d file(s) in editor?') % len(files), MB_OKCANCEL+MB_ICONQUESTION)!=ID_OK:
            return
        for (i, fn) in enumerate(files):
            _file_open(fn, options="/nontext-cancel")
            if i%10==0:
                # let the UI process events so the app stays responsive
                app_idle(False)
def on_open(self, ed_self):
self.init_panel(False)
if not self.project_file_path:
self.action_project_for_git(ed_self.get_filename('*'))
    def action_project_for_git(self, filename):
        """
        Walk up from 'filename' looking for a .git/.svn folder;
        when found, create a project rooted at that repository dir
        and focus 'filename' inside it.
        """
        dir = os.path.dirname(filename)
        while True:
            fn = os.path.join(dir, '.git')
            fn2 = os.path.join(dir, '.svn')
            if os.path.isdir(fn) or os.path.isdir(fn2):
                self.init_panel()
                self.new_project()
                self.add_node(dir)
                self.jump_to_filename(filename)
                return
            d = os.path.dirname(dir)
            if d=='/':
                # reached the filesystem root on Unix — give up
                return
            if d==dir:
                # dirname() stopped changing (e.g. drive root on Windows) — give up
                return
            dir = d
|
Alexey-T/CudaText
|
app/py/cuda_project_man/__init__.py
|
Python
|
mpl-2.0
| 46,819
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010 Romain Bignon
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
from .browser import DLFP
from .backend import DLFPBackend
__all__ = ['DLFP', 'DLFPBackend']
|
jocelynj/weboob
|
weboob/backends/dlfp/__init__.py
|
Python
|
gpl-3.0
| 811
|
from vusion.persist import ModelManager
class TemplateManager(ModelManager):
    """Manager for template documents.

    Thin subclass of ModelManager; adds no behavior of its own yet.
    """
    def __init__(self, db, collection_name, **kwargs):
        # delegate straight to ModelManager
        super(TemplateManager, self).__init__(db, collection_name, **kwargs)
|
texttochange/vusion-backend
|
vusion/persist/template/template_manager.py
|
Python
|
bsd-3-clause
| 215
|
import os
import json
import yaml
import base64
import requests
class TWAPI:
def __init__(self, config_file='../config.yml'):
#print os.getcwd()
self.config_file = config_file
self.key = ''
self.secret = ''
self.bearer = ''
self.load_conf()
#self.show_conf()
def load_conf(self):
fd = open(self.config_file, 'r')
conf = yaml.load(fd)
self.key = conf['TwitterKey']
self.secret = conf['TwitterSecret']
fd.close()
self.get_bearer_token()
def show_conf(self):
print 'Twitter Key: ' + self.key
print 'Twitter Secret: ' + self.secret
print 'Twitter Bearer Token: ' + self.bearer
def get_bearer_token(self):
self.bearer = base64.b64encode('%s:%s' % (self.key, self.secret))
def search(self, query='', token='', max_posts=100, max_id=0):
if max_posts == 0:
return []
count = min(max_posts, 100)
url = 'https://api.twitter.com/1.1/search/tweets.json'
headers = {
'Authorization': 'Bearer ' + token
}
payload = {
'result_type': 'recent',
'q': query,
'count': count,
'max_id': max_id
}
ret_data = []
r = requests.get(url, params=payload, headers=headers)
#print r, r.text
if r.status_code == 200:
ret_data = [status for status in r.json()["statuses"]]
max_id = int(ret_data[-1]["id"]) - 1
ret_data = ret_data + self.search(query=query, token=token, max_posts=max_posts-count, max_id=max_id)
return ret_data
def users_info(self, users='', token='', include_entities=False):
url = 'https://api.twitter.com/1.1/users/lookup.json'
headers = {
'Authorization': 'Bearer ' + token
}
payload = {
'screen_name': users,
'include_entities': include_entities,
}
ret_data = []
r = requests.get(url, params=payload, headers=headers)
#print r, r.text
if r.status_code == 200:
ret_data = [user for user in r.json()]
return ret_data
def get_tweets(self, query='', count=100):
url = 'https://api.twitter.com/oauth2/token'
payload = {
'grant_type': 'client_credentials'
}
headers = {
'Authorization': 'Basic ' + self.bearer,
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
}
r = requests.post(url, data=payload, headers=headers)
if r.status_code == 200:
token = r.json()["access_token"]
return self.search(query=query, token=token, max_posts=count)
else:
print r.status_code
def get_users(self, users='', count=10):
url = 'https://api.twitter.com/oauth2/token'
payload = {
'grant_type': 'client_credentials'
}
headers = {
'Authorization': 'Basic ' + self.bearer,
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
}
r = requests.post(url, data=payload, headers=headers)
if r.status_code == 200:
token = r.json()["access_token"]
return self.users_info(users=users, token=token)
return user
else:
print r.status_code
if __name__ == '__main__':
    # The original called tw.get_(), which does not exist (AttributeError).
    # Run a harmless smoke check instead; presumably a search demo was
    # intended — TODO confirm the intended call (get_tweets?).
    tw = TWAPI()
    tw.show_conf()
|
gr33ndata/19898
|
nineteen898/twapi.py
|
Python
|
mit
| 3,510
|
"""
Copyright 2010 Jason Chu, Dusty Phillips, and Phil Schalm
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
# matches ${section:option} interpolation markers
section_re = re.compile(r'\${([^:]*):([^}]*)}')

def expand_value(value):
    """
    Converts ${buildout:directory}/something/something into
    (('buildout', 'directory'), '/something/something)
    """
    tokens = section_re.split(value)
    token_list = []
    if tokens[0]:
        token_list.append(tokens[0])
    # each ${...:...} marker contributes 3 split tokens; '//' keeps this an
    # int on Python 3 (plain '/' yields a float and breaks range()), while
    # producing the identical result on Python 2
    steps = (len(tokens) - 1) // 3
    for step in range(steps):
        token_list.append(tuple(tokens[step*3+1:step*3+3]))
        if tokens[step*3+3]:
            token_list.append(tokens[step*3+3])
    return tuple(token_list)
def compress_value(value):
    """
    Inverse of expand_value: joins plain strings and (section, option)
    pairs back into one '${section:option}'-style string.
    """
    pieces = [
        '${%s:%s}' % piece if isinstance(piece, tuple) else piece
        for piece in value
    ]
    return ''.join(pieces)
def simple_property_get(name):
    """Build a getter that reads self.section[name], defaulting to ''."""
    def get(self):
        return self.section[name] if name in self.section else ''
    return get

def simple_property_set(name):
    """Build a setter that stores into self.section[name]."""
    def set(self, value):
        self.section[name] = value
    return set

def simple_property_delete(name):
    """Build a deleter that removes name from self.section if present."""
    def delete(self):
        if name in self.section:
            del self.section[name]
    return delete

def simple_property(name):
    """Property over self.section[name] with plain-string semantics."""
    return property(simple_property_get(name),
                    simple_property_set(name),
                    simple_property_delete(name))
def bool_property_get(name):
    """Build a getter: '' when unset, True only for 'true' (case-insensitive)."""
    def get(self):
        if name not in self.section:
            return ''
        return self.section[name].lower() == 'true'
    return get

def bool_property_set(name):
    """Build a setter that stores the strings 'true'/'false'."""
    def set(self, value):
        # '== True' on purpose: only the exact value True (or 1, which
        # compares equal) is stored as 'true'; other truthy values are not
        self.section[name] = 'true' if value == True else 'false'
    return set

def bool_property(name):
    """Boolean property over self.section[name] ('true'/'false' strings)."""
    return property(bool_property_get(name),
                    bool_property_set(name),
                    simple_property_delete(name))
|
pnomolos/greatbigcrane
|
greatbigcrane/buildout_manage/recipetools.py
|
Python
|
apache-2.0
| 2,626
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Management script."""
import os
from glob import glob
from subprocess import call
from flask_migrate import Migrate, MigrateCommand
from flask_script import Command, Manager, Option, Server, Shell
from flask_script.commands import Clean, ShowUrls
from fansubs_cms.app import create_app
from fansubs_cms.database import db
from fansubs_cms.settings import DevConfig, ProdConfig
from fansubs_cms.user.models import User
# Select configuration from the FANSUBS_CMS_ENV environment variable
CONFIG = (
    ProdConfig
    if os.environ.get('FANSUBS_CMS_ENV') == 'prod'
    else DevConfig
)
# Paths relative to this manage.py file
HERE = os.path.abspath(os.path.dirname(__file__))
TEST_PATH = os.path.join(HERE, 'tests')
# Application, CLI manager and migration helper instances
app = create_app(CONFIG)
manager = Manager(app)
migrate = Migrate(app, db)
def _make_context():
    """Return the default namespace for an interactive shell session.

    Exposes the Flask app, the database handle, and the User model.
    """
    return dict(app=app, db=db, User=User)
@manager.command
def test():
    """Run the test suite under pytest and return its exit code."""
    import pytest
    return pytest.main([TEST_PATH, '--verbose'])
class Lint(Command):
    """Lint and check code style with flake8 and isort."""

    def get_options(self):
        """Command line options."""
        return (
            Option(
                '-f', '--fix-imports',
                action='store_true',
                dest='fix_imports',
                default=False,
                help='Fix imports using isort, before linting'
            ),
        )

    def run(self, fix_imports):
        """Run the configured linters over the project's python sources."""
        skip = ['requirements', 'migrations']
        root_files = glob('*.py')
        root_directories = [
            name for name in
            next(os.walk('.'))[1] if not name.startswith('.')
        ]
        files_and_directories = [
            arg for arg in root_files + root_directories if arg not in skip
        ]

        def execute_tool(description, *args):
            """Execute a checking tool with its arguments."""
            command_line = list(args) + files_and_directories
            print('{}: {}'.format(description, ' '.join(command_line)))
            rv = call(command_line)
            # '!=' instead of 'is not': identity comparison with an int
            # literal is implementation-defined and a SyntaxWarning on 3.8+
            if rv != 0:
                exit(rv)

        if fix_imports:
            execute_tool('Fixing import order', 'isort', '-rc')
        execute_tool('Checking code style', 'flake8')
# Register the CLI sub-commands on the manager
manager.add_command('server', Server())
manager.add_command('shell', Shell(make_context=_make_context))
manager.add_command('db', MigrateCommand)
manager.add_command('urls', ShowUrls())
manager.add_command('clean', Clean())
manager.add_command('lint', Lint())
if __name__ == '__main__':
    manager.run()
|
dyzajash/fansubs_cms
|
manage.py
|
Python
|
bsd-3-clause
| 2,659
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
#
# Copyright (c) 2008-2011 University of Dundee.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: Aleksandra Tarkowska <A(dot)Tarkowska(at)dundee(dot)ac(dot)uk>, 2008.
#
# Version: 1.0
#
import datetime
import time
import logging
import traceback
from django.conf import settings
from django import forms
from django.forms.widgets import Textarea
from django.forms.widgets import HiddenInput
from django.core.urlresolvers import reverse
from omeroweb.custom_forms import NonASCIIForm
from custom_forms import UrlField, MetadataModelChoiceField, \
AnnotationModelMultipleChoiceField, \
ObjectModelMultipleChoiceField
from omeroweb.webadmin.custom_forms import ExperimenterModelChoiceField, \
ExperimenterModelMultipleChoiceField, \
GroupModelMultipleChoiceField, GroupModelChoiceField
logger = logging.getLogger(__name__)
##################################################################
# Static values
# TODO: change to reverse
# Shared tooltip fragments (HTML snippets with a help icon) reused by the
# form-field definitions below.
help_button = "%swebgateway/img/help16.png" % settings.STATIC_URL
help_wiki = '<span id="markup" title="Markups - <small>If you\'d like to include URL please type:<br/><b>http://www.openmicroscopy.org.uk/</b></small>"><img src="%s" /></span>' % help_button
help_wiki_c = '<span id="markup_c" title="Markups - <small>If you\'d like to include URL please type:<br/><b>http://www.openmicroscopy.org.uk/</b></small>"><img src="%s" /></span>' % help_button
help_enable = '<span id="enable" title="Enable/Disable - <small>This option allows the owner to keep the access control of the share.</small>"><img src="%s" /></span>' % help_button
help_expire = '<span id="expire" title="Expire date - <small>This date defines when share will stop being available. Date format: YY-MM-DD.</small>"><img src="%s" /></span>' % help_button
#################################################################
# Non-model Form
class GlobalSearchForm(NonASCIIForm):
    """Single-field form for the global search box."""
    search_query = forms.CharField(widget=forms.TextInput(attrs={'size':25}))
class ShareForm(NonASCIIForm):
    """Form for creating/editing a share: message, expiry date, enable flag, members."""
    def __init__(self, *args, **kwargs):
        super(ShareForm, self).__init__(*args, **kwargs)
        try:
            if kwargs['initial']['shareMembers']: pass
            self.fields['members'] = ExperimenterModelMultipleChoiceField(queryset=kwargs['initial']['experimenters'], initial=kwargs['initial']['shareMembers'], widget=forms.SelectMultiple(attrs={'size':5}))
        except:
            # no pre-selected members supplied
            # NOTE(review): bare except also hides unrelated errors — consider KeyError
            self.fields['members'] = ExperimenterModelMultipleChoiceField(queryset=kwargs['initial']['experimenters'], widget=forms.SelectMultiple(attrs={'size':5}))
        self.fields.keyOrder = ['message', 'expiration', 'enable', 'members']#, 'guests']
    message = forms.CharField(widget=forms.Textarea(attrs={'rows': 7, 'cols': 39}), help_text=help_wiki_c)
    expiration = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':20}), label="Expire date", help_text=help_expire, required=False)
    enable = forms.CharField(widget=forms.CheckboxInput(attrs={'size':1}), required=False, help_text=help_enable)
    #guests = MultiEmailField(required=False, widget=forms.TextInput(attrs={'size':75}))
    def clean_expiration(self):
        """Validate the YY-MM-DD expiry date; empty means 'no expiry'."""
        if self.cleaned_data['expiration'] is not None and len(self.cleaned_data['expiration']) < 1:
            return None
        if self.cleaned_data['expiration'] is not None:
            d = str(self.cleaned_data['expiration']).rsplit("-")
            try:
                date = datetime.datetime.strptime(("%s-%s-%s" % (d[0],d[1],d[2])), "%Y-%m-%d")
            except:
                raise forms.ValidationError('Date is in the wrong format. YY-MM-DD')
            # expiry must lie strictly in the future
            if time.mktime(date.timetuple()) <= time.time():
                raise forms.ValidationError('Expire date must be in the future.')
        return self.cleaned_data['expiration']
class BasketShareForm(ShareForm):
    """ShareForm variant that also lets the user pick images from the basket."""
    def __init__(self, *args, **kwargs):
        super(BasketShareForm, self).__init__(*args, **kwargs)
        try:
            self.fields['image'] = GroupModelMultipleChoiceField(queryset=kwargs['initial']['images'], initial=kwargs['initial']['selected'], widget=forms.SelectMultiple(attrs={'size':10}))
        except:
            # no pre-selection supplied
            self.fields['image'] = GroupModelMultipleChoiceField(queryset=kwargs['initial']['images'], widget=forms.SelectMultiple(attrs={'size':10}))
class ContainerForm(NonASCIIForm):
    """Name + description form for creating a container (project/dataset/...)."""
    name = forms.CharField(max_length=250, widget=forms.TextInput(attrs={'size':45}))
    description = forms.CharField(widget=forms.Textarea(attrs={'rows': 2, 'cols': 49}), required=False, help_text=help_wiki)
class ContainerNameForm(NonASCIIForm):
    """Single-field form for renaming a container."""
    name = forms.CharField(max_length=250, widget=forms.TextInput(attrs={'size':45}))
class ContainerDescriptionForm(NonASCIIForm):
    """Single-field form for editing a container's description."""
    description = forms.CharField(widget=forms.Textarea(attrs={'rows': 3, 'cols': 39}), required=False)
class BaseAnnotationForm(NonASCIIForm):
    """
    This is the superclass of the various forms used for annotating single or multiple objects.
    All these forms use hidden fields to specify the object(s) currently being annotated.
    """

    # (plural key in kwargs['initial'], singular form-field name);
    # order matters — it is the field rendering order
    _OBJECT_TYPES = (
        ('images', 'image'),
        ('datasets', 'dataset'),
        ('projects', 'project'),
        ('screens', 'screen'),
        ('plates', 'plate'),
        ('acquisitions', 'acquisition'),
        ('wells', 'well'),
        ('shares', 'share'),
    )

    def __init__(self, *args, **kwargs):
        super(BaseAnnotationForm, self).__init__(*args, **kwargs)
        initial = kwargs['initial']
        # One multi-select field per object type that was passed in.
        # This replaces eight copy-pasted try/except stanzas with a single
        # data-driven loop; field names and creation order are unchanged.
        for key, field_name in self._OBJECT_TYPES:
            objects = key in initial and initial[key] or list()
            if len(objects) > 0:
                widget = forms.SelectMultiple(attrs={'size':10})
                try:
                    self.fields[field_name] = ObjectModelMultipleChoiceField(
                        queryset=objects,
                        initial=initial['selected'][key],
                        widget=widget, required=False)
                except:
                    # no pre-selection supplied for this type
                    # (bare except kept to preserve the original behavior)
                    self.fields[field_name] = ObjectModelMultipleChoiceField(
                        queryset=objects, widget=widget, required=False)
class TagsAnnotationForm(BaseAnnotationForm):
    """ Form for annotating one or more objects with existing Tags or New tags """
    def __init__(self, *args, **kwargs):
        super(TagsAnnotationForm, self).__init__(*args, **kwargs)
        # multi-select of already-existing tag annotations
        self.fields['tags'] = AnnotationModelMultipleChoiceField(queryset=kwargs['initial']['tags'],
            widget=forms.SelectMultiple(attrs={'size':6, 'class':'existing'}), required=False)
    # free-text inputs for creating a brand-new tag
    tag = forms.CharField(widget=forms.TextInput(attrs={'size':36}), required=False)
    description = forms.CharField(widget=forms.Textarea(attrs={'rows': 3, 'cols': 31}), required=False, label="Desc")
class FilesAnnotationForm(BaseAnnotationForm):
    """Form for attaching existing file annotations or uploading a new file."""
    def __init__(self, *args, **kwargs):
        super(FilesAnnotationForm, self).__init__(*args, **kwargs)
        # multi-select of already-uploaded file annotations
        self.fields['files'] = AnnotationModelMultipleChoiceField(queryset=kwargs['initial']['files'], widget=forms.SelectMultiple(attrs={'size':8, 'class':'existing'}), required=False)
    annotation_file = forms.FileField(required=False)
class CommentAnnotationForm(BaseAnnotationForm):
    """Form for adding a text comment to the selected object(s)."""
    comment = forms.CharField(widget=forms.Textarea(attrs={'rows': 2, 'cols': 39}))
class UsersForm(forms.Form):
    """Drop-down of experimenters; changing it reloads the page for that user."""
    def __init__(self, *args, **kwargs):
        super(UsersForm, self).__init__(*args, **kwargs)
        # optional initial values, each falling back to a default when absent
        try:
            empty_label = kwargs['initial']['empty_label']
        except:
            empty_label='---------'
        try:
            menu = kwargs['initial']['menu']
        except:
            menu = '----------'
        try:
            user = kwargs['initial']['user']
        except:
            user = None
        users = kwargs['initial']['users']
        # onchange JS navigates to the template view for the chosen experimenter
        self.fields['experimenter'] = ExperimenterModelChoiceField(queryset=users, initial=user, widget=forms.Select(attrs={'onchange':'window.location.href=\''+reverse(viewname="load_template", args=[menu])+'?experimenter=\'+this.options[this.selectedIndex].value'}), required=False, empty_label=empty_label)
        # with fewer than two users there is nothing to switch between
        if users is None or len(users)<2:
            self.fields['experimenter'].widget.attrs['disabled'] = True
            self.fields['experimenter'].widget.attrs['class'] = 'disabled'
        self.fields.keyOrder = ['experimenter']
class ActiveGroupForm(forms.Form):
    """Drop-down of the user's groups; changing it switches the active group."""
    def __init__(self, *args, **kwargs):
        super(ActiveGroupForm, self).__init__(*args, **kwargs)
        try:
            # variant with a return 'url' in kwargs['initial']
            self.fields['active_group'] = GroupModelChoiceField(queryset=kwargs['initial']['mygroups'], initial=kwargs['initial']['activeGroup'], empty_label=None, widget=forms.Select(attrs={'onchange':'window.location.href=\''+reverse(viewname="change_active_group")+'?url='+kwargs['initial']['url']+'&active_group=\'+this.options[this.selectedIndex].value'}))
        except:
            # no 'url' supplied: redirect without a return url
            self.fields['active_group'] = GroupModelChoiceField(queryset=kwargs['initial']['mygroups'], initial=kwargs['initial']['activeGroup'], empty_label=None, widget=forms.Select(attrs={'onchange':'window.location.href=\''+reverse(viewname="change_active_group")+'?active_group=\'+this.options[this.selectedIndex].value'}))
        self.fields.keyOrder = ['active_group']
class WellIndexForm(forms.Form):
    """Drop-down for picking a field index within a plate well."""
    def __init__(self, *args, **kwargs):
        super(WellIndexForm, self).__init__(*args, **kwargs)
        # 'range' is an inclusive (min, max) pair of field indexes
        rmin, rmax = kwargs['initial']['range']
        # label fields 1..N regardless of the absolute index values
        choices = [(str(i), "Field#%i" % (i-rmin+1)) for i in range(rmin, rmax+1)]
        self.fields['index'] = forms.ChoiceField(choices=tuple(choices), widget=forms.Select(attrs={'onchange':'changeFiled(this.options[this.selectedIndex].value);'}))
        self.fields.keyOrder = ['index']
###############################
# METADATA FORMS
class MetadataChannelForm(forms.Form):
    """Metadata form for a single logical channel.

    ``kwargs['initial']`` supplies 'logicalChannel' plus the enumeration
    querysets 'illuminations', 'contrastMethods' and 'modes'.  Each field
    persists edits through the page's JavaScript ``saveMetadata(id, field,
    value)`` callback.  When the channel (or one of its attributes) is
    unavailable, the field degrades to a disabled "N/A" placeholder.

    Bug fix: the save keys for excitationWave, emissionWave, ndFilter,
    pinHoleSize, fluor and pockelCellSetting previously all read 'name',
    so editing any of them overwrote the channel name.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataChannelForm, self).__init__(*args, **kwargs)
        # Logical channel
        # Name
        try:
            if kwargs['initial']['logicalChannel'] is not None:
                self.fields['name'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'name\', this.value);'}), initial=kwargs['initial']['logicalChannel'].name, required=False)
            else:
                self.fields['name'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'name\', this.value);'}), required=False)
                self.fields['name'].widget.attrs['disabled'] = True
                self.fields['name'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            # Channel not available - disabled N/A placeholder.
            self.fields['name'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['name'].widget.attrs['disabled'] = True
            self.fields['name'].widget.attrs['class'] = 'disabled-metadata'
        # excitationWave (save key fixed: was 'name')
        try:
            if kwargs['initial']['logicalChannel'] is not None:
                self.fields['excitationWave'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'excitationWave\', this.value);'}), initial=kwargs['initial']['logicalChannel'].excitationWave, label="Excitation", required=False)
            else:
                self.fields['excitationWave'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'excitationWave\', this.value);'}), label="Excitation", required=False)
                self.fields['excitationWave'].widget.attrs['disabled'] = True
                self.fields['excitationWave'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['excitationWave'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Excitation", required=False)
            self.fields['excitationWave'].widget.attrs['disabled'] = True
            self.fields['excitationWave'].widget.attrs['class'] = 'disabled-metadata'
        # emissionWave (save key fixed: was 'name')
        try:
            if kwargs['initial']['logicalChannel'] is not None:
                self.fields['emissionWave'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'emissionWave\', this.value);'}), initial=kwargs['initial']['logicalChannel'].emissionWave, label="Emission", required=False)
            else:
                self.fields['emissionWave'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'emissionWave\', this.value);'}), label="Emission", required=False)
                self.fields['emissionWave'].widget.attrs['disabled'] = True
                self.fields['emissionWave'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['emissionWave'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Emission", required=False)
            self.fields['emissionWave'].widget.attrs['disabled'] = True
            self.fields['emissionWave'].widget.attrs['class'] = 'disabled-metadata'
        # ndFilter (save key fixed: was 'name')
        try:
            if kwargs['initial']['logicalChannel'] is not None:
                self.fields['ndFilter'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'ndFilter\', this.value);'}), initial=kwargs['initial']['logicalChannel'].ndFilter, label="ND filter [%]", required=False)
            else:
                self.fields['ndFilter'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'ndFilter\', this.value);'}), label="ND filter [%]", required=False)
                self.fields['ndFilter'].widget.attrs['disabled'] = True
        except Exception:
            self.fields['ndFilter'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="ND filter [%]", required=False)
            self.fields['ndFilter'].widget.attrs['disabled'] = True
        # pinHoleSize (save key fixed: was 'name')
        try:
            if kwargs['initial']['logicalChannel'] is not None:
                self.fields['pinHoleSize'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'pinHoleSize\', this.value);'}), initial=kwargs['initial']['logicalChannel'].pinHoleSize, label="Pin hole size", required=False)
            else:
                self.fields['pinHoleSize'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'pinHoleSize\', this.value);'}), label="Pin hole size", required=False)
                self.fields['pinHoleSize'].widget.attrs['disabled'] = True
        except Exception:
            self.fields['pinHoleSize'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Pin hole size", required=False)
            self.fields['pinHoleSize'].widget.attrs['disabled'] = True
        # fluor (save key fixed: was 'name')
        try:
            if kwargs['initial']['logicalChannel'] is not None:
                self.fields['fluor'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'fluor\', this.value);'}), initial=kwargs['initial']['logicalChannel'].fluor, required=False)
            else:
                self.fields['fluor'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'fluor\', this.value);'}), required=False)
                self.fields['fluor'].widget.attrs['disabled'] = True
        except Exception:
            self.fields['fluor'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['fluor'].widget.attrs['disabled'] = True
        # Illumination (enumeration drop-down)
        try:
            if kwargs['initial']['logicalChannel'].getIllumination() is not None:
                self.fields['illumination'] = MetadataModelChoiceField(queryset=kwargs['initial']['illuminations'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'illumination\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['logicalChannel'].getIllumination(), required=False)
            else:
                self.fields['illumination'] = MetadataModelChoiceField(queryset=kwargs['initial']['illuminations'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'illumination\', this.options[this.selectedIndex].value);'}), required=False)
                self.fields['illumination'].widget.attrs['disabled'] = True
        except Exception:
            self.fields['illumination'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['illumination'].widget.attrs['disabled'] = True
        # contrastMethod (enumeration drop-down)
        try:
            if kwargs['initial']['logicalChannel'].contrastMethod is not None:
                self.fields['contrastMethod'] = MetadataModelChoiceField(queryset=kwargs['initial']['contrastMethods'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'contrastMethod\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['logicalChannel'].getContrastMethod(), label="Contrast method", required=False)
            else:
                self.fields['contrastMethod'] = MetadataModelChoiceField(queryset=kwargs['initial']['contrastMethods'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'contrastMethod\', this.options[this.selectedIndex].value);'}), label="Contrast method", required=False)
                self.fields['contrastMethod'].widget.attrs['disabled'] = True
                self.fields['contrastMethod'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['contrastMethod'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Contrast method", required=False)
            self.fields['contrastMethod'].widget.attrs['disabled'] = True
            self.fields['contrastMethod'].widget.attrs['class'] = 'disabled-metadata'
        # Mode (enumeration drop-down)
        try:
            if kwargs['initial']['logicalChannel'].getMode() is not None:
                self.fields['mode'] = MetadataModelChoiceField(queryset=kwargs['initial']['modes'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'mode\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['logicalChannel'].getMode().value, required=False)
            else:
                self.fields['mode'] = MetadataModelChoiceField(queryset=kwargs['initial']['modes'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'mode\', this.options[this.selectedIndex].value);'}), required=False)
                self.fields['mode'].widget.attrs['disabled'] = True
                self.fields['mode'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['mode'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['mode'].widget.attrs['disabled'] = True
            self.fields['mode'].widget.attrs['class'] = 'disabled-metadata'
        # pockelCellSetting (save key fixed: was 'name')
        try:
            if kwargs['initial']['logicalChannel'].pockelCellSetting is not None:
                self.fields['pockelCellSetting'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'pockelCellSetting\', this.value);'}), initial=kwargs['initial']['logicalChannel'].pockelCellSetting, label="Pockel cell", required=False)
            else:
                self.fields['pockelCellSetting'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['logicalChannel'].id)+', \'pockelCellSetting\', this.value);'}), label="Pockel cell", required=False)
                self.fields['pockelCellSetting'].widget.attrs['disabled'] = True
                self.fields['pockelCellSetting'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['pockelCellSetting'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Pockel cell" ,required=False)
            self.fields['pockelCellSetting'].widget.attrs['disabled'] = True
            self.fields['pockelCellSetting'].widget.attrs['class'] = 'disabled-metadata'
        self.fields.keyOrder = ['name', 'excitationWave', 'emissionWave', 'ndFilter', 'pinHoleSize', 'fluor', 'illumination', 'contrastMethod', 'mode', 'pockelCellSetting']
class MetadataDichroicForm(forms.Form):
    """Metadata form for a dichroic component.

    ``kwargs['initial']['dichroic']`` supplies the object; edits are saved
    via the page's JavaScript ``saveMetadata(id, field, value)`` callback.
    Unavailable attributes fall back to disabled "N/A" placeholders.

    Bug fix: serialNumber/lotNumber previously passed the attribute *value*
    (``.serialNumber``/``.lotNumber``) as the object id to ``saveMetadata``
    instead of ``.id`` as every other field does.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataDichroicForm, self).__init__(*args, **kwargs)
        # Manufacturer
        try:
            if kwargs['initial']['dichroic'].manufacturer is not None:
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['dichroic'].id)+', \'manufacturer\', this.value);'}), initial=kwargs['initial']['dichroic'].manufacturer, required=False)
            else:
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['dichroic'].id)+', \'manufacturer\', this.value);'}), required=False)
                self.fields['manufacturer'].widget.attrs['disabled'] = True
                self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            # Dichroic not available - disabled N/A placeholder.
            self.fields['manufacturer'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['manufacturer'].widget.attrs['disabled'] = True
            self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        # Model
        try:
            if kwargs['initial']['dichroic'].model is not None:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['dichroic'].id)+', \'model\', this.value);'}), initial=kwargs['initial']['dichroic'].model, required=False)
            else:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['dichroic'].id)+', \'model\', this.value);'}), required=False)
                self.fields['model'].widget.attrs['disabled'] = True
                self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['model'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['model'].widget.attrs['disabled'] = True
            self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        # Serial number (id fixed: previously passed .serialNumber as the id)
        try:
            if kwargs['initial']['dichroic'].serialNumber is not None:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['dichroic'].id)+', \'serialNumber\', this.value);'}), initial=kwargs['initial']['dichroic'].serialNumber, label="Serial number", required=False)
            else:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['dichroic'].id)+', \'serialNumber\', this.value);'}), label="Serial number", required=False)
                self.fields['serialNumber'].widget.attrs['disabled'] = True
                self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['serialNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Serial number", required=False)
            self.fields['serialNumber'].widget.attrs['disabled'] = True
            self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        # Lot number (id fixed: previously passed .lotNumber as the id)
        try:
            if kwargs['initial']['dichroic'].lotNumber is not None:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['dichroic'].id)+', \'lotNumber\', this.value);'}), initial=kwargs['initial']['dichroic'].lotNumber, label="Lot number", required=False)
            else:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['dichroic'].id)+', \'lotNumber\', this.value);'}), label="Lot number", required=False)
                self.fields['lotNumber'].widget.attrs['disabled'] = True
                self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['lotNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Lot number", required=False)
            self.fields['lotNumber'].widget.attrs['disabled'] = True
            self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        self.fields.keyOrder = ['manufacturer', 'model', 'serialNumber', 'lotNumber']
class MetadataMicroscopeForm(forms.Form):
    """Metadata form for a microscope.

    ``kwargs['initial']`` supplies 'microscope' and the 'microscopeTypes'
    queryset; edits are saved via the page's JavaScript
    ``saveMetadata(id, field, value)`` callback.  Unavailable attributes
    fall back to disabled "N/A" placeholders.

    Bug fixes: the serialNumber field previously saved under the key
    'lotNumber' (clobbering the lot number), and the lotNumber field was
    labelled "Serial number".
    """

    def __init__(self, *args, **kwargs):
        super(MetadataMicroscopeForm, self).__init__(*args, **kwargs)
        # Model
        try:
            if kwargs['initial']['microscope'].model is not None:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'model\', this.value);'}), initial=kwargs['initial']['microscope'].model, required=False)
            else:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'model\', this.value);'}), required=False)
                self.fields['model'].widget.attrs['disabled'] = True
                self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            # Microscope not available - disabled N/A placeholder.
            self.fields['model'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['model'].widget.attrs['disabled'] = True
            self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        # Manufacturer
        try:
            if kwargs['initial']['microscope'].manufacturer is not None:
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'manufacturer\', this.value);'}), initial=kwargs['initial']['microscope'].manufacturer, required=False)
            else:
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'manufacturer\', this.value);'}), required=False)
                self.fields['manufacturer'].widget.attrs['disabled'] = True
                self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['manufacturer'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['manufacturer'].widget.attrs['disabled'] = True
            self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        # Serial number (save key fixed: was 'lotNumber')
        try:
            if kwargs['initial']['microscope'].serialNumber is not None:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'serialNumber\', this.value);'}), initial=kwargs['initial']['microscope'].serialNumber, label="Serial number", required=False)
            else:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'serialNumber\', this.value);'}), label="Serial number", required=False)
                self.fields['serialNumber'].widget.attrs['disabled'] = True
                self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['serialNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Serial number", required=False)
            self.fields['serialNumber'].widget.attrs['disabled'] = True
            self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        # Lot number (label fixed: was "Serial number")
        try:
            if kwargs['initial']['microscope'].lotNumber is not None:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'lotNumber\', this.value);'}), initial=kwargs['initial']['microscope'].lotNumber, label="Lot number", required=False)
            else:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'lotNumber\', this.value);'}), label="Lot number", required=False)
                self.fields['lotNumber'].widget.attrs['disabled'] = True
                self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['lotNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Lot number", required=False)
            self.fields['lotNumber'].widget.attrs['disabled'] = True
            self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        # Type (enumeration drop-down)
        try:
            if kwargs['initial']['microscope'].getMicroscopeType() is not None:
                self.fields['type'] = MetadataModelChoiceField(queryset=kwargs['initial']['microscopeTypes'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'type\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['microscope'].getMicroscopeType().value, required=False)
            else:
                self.fields['type'] = MetadataModelChoiceField(queryset=kwargs['initial']['microscopeTypes'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['microscope'].id)+', \'type\', this.options[this.selectedIndex].value);'}), required=False)
                self.fields['type'].widget.attrs['disabled'] = True
                self.fields['type'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['type'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['type'].widget.attrs['disabled'] = True
            self.fields['type'].widget.attrs['class'] = 'disabled-metadata'
        self.fields.keyOrder = ['model', 'manufacturer', 'serialNumber', 'lotNumber', 'type']
class MetadataObjectiveForm(forms.Form):
    """Metadata form for an objective.

    ``kwargs['initial']`` supplies 'objective' plus the enumeration
    querysets 'immersions' and 'corrections'; edits are saved via the
    page's JavaScript ``saveMetadata(id, field, value)`` callback.
    Unavailable attributes fall back to disabled "N/A" placeholders.

    Bug fixes: the lotNumber field previously read the wrong initial key
    ('logicalchannel') and passed the lot-number value as the id to
    ``saveMetadata``; it was also labelled "Serial number".  The iris
    field references ``self.BOOLEAN_CHOICES``, which used to exist only on
    the MetadataObjectiveSettingsForm subclass - it is now defined here so
    the form also works when instantiated directly.
    """

    # Choices for the boolean 'iris' drop-down (subclass redefines the
    # same tuple for backwards compatibility).
    BOOLEAN_CHOICES = (
        ('', '---------'),
        ('True', 'True'),
        ('False', 'False'),
    )

    def __init__(self, *args, **kwargs):
        super(MetadataObjectiveForm, self).__init__(*args, **kwargs)
        # Model
        try:
            if kwargs['initial']['objective'].model is not None:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'model\', this.value);'}), initial=kwargs['initial']['objective'].model, required=False)
            else:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'model\', this.value);'}), required=False)
                self.fields['model'].widget.attrs['disabled'] = True
                self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            # Objective not available - disabled N/A placeholder.
            self.fields['model'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['model'].widget.attrs['disabled'] = True
            self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        # Manufacturer
        try:
            if kwargs['initial']['objective'].manufacturer is not None:
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'manufacturer\', this.value);'}), initial=kwargs['initial']['objective'].manufacturer, required=False)
            else:
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'manufacturer\', this.value);'}), required=False)
                self.fields['manufacturer'].widget.attrs['disabled'] = True
                self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['manufacturer'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['manufacturer'].widget.attrs['disabled'] = True
            self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        # Serial Number
        try:
            if kwargs['initial']['objective'].serialNumber is not None:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'serialNumber\', this.value);'}), initial=kwargs['initial']['objective'].serialNumber, label="Serial number", required=False)
            else:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'serialNumber\', this.value);'}), label="Serial number", required=False)
                self.fields['serialNumber'].widget.attrs['disabled'] = True
                self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['serialNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Serial number", required=False)
            self.fields['serialNumber'].widget.attrs['disabled'] = True
            self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        # Lot number (fixed: reads 'objective', saves against .id, labelled "Lot number")
        try:
            if kwargs['initial']['objective'].lotNumber is not None:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'lotNumber\', this.value);'}), initial=kwargs['initial']['objective'].lotNumber, label="Lot number", required=False)
            else:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'lotNumber\', this.value);'}), label="Lot number", required=False)
                self.fields['lotNumber'].widget.attrs['disabled'] = True
                self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['lotNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Lot number", required=False)
            self.fields['lotNumber'].widget.attrs['disabled'] = True
            self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        # Nominal Magnification
        try:
            if kwargs['initial']['objective'].nominalMagnification is not None:
                self.fields['nominalMagnification'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'nominalMagnification\', this.value);'}), initial=kwargs['initial']['objective'].nominalMagnification, label="Nominal magnification", required=False)
            else:
                self.fields['nominalMagnification'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'nominalMagnification\', this.value);'}), label="Nominal magnification", required=False)
                self.fields['nominalMagnification'].widget.attrs['disabled'] = True
                self.fields['nominalMagnification'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['nominalMagnification'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Nominal magnification", required=False)
            self.fields['nominalMagnification'].widget.attrs['disabled'] = True
            self.fields['nominalMagnification'].widget.attrs['class'] = 'disabled-metadata'
        # Calibrated Magnification
        try:
            if kwargs['initial']['objective'].calibratedMagnification is not None:
                self.fields['calibratedMagnification'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'calibratedMagnification\', this.value);'}), initial=kwargs['initial']['objective'].calibratedMagnification, label="Calibrated magnification", required=False)
            else:
                self.fields['calibratedMagnification'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'calibratedMagnification\', this.value);'}), label="Calibrated magnification", required=False)
                self.fields['calibratedMagnification'].widget.attrs['disabled'] = True
                self.fields['calibratedMagnification'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['calibratedMagnification'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Calibrated magnification", required=False)
            self.fields['calibratedMagnification'].widget.attrs['disabled'] = True
            self.fields['calibratedMagnification'].widget.attrs['class'] = 'disabled-metadata'
        # Lens NA
        try:
            if kwargs['initial']['objective'].lensNA is not None:
                self.fields['lensNA'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'lensNA\', this.value);'}), initial=kwargs['initial']['objective'].lensNA, label="Lens NA", required=False)
            else:
                self.fields['lensNA'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'lensNA\', this.value);'}), label="Lens NA", required=False)
                self.fields['lensNA'].widget.attrs['disabled'] = True
                self.fields['lensNA'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['lensNA'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Lens NA", required=False)
            self.fields['lensNA'].widget.attrs['disabled'] = True
            self.fields['lensNA'].widget.attrs['class'] = 'disabled-metadata'
        # Immersion (enumeration drop-down)
        try:
            if kwargs['initial']['objective'].getImmersion() is not None:
                self.fields['immersion'] = MetadataModelChoiceField(queryset=kwargs['initial']['immersions'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['objective'].id)+', \'immersion\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['objective'].getImmersion().value, required=False)
            else:
                self.fields['immersion'] = MetadataModelChoiceField(queryset=kwargs['initial']['immersions'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['objective'].id)+', \'immersion\', this.options[this.selectedIndex].value);'}), required=False)
                self.fields['immersion'].widget.attrs['disabled'] = True
                self.fields['immersion'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['immersion'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['immersion'].widget.attrs['disabled'] = True
            self.fields['immersion'].widget.attrs['class'] = 'disabled-metadata'
        # Correction (enumeration drop-down)
        try:
            if kwargs['initial']['objective'].getCorrection() is not None:
                self.fields['correction'] = MetadataModelChoiceField(queryset=kwargs['initial']['corrections'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['objective'].id)+', \'correction\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['objective'].getCorrection().value, required=False)
            else:
                self.fields['correction'] = MetadataModelChoiceField(queryset=kwargs['initial']['corrections'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['objective'].id)+', \'correction\', this.options[this.selectedIndex].value);'}), required=False)
                self.fields['correction'].widget.attrs['disabled'] = True
                self.fields['correction'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['correction'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['correction'].widget.attrs['disabled'] = True
            self.fields['correction'].widget.attrs['class'] = 'disabled-metadata'
        # Working Distance
        try:
            if kwargs['initial']['objective'].workingDistance is not None:
                self.fields['workingDistance'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'workingDistance\', this.value);'}), initial=kwargs['initial']['objective'].workingDistance, label="Working distance", required=False)
            else:
                self.fields['workingDistance'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'workingDistance\', this.value);'}), label="Working distance", required=False)
                self.fields['workingDistance'].widget.attrs['disabled'] = True
                self.fields['workingDistance'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['workingDistance'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Working distance", required=False)
            self.fields['workingDistance'].widget.attrs['disabled'] = True
            self.fields['workingDistance'].widget.attrs['class'] = 'disabled-metadata'
        # Iris (boolean drop-down; BOOLEAN_CHOICES now defined on this class)
        try:
            if kwargs['initial']['objective'].getIris() is not None:
                self.fields['iris'] = forms.ChoiceField(choices=self.BOOLEAN_CHOICES, widget=forms.Select(attrs={'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'iris\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['objective'].getIris().value, required=False)
            else:
                self.fields['iris'] = forms.ChoiceField(choices=self.BOOLEAN_CHOICES, widget=forms.Select(attrs={'onchange':'javascript:saveMetadata('+str(kwargs['initial']['objective'].id)+', \'iris\', this.options[this.selectedIndex].value);'}), required=False)
                self.fields['iris'].widget.attrs['disabled'] = True
                self.fields['iris'].widget.attrs['class'] = 'disabled-metadata'
        except Exception:
            self.fields['iris'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['iris'].widget.attrs['disabled'] = True
            self.fields['iris'].widget.attrs['class'] = 'disabled-metadata'
        self.fields.keyOrder = ['model', 'manufacturer', 'serialNumber', 'lotNumber', 'nominalMagnification', 'calibratedMagnification', 'lensNA', 'immersion', 'correction', 'workingDistance', 'iris']
class MetadataObjectiveSettingsForm(MetadataObjectiveForm):
    BOOLEAN_CHOICES = (
        ('', '---------'),
        ('True', 'True'),
        ('False', 'False'),
    )
    def __init__(self, *args, **kwargs):
        """Extend the objective form with objective-settings fields.

        Builds 'correctionCollar', 'medium' and 'refractiveIndex' from
        ``kwargs['initial']['objectiveSettings']``.  A field is editable
        (wired to the JS ``saveMetadata`` handler) when its value is set,
        disabled when the value is None, and replaced by a disabled
        "N/A" placeholder when the settings object (or an attribute on
        it) is unavailable — the bare ``except`` per field implements
        that fallback.
        """
        super(MetadataObjectiveSettingsForm, self).__init__(*args, **kwargs)

        def disable(name):
            # Grey the field out and make it non-editable.
            widget = self.fields[name].widget
            widget.attrs['disabled'] = True
            widget.attrs['class'] = 'disabled-metadata'

        def na_field(name, label=None):
            # Disabled "N/A" placeholder used when metadata is missing.
            self.fields[name] = forms.CharField(
                max_length=10, widget=forms.TextInput(attrs={'size': 25}),
                initial="N/A", label=label, required=False)
            disable(name)

        # Objective Settings
        # Correction Collar
        try:
            settings = kwargs['initial']['objectiveSettings']
            handler = ("javascript:saveMetadata(" + str(settings.id) +
                       ", 'correctionCollar', this.value);")
            widget = forms.TextInput(attrs={'size': 25, 'onchange': handler})
            if settings.correctionCollar is not None:
                self.fields['correctionCollar'] = forms.CharField(
                    max_length=100, widget=widget,
                    initial=settings.correctionCollar,
                    label="Correction collar", required=False)
            else:
                self.fields['correctionCollar'] = forms.CharField(
                    max_length=100, widget=widget,
                    label="Correction collar", required=False)
                disable('correctionCollar')
        except:
            na_field('correctionCollar', label="Correction collar")
        # Medium
        try:
            settings = kwargs['initial']['objectiveSettings']
            handler = ("saveMetadata(" + str(settings.id) +
                       ", 'medium', this.options[this.selectedIndex].value);")
            widget = forms.Select(attrs={'onchange': handler})
            medium = settings.getMedium()
            if medium is not None:
                self.fields['medium'] = MetadataModelChoiceField(
                    queryset=kwargs['initial']['mediums'],
                    empty_label=u"Not set", widget=widget,
                    initial=medium.value, required=False)
            else:
                self.fields['medium'] = MetadataModelChoiceField(
                    queryset=kwargs['initial']['mediums'],
                    empty_label=u"Not set", widget=widget, required=False)
                disable('medium')
        except:
            na_field('medium')
        # Refractive Index
        try:
            settings = kwargs['initial']['objectiveSettings']
            handler = ("javascript:saveMetadata(" + str(settings.id) +
                       ", 'refractiveIndex', this.value);")
            widget = forms.TextInput(attrs={'size': 25, 'onchange': handler})
            if settings.refractiveIndex is not None:
                self.fields['refractiveIndex'] = forms.CharField(
                    max_length=100, widget=widget,
                    initial=settings.refractiveIndex,
                    label="Refractive index", required=False)
            else:
                self.fields['refractiveIndex'] = forms.CharField(
                    max_length=100, widget=widget,
                    label="Refractive index", required=False)
                disable('refractiveIndex')
        except:
            na_field('refractiveIndex', label="Refractive index")
        self.fields.keyOrder = [
            'model', 'manufacturer', 'serialNumber', 'lotNumber',
            'nominalMagnification', 'calibratedMagnification', 'lensNA',
            'immersion', 'correction', 'workingDistance', 'iris',
            'correctionCollar', 'medium', 'refractiveIndex']
class MetadataFilterForm(forms.Form):
    """Metadata form for a Filter (manufacturer, model, serial/lot number,
    type, filter wheel and the transmittance-range attributes).

    Each field is built from ``kwargs['initial']['filter']``: when the
    underlying value is set, the field is editable and wired to the JS
    ``saveMetadata`` handler via its widget's ``onchange`` attribute;
    when the value is None the field is rendered disabled.  The bare
    ``except`` per field catches a missing 'filter' entry (or attribute
    errors from the gateway wrapper) and substitutes a disabled "N/A"
    placeholder instead.
    """
    def __init__(self, *args, **kwargs):
        super(MetadataFilterForm, self).__init__(*args, **kwargs)
        # Filter
        # Manufacturer
        try:
            if kwargs['initial']['filter'].manufacturer is not None:
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'manufacturer\', this.value);'}), initial=kwargs['initial']['filter'].manufacturer, required=False)
            else:
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'manufacturer\', this.value);'}), required=False)
                self.fields['manufacturer'].widget.attrs['disabled'] = True
                self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['manufacturer'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['manufacturer'].widget.attrs['disabled'] = True
            self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        # Model
        try:
            if kwargs['initial']['filter'].model is not None:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'model\', this.value);'}), initial=kwargs['initial']['filter'].model, required=False)
            else:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'model\', this.value);'}), required=False)
                self.fields['model'].widget.attrs['disabled'] = True
                self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['model'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['model'].widget.attrs['disabled'] = True
            self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        # Serial Number
        try:
            if kwargs['initial']['filter'].serialNumber is not None:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'serialNumber\', this.value);'}), initial=kwargs['initial']['filter'].serialNumber, label="Serial number", required=False)
            else:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'serialNumber\', this.value);'}), label="Serial number", required=False)
                self.fields['serialNumber'].widget.attrs['disabled'] = True
                self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['serialNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Serial number", required=False)
            self.fields['serialNumber'].widget.attrs['disabled'] = True
            self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        # Lot number
        try:
            if kwargs['initial']['filter'].lotNumber is not None:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'lotNumber\', this.value);'}), initial=kwargs['initial']['filter'].lotNumber, label="Lot number", required=False)
            else:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'lotNumber\', this.value);'}), label="Lot number", required=False)
                self.fields['lotNumber'].widget.attrs['disabled'] = True
                self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['lotNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Lot number", required=False)
            self.fields['lotNumber'].widget.attrs['disabled'] = True
            self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        # Filter wheel
        try:
            if kwargs['initial']['filter'].filterWheel is not None:
                self.fields['filterWheel'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'filterWheel\', this.value);'}), initial=kwargs['initial']['filter'].filterWheel, label="Filter wheel", required=False)
            else:
                self.fields['filterWheel'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'filterWheel\', this.value);'}), label="Filter wheel", required=False)
                self.fields['filterWheel'].widget.attrs['disabled'] = True
                self.fields['filterWheel'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['filterWheel'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Filter wheel", required=False)
            self.fields['filterWheel'].widget.attrs['disabled'] = True
            self.fields['filterWheel'].widget.attrs['class'] = 'disabled-metadata'
        # Type
        try:
            if kwargs['initial']['filter'].getFilterType() is not None:
                self.fields['type'] = MetadataModelChoiceField(queryset=kwargs['initial']['types'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['filter'].id)+', \'type\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['filter'].getFilterType().value, required=False)
            else:
                self.fields['type'] = MetadataModelChoiceField(queryset=kwargs['initial']['types'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['filter'].id)+', \'type\', this.options[this.selectedIndex].value);'}), required=False)
                self.fields['type'].widget.attrs['disabled'] = True
                self.fields['type'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['type'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['type'].widget.attrs['disabled'] = True
            self.fields['type'].widget.attrs['class'] = 'disabled-metadata'
        # Cut in
        try:
            if kwargs['initial']['filter'].transmittanceRange is not None:
                self.fields['cutIn'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'cutIn\', this.value);'}), initial=kwargs['initial']['filter'].getTransmittanceRange().cutIn, label="Cut in", required=False)
            else:
                self.fields['cutIn'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'cutIn\', this.value);'}), label="Cut in", required=False)
                self.fields['cutIn'].widget.attrs['disabled'] = True
                self.fields['cutIn'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['cutIn'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Cut in", required=False)
            self.fields['cutIn'].widget.attrs['disabled'] = True
            self.fields['cutIn'].widget.attrs['class'] = 'disabled-metadata'
        # Cut out
        try:
            if kwargs['initial']['filter'].transmittanceRange is not None:
                self.fields['cutOut'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'cutOut\', this.value);'}), initial=kwargs['initial']['filter'].getTransmittanceRange().cutOut, label="Cut out", required=False)
            else:
                self.fields['cutOut'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'cutOut\', this.value);'}), label="Cut out", required=False)
                self.fields['cutOut'].widget.attrs['disabled'] = True
                self.fields['cutOut'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['cutOut'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Cut out", required=False)
            self.fields['cutOut'].widget.attrs['disabled'] = True
            self.fields['cutOut'].widget.attrs['class'] = 'disabled-metadata'
        # Cut in tolerance
        try:
            if kwargs['initial']['filter'].transmittanceRange is not None:
                self.fields['cutInTolerance'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'cutInTolerance\', this.value);'}), initial=kwargs['initial']['filter'].getTransmittanceRange().cutInTolerance, label="Cut in tolerance", required=False)
            else:
                self.fields['cutInTolerance'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'cutInTolerance\', this.value);'}), label="Cut in tolerance", required=False)
                self.fields['cutInTolerance'].widget.attrs['disabled'] = True
                self.fields['cutInTolerance'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['cutInTolerance'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Cut in tolerance", required=False)
            self.fields['cutInTolerance'].widget.attrs['disabled'] = True
            self.fields['cutInTolerance'].widget.attrs['class'] = 'disabled-metadata'
        # Cut out tolerance
        try:
            if kwargs['initial']['filter'].transmittanceRange is not None:
                # Bugfix: save under 'cutOutTolerance' (was 'cutOut', which
                # made edits to the tolerance clobber the cut-out value).
                self.fields['cutOutTolerance'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'cutOutTolerance\', this.value);'}), initial=kwargs['initial']['filter'].getTransmittanceRange().cutOutTolerance, label="Cut out tolerance", required=False)
            else:
                self.fields['cutOutTolerance'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'cutOutTolerance\', this.value);'}), label="Cut out tolerance", required=False)
                self.fields['cutOutTolerance'].widget.attrs['disabled'] = True
                self.fields['cutOutTolerance'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['cutOutTolerance'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Cut out tolerance", required=False)
            self.fields['cutOutTolerance'].widget.attrs['disabled'] = True
            self.fields['cutOutTolerance'].widget.attrs['class'] = 'disabled-metadata'
        # Transmittance
        try:
            if kwargs['initial']['filter'].transmittanceRange is not None:
                self.fields['transmittance'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'transmittance\', this.value);'}), initial=kwargs['initial']['filter'].getTransmittanceRange().transmittance, required=False)
            else:
                self.fields['transmittance'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['filter'].id)+', \'transmittance\', this.value);'}), required=False)
                self.fields['transmittance'].widget.attrs['disabled'] = True
                self.fields['transmittance'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['transmittance'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['transmittance'].widget.attrs['disabled'] = True
            self.fields['transmittance'].widget.attrs['class'] = 'disabled-metadata'
        self.fields.keyOrder = ['manufacturer', 'model', 'serialNumber', 'lotNumber', 'type', 'filterWheel', 'cutIn', 'cutOut', 'cutInTolerance', 'cutOutTolerance', 'transmittance']
class MetadataDetectorForm(forms.Form):
    """Metadata form for a Detector and its acquisition DetectorSettings.

    Hardware fields (manufacturer, model, type, zoom, amplification gain)
    come from ``kwargs['initial']['detector']``; per-acquisition fields
    (gain, voltage, offset, read-out rate, binning) prefer
    ``kwargs['initial']['detectorSettings']`` and fall back to the
    detector.  A field is editable (wired to the JS ``saveMetadata``
    handler) when a source object exists, disabled otherwise; the bare
    ``except`` per field catches missing keys / attribute errors and
    substitutes a disabled "N/A" placeholder.
    """
    def __init__(self, *args, **kwargs):
        super(MetadataDetectorForm, self).__init__(*args, **kwargs)
        # Detector
        # Manufacturer
        try:
            if kwargs['initial']['detector'] is not None:
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'manufacturer\', this.value);'}), initial=kwargs['initial']['detector'].manufacturer, required=False)
            else:
                # NOTE(review): with detector None, accessing .id below raises
                # and we land in the except fallback; the else branch is
                # effectively unreachable to completion.
                self.fields['manufacturer'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'manufacturer\', this.value);'}), required=False)
                self.fields['manufacturer'].widget.attrs['disabled'] = True
                self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['manufacturer'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['manufacturer'].widget.attrs['disabled'] = True
            self.fields['manufacturer'].widget.attrs['class'] = 'disabled-metadata'
        # Model
        try:
            if kwargs['initial']['detector'] is not None:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'model\', this.value);'}), initial=kwargs['initial']['detector'].model, required=False)
            else:
                self.fields['model'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'model\', this.value);'}), required=False)
                self.fields['model'].widget.attrs['disabled'] = True
                self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['model'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['model'].widget.attrs['disabled'] = True
            self.fields['model'].widget.attrs['class'] = 'disabled-metadata'
        # Serial Number
        try:
            if kwargs['initial']['detector'] is not None:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'serialNumber\', this.value);'}), initial=kwargs['initial']['detector'].serialNumber, required=False)
            else:
                self.fields['serialNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'serialNumber\', this.value);'}), required=False)
                self.fields['serialNumber'].widget.attrs['disabled'] = True
                self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['serialNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['serialNumber'].widget.attrs['disabled'] = True
            self.fields['serialNumber'].widget.attrs['class'] = 'disabled-metadata'
        # Lot number (NB. Until the OMERO model is updated in 4.3, this will
        # throw since lotNumber is not yet supported)
        try:
            if kwargs['initial']['detector'] is not None:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'lotNumber\', this.value);'}), initial=kwargs['initial']['detector'].lotNumber, required=False)
            else:
                self.fields['lotNumber'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'lotNumber\', this.value);'}), required=False)
                self.fields['lotNumber'].widget.attrs['disabled'] = True
                self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['lotNumber'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['lotNumber'].widget.attrs['disabled'] = True
            self.fields['lotNumber'].widget.attrs['class'] = 'disabled-metadata'
        # Type
        try:
            if kwargs['initial']['detector'].getDetectorType() is not None:
                self.fields['type'] = MetadataModelChoiceField(queryset=kwargs['initial']['types'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['detector'].id)+', \'type\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['detector'].getDetectorType().value, required=False)
            else:
                self.fields['type'] = MetadataModelChoiceField(queryset=kwargs['initial']['types'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['detector'].id)+', \'type\', this.options[this.selectedIndex].value);'}), required=False)
                self.fields['type'].widget.attrs['disabled'] = True
                self.fields['type'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['type'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['type'].widget.attrs['disabled'] = True
            self.fields['type'].widget.attrs['class'] = 'disabled-metadata'
        # Gain: prefer the acquisition settings value, fall back to detector.
        try:
            if kwargs['initial']['detectorSettings'] is not None:
                self.fields['gain'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'gain\', this.value);'}), initial=kwargs['initial']['detectorSettings'].gain, required=False)
            elif kwargs['initial']['detector'] is not None:
                self.fields['gain'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'gain\', this.value);'}), initial=kwargs['initial']['detector'].gain, required=False)
            else:
                self.fields['gain'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'gain\', this.value);'}), required=False)
                self.fields['gain'].widget.attrs['disabled'] = True
                self.fields['gain'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['gain'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['gain'].widget.attrs['disabled'] = True
            self.fields['gain'].widget.attrs['class'] = 'disabled-metadata'
        # Voltage: prefer the acquisition settings value, fall back to detector.
        try:
            if kwargs['initial']['detectorSettings'] is not None:
                self.fields['voltage'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'voltage\', this.value);'}), initial=kwargs['initial']['detectorSettings'].voltage, required=False)
            elif kwargs['initial']['detector'] is not None:
                self.fields['voltage'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'voltage\', this.value);'}), initial=kwargs['initial']['detector'].voltage, required=False)
            else:
                self.fields['voltage'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'voltage\', this.value);'}), required=False)
                self.fields['voltage'].widget.attrs['disabled'] = True
                self.fields['voltage'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['voltage'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['voltage'].widget.attrs['disabled'] = True
            self.fields['voltage'].widget.attrs['class'] = 'disabled-metadata'
        # Offset: prefer the acquisition settings value, fall back to detector.
        try:
            if kwargs['initial']['detectorSettings'] is not None:
                self.fields['offsetValue'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'offsetValue\', this.value);'}), initial=kwargs['initial']['detectorSettings'].offsetValue, label="Offset", required=False)
            elif kwargs['initial']['detector'] is not None:
                self.fields['offsetValue'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'offsetValue\', this.value);'}), initial=kwargs['initial']['detector'].offsetValue, label="Offset", required=False)
            else:
                self.fields['offsetValue'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'offsetValue\', this.value);'}), label="Offset", required=False)
                self.fields['offsetValue'].widget.attrs['disabled'] = True
                self.fields['offsetValue'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['offsetValue'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Offset", required=False)
            self.fields['offsetValue'].widget.attrs['disabled'] = True
            self.fields['offsetValue'].widget.attrs['class'] = 'disabled-metadata'
        # Zoom
        try:
            if kwargs['initial']['detector'] is not None:
                self.fields['zoom'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'zoom\', this.value);'}), initial=kwargs['initial']['detector'].zoom, required=False)
            else:
                # Bugfix: this branch saved under 'voltage' (copy-paste from
                # the voltage block); the zoom field must save 'zoom'.
                self.fields['zoom'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'zoom\', this.value);'}), required=False)
                self.fields['zoom'].widget.attrs['disabled'] = True
                self.fields['zoom'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['zoom'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['zoom'].widget.attrs['disabled'] = True
            self.fields['zoom'].widget.attrs['class'] = 'disabled-metadata'
        # Amplification gain
        try:
            if kwargs['initial']['detector'] is not None:
                self.fields['amplificationGain'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'amplificationGain\', this.value);'}), initial=kwargs['initial']['detector'].amplificationGain, label="Amplification gain", required=False)
            else:
                self.fields['amplificationGain'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detector'].id)+', \'amplificationGain\', this.value);'}), label="Amplification gain", required=False)
                self.fields['amplificationGain'].widget.attrs['disabled'] = True
                self.fields['amplificationGain'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['amplificationGain'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Amplification gain", required=False)
            self.fields['amplificationGain'].widget.attrs['disabled'] = True
            self.fields['amplificationGain'].widget.attrs['class'] = 'disabled-metadata'
        # Read out rate
        try:
            if kwargs['initial']['detectorSettings'] is not None:
                self.fields['readOutRate'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'readOutRate\', this.value);'}), initial=kwargs['initial']['detectorSettings'].readOutRate, label="Read out rate", required=False)
            else:
                self.fields['readOutRate'] = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size':25, 'onchange':'javascript:saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'readOutRate\', this.value);'}), label="Read out rate", required=False)
                self.fields['readOutRate'].widget.attrs['disabled'] = True
                self.fields['readOutRate'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['readOutRate'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", label="Read out rate", required=False)
            self.fields['readOutRate'].widget.attrs['disabled'] = True
            self.fields['readOutRate'].widget.attrs['class'] = 'disabled-metadata'
        # Binning
        # NOTE(review): the onchange hook saves under 'type' in both branches;
        # 'binning' looks more consistent with the field name, but since both
        # branches agree this may be intentional — confirm against the
        # saveMetadata server-side handler before changing.
        try:
            if kwargs['initial']['detectorSettings'] is not None:
                self.fields['binning'] = MetadataModelChoiceField(queryset=kwargs['initial']['binnings'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'type\', this.options[this.selectedIndex].value);'}), initial=kwargs['initial']['detectorSettings'].getBinning().value, required=False)
            else:
                self.fields['binning'] = MetadataModelChoiceField(queryset=kwargs['initial']['binnings'], empty_label=u"Not set", widget=forms.Select(attrs={'onchange':'saveMetadata('+str(kwargs['initial']['detectorSettings'].id)+', \'type\', this.options[this.selectedIndex].value);'}), required=False)
                self.fields['binning'].widget.attrs['disabled'] = True
                self.fields['binning'].widget.attrs['class'] = 'disabled-metadata'
        except:
            self.fields['binning'] = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size':25}), initial="N/A", required=False)
            self.fields['binning'].widget.attrs['disabled'] = True
            self.fields['binning'].widget.attrs['class'] = 'disabled-metadata'
        self.fields.keyOrder = ['manufacturer', 'model', 'serialNumber', 'lotNumber', 'type', 'gain', 'voltage', 'offsetValue', 'zoom', 'amplificationGain', 'readOutRate', 'binning']
class MetadataLightSourceForm(forms.Form):
    """Read-only metadata panel form for a light source.

    Builds one disabled form field per light-source property; each field's
    ``onchange`` attribute is wired to the client-side saveMetadata() call.
    If a property (or the light source itself) cannot be read, the field
    degrades to a plain "N/A" text field instead of raising.

    Expects ``kwargs['initial']`` to contain:
      - ``lightSource``: the light source model object (may be None/absent),
      - ``lstypes``, ``mediums``, ``pulses``: querysets for the
        enumeration fields.
    """

    BOOLEAN_CHOICES = (
        ('', '---------'),
        ('True', 'True'),
        ('False', 'False'),
    )

    def __init__(self, *args, **kwargs):
        super(MetadataLightSourceForm, self).__init__(*args, **kwargs)
        # Plain text properties: (field name, saveMetadata key, label).
        # A label of None lets Django derive one from the field name.
        # NOTE(review): 'manufacturer' posts under the 'model' key, exactly
        # as in the original code; it looks like a copy/paste bug -- confirm
        # against the saveMetadata handler before changing the key.
        for name, key, label in (
                ('manufacturer', 'model', None),
                ('model', 'model', None),
                ('serialNumber', 'serialNumber', "Serial number"),
                ('lotNumber', 'lotNumber', "Lot number"),
                ('power', 'power', None),
                ('wavelength', 'wavelength', None),
                ('frequencyMultiplication', 'frequencyMultiplication',
                 "Frequency Multiplication"),
                ('repetitionRate', 'repetitionRate', "Repetition rate")):
            self._add_text_field(kwargs, name, key, label)
        # Enumeration properties backed by a queryset; the last argument
        # extracts the initial value (None when not set).
        self._add_enum_field(
            kwargs, 'lstype', 'lstypes', 'type', "Type",
            lambda ls: ls.getLightSourceType().value
            if ls.getLightSourceType() is not None else None)
        self._add_enum_field(
            kwargs, 'lmedium', 'mediums', 'medium', "Medium",
            lambda ls: ls.getLaserMedium().value
            if ls.getLaserMedium() is not None else None)
        self._add_enum_field(
            kwargs, 'pulse', 'pulses', 'pulse', None,
            lambda ls: ls.pulse)
        # True/False properties.
        self._add_bool_field(kwargs, 'tuneable', None)
        self._add_bool_field(kwargs, 'pockelCell', "Pockel Cell")
        # Display order in the metadata panel.
        self.fields.keyOrder = ['manufacturer', 'model', 'serialNumber',
                                'lotNumber', 'power', 'lstype', 'lmedium',
                                'wavelength', 'frequencyMultiplication',
                                'tuneable', 'pulse', 'repetitionRate',
                                'pockelCell']

    def _install(self, name, field):
        """Register *field* as ``self.fields[name]``, greyed out."""
        field.widget.attrs['disabled'] = True
        field.widget.attrs['class'] = 'disabled-metadata'
        self.fields[name] = field

    def _add_text_field(self, kwargs, name, key, label):
        """Disabled CharField for a scalar property; "N/A" on any failure."""
        try:
            ls = kwargs['initial']['lightSource']
            onchange = "javascript:saveMetadata(%s, '%s', this.value);" % (ls.id, key)
            value = getattr(ls, name)
            if value is not None:
                field = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size': 25, 'onchange': onchange}), initial=value, label=label, required=False)
            else:
                field = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size': 25, 'onchange': onchange}), label=label, required=False)
        except Exception:
            # Missing light source / attribute: show a static placeholder.
            field = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size': 25}), initial="N/A", label=label, required=False)
        self._install(name, field)

    def _add_bool_field(self, kwargs, name, label):
        """Disabled True/False ChoiceField; "N/A" on any failure."""
        try:
            ls = kwargs['initial']['lightSource']
            onchange = ("javascript:saveMetadata(%s, '%s', "
                        "this.options[this.selectedIndex].value);" % (ls.id, name))
            value = getattr(ls, name)
            if value is not None:
                field = forms.ChoiceField(choices=self.BOOLEAN_CHOICES, widget=forms.Select(attrs={'onchange': onchange}), initial=value, label=label, required=False)
            else:
                field = forms.ChoiceField(choices=self.BOOLEAN_CHOICES, widget=forms.Select(attrs={'onchange': onchange}), label=label, required=False)
        except Exception:
            field = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size': 25}), initial="N/A", label=label, required=False)
        self._install(name, field)

    def _add_enum_field(self, kwargs, name, qs_key, save_key, label, get_initial):
        """Disabled model-choice field; "N/A" on any failure.

        ``get_initial`` maps the light source to the initial value, or None
        when the underlying property is not set.
        """
        try:
            ls = kwargs['initial']['lightSource']
            onchange = ("saveMetadata(%s, '%s', "
                        "this.options[this.selectedIndex].value);" % (ls.id, save_key))
            initial = get_initial(ls)
            if initial is not None:
                field = MetadataModelChoiceField(queryset=kwargs['initial'][qs_key], empty_label=u"Not set", widget=forms.Select(attrs={'onchange': onchange}), initial=initial, label=label, required=False)
            else:
                field = MetadataModelChoiceField(queryset=kwargs['initial'][qs_key], empty_label=u"Not set", widget=forms.Select(attrs={'onchange': onchange}), label=label, required=False)
        except Exception:
            field = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size': 25}), initial="N/A", label=label, required=False)
        self._install(name, field)
class MetadataEnvironmentForm(forms.Form):
    """Read-only imaging-environment panel form for an image.

    One disabled CharField per environment property (temperature, air
    pressure, humidity, CO2); each falls back to a static "N/A" field
    when the property (or the imaging environment itself) cannot be read.

    Expects ``kwargs['initial']['image']`` to provide ``id`` and
    ``getImagingEnvironment()``.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataEnvironmentForm, self).__init__(*args, **kwargs)
        # (field/property name, label); None -> Django's default label.
        for name, label in (('temperature', None),
                            ('airPressure', "Air Pressure"),
                            ('humidity', None),
                            ('co2percent', "CO2 [%]")):
            self._add_env_field(kwargs, name, label)
        # Display order in the metadata panel.
        self.fields.keyOrder = ['airPressure', 'co2percent', 'humidity', 'temperature']

    def _add_env_field(self, kwargs, name, label):
        """Disabled CharField for one imaging-environment property;
        degrades to a static "N/A" field on any failure."""
        try:
            image = kwargs['initial']['image']
            onchange = "javascript:saveMetadata(%s, '%s', this.value);" % (image.id, name)
            value = getattr(image.getImagingEnvironment(), name)
            if value is not None:
                field = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size': 25, 'onchange': onchange}), initial=value, label=label, required=False)
            else:
                field = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size': 25, 'onchange': onchange}), label=label, required=False)
        except Exception:
            field = forms.CharField(max_length=10, widget=forms.TextInput(attrs={'size': 25}), initial="N/A", label=label, required=False)
        field.widget.attrs['disabled'] = True
        field.widget.attrs['class'] = 'disabled-metadata'
        self.fields[name] = field
class MetadataStageLabelForm(forms.Form):
    """Read-only stage-label (X/Y/Z position) panel form for an image.

    Expects ``kwargs['initial']['image']`` to provide ``id`` and
    ``getStageLabel()``; each coordinate degrades to a static "N/A"
    field when it cannot be read.
    """

    def __init__(self, *args, **kwargs):
        super(MetadataStageLabelForm, self).__init__(*args, **kwargs)
        for name, label in (('positionx', "Position X"),
                            ('positiony', "Position Y"),
                            ('positionz', "Position Z")):
            self._add_position_field(kwargs, name, label)
        # Display order in the metadata panel.
        self.fields.keyOrder = ['positionx', 'positiony', 'positionz']

    def _add_position_field(self, kwargs, name, label):
        """Disabled CharField for one stage-label coordinate;
        "N/A" on any failure."""
        try:
            image = kwargs['initial']['image']
            onchange = "javascript:saveMetadata(%s, '%s', this.value);" % (image.id, name)
            if image.getStageLabel() is not None:
                field = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size': 25, 'onchange': onchange}), initial=getattr(image.getStageLabel(), name), label=label, required=False)
            else:
                field = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size': 25, 'onchange': onchange}), label=label, required=False)
        except Exception:
            field = forms.CharField(max_length=100, widget=forms.TextInput(attrs={'size': 25}), initial="N/A", label=label, required=False)
        field.widget.attrs['disabled'] = True
        field.widget.attrs['class'] = 'disabled-metadata'
        self.fields[name] = field
|
jballanc/openmicroscopy
|
components/tools/OmeroWeb/omeroweb/webclient/forms.py
|
Python
|
gpl-2.0
| 111,021
|
from ietf.submit.parsers.base import FileParser
class XMLParser(FileParser):
    """FileParser for submitted .xml documents."""

    ext = 'xml'
    mimetype = 'application/xml'

    def critical_parse(self):
        """Run the base critical checks and return the parse results.

        If some error is found after this method's invocation, no other
        file parsing is recommended.
        """
        super(XMLParser, self).critical_parse()
        return self.parsed_info
|
wpjesus/codematch
|
ietf/submit/parsers/xml_parser.py
|
Python
|
bsd-3-clause
| 345
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
#
# Copyright (c) 2007 Mike (mosburger) Desjardins <desjardinsmike@gmail.com>
# Please do not email the above person for support. The
# email address is only there for license/copyright purposes.
#
# This is an implementation of the google plugin for a calendar applet for
# Avant Window Navigator.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
import datetime
import awn
awn.check_dependencies(globals(), 'dateutil', 'vobject')
from dateutil.rrule import rrulestr
from awn.extras import _
class IcsCal:
    """Calendar backend that reads appointments from local iCalendar files."""

    # Class attributes kept for backwards compatibility with the original
    # implementation; per-instance event state is (re)set in __init__ and
    # get_appointments.
    events = []
    get_start = False
    get_end = False
    in_event = False
    summary = None
    start = None
    end = None
    # Local .ics files need no authentication.
    requires_login = False

    def __init__(self, applet, files):
        """
        @param applet: owning applet; its twelve_hour_clock preference is
                       read by convert_time_to_text.
        @param files: iterable of .ics file paths to scan.
        """
        self.applet = applet
        self.files = files
        # Shadow the class-level list so instances never share event state
        # (class-level mutable default was an aliasing hazard).
        self.events = []

    def add_event(self, start, end, summary):
        """Append one appointment as [sortable "HH:MM" key, display text]."""
        text = '%s-%s %s' % (start.strftime("%I:%M%p"),
                             end.strftime("%I:%M%p"),
                             summary)
        self.events.append([start.strftime("%H:%M"), text])

    def get_appointments(self, date, url):
        """Return the sorted appointments on `date` ((year, month, day))
        gathered from all configured files.

        `url` is unused -- presumably kept for interface compatibility with
        the other calendar backends; TODO confirm.
        """
        dtdate = datetime.date(date[0], date[1], date[2])
        self.events = []
        for filename in self.files:
            # Close the handle deterministically (the original leaked it).
            with open(filename, 'rb') as handle:
                cal = vobject.readOne(handle)
            for component in cal.components():
                if component.name == 'VEVENT':
                    dtstart = component.dtstart.value
                    dtend = component.dtend.value
                    summary = component.summary.value
                    # DTSTART may be a datetime or, for all-day events, a
                    # plain date; normalise to a date for comparison.
                    if isinstance(dtstart, datetime.datetime):
                        event_date = dtstart.date()
                    else:
                        event_date = dtstart
                    # See if this is a recurring appointment
                    if hasattr(component, 'rrule'):
                        # Add only if an instance happens to be today.
                        [self.add_event(dtstart, dtend, summary)
                         for appt in rrulestr(str(component.rrule.value))
                         if appt.date() == dtdate]
                    elif event_date == dtdate:
                        # BUG FIX: the original compared the bound method
                        # ``dtstart.date`` (not its result) to ``dtdate``,
                        # which is never equal, silently dropping every
                        # non-recurring event.
                        self.add_event(dtstart, dtend, summary)
        if len(self.events) == 0:
            self.events.append([None, _("No appointments")])
        else:
            self.events.sort()
        return self.events

    def convert_time_to_text(self, when):
        """Format an iCalendar timestamp string ("YYYYMMDDTHHMMSS") as a
        short clock time, honouring the applet's 12/24-hour preference."""
        hour = int(when[9:11])
        mins = when[11:13]
        text = ""
        if self.applet.twelve_hour_clock:
            trail = "am"
            if hour >= 12:
                trail = "pm"
            hour = hour % 12
            if hour == 0:
                hour = 12
            text = str(hour) + ":" + mins + trail
        else:
            text = when[9:11] + ":" + when[11:13]
        return text
|
gilir/awn-extras-debian
|
applets/maintained/calendar/icscal.py
|
Python
|
gpl-3.0
| 3,344
|
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""General util functions commonly used across different models."""
def get_fp_spec(sig_bit: int, exp_bit: int):
  """Build the precision spec for floating-point quantization.

  Args:
    sig_bit: number of bits assigned for the significand.
    exp_bit: number of bits assigned for the exponent.

  Returns:
    A dict with 'exp_min', 'exp_max' (symmetric exponent bounds) and
    'sig_bits'.
  """
  largest_exp = 2**(exp_bit - 1) - 1
  return {
      'exp_min': -largest_exp,
      'exp_max': largest_exp,
      'sig_bits': sig_bit,
  }
|
google/aqt
|
utils/common.py
|
Python
|
apache-2.0
| 1,045
|
#-*- coding: utf8 -*
#
# Max E. Kuznecov ~syhpoon <syhpoon@syhpoon.name> 2008
#
# This file is part of XYZCommander.
# XYZCommander is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# XYZCommander is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
# You should have received a copy of the GNU Lesser Public License
# along with XYZCommander. If not, see <http://www.gnu.org/licenses/>.
import re
from libxyz.parser import BaseParser
from libxyz.parser import Lexer
from libxyz.parser import ParsedData
from libxyz.exceptions import XYZValueError
from libxyz.exceptions import LexerError
import libxyz
class BlockParser(BaseParser):
    """
    BaseParser is used to parse block structures.

    Format:

    name {
        var1 <assign> val1 <delimiter>
        var2 <assign> val2 [<list_separator>val3...] <delimiter>
        ...
    }
    """

    # Finite-state-machine states; each maps to a handler via
    # self._parse_table (built in __init__).
    STATE_INIT = 0        # expecting a block name
    STATE_BLOCK_OPEN = 1  # expecting the opening brace
    STATE_VARIABLE = 2    # expecting a variable/macro name or closing brace
    STATE_ASSIGN = 3      # expecting the assignment character
    STATE_VALUE = 4       # expecting a value
    STATE_DELIM = 5       # expecting the statement delimiter
    STATE_LIST_VALUE = 6  # inside a list value; separator continues the list

    # Default parser options; see the __init__ docstring for the meaning
    # of each key.  NOTE(review): this dict is a shared class-level mutable
    # -- callers must not modify it in place.
    DEFAULT_OPT = {
        u"comment": u"#",
        u"varre": re.compile(r"^[\w-]+$"),
        u"assignchar": u"=",
        u"delimiter": u"\n",
        u"validvars": (),
        u"value_validator": None,
        u"count": 0,
        u"list_separator": u",",
        u"macrochar": u"&",
        u"var_transform": None,
    }
    def __init__(self, opt=None):
        """
        @param opt: Parser options.
        @type opt: dict

        Available options:

        - comment: Comment character.
          Everything else ignored until EOL.
          Type: I{string (single char)}
          Default: #

        - delimiter: Character to use as delimiter between statements.
          Type: I{string (single char)}
          Default: \\n

        - varre: Valid variable name regular expression.
          Type: I{Compiled re object (L{re.compile})}
          Default: ^[\w-]+$

        - assignchar: Variable-value split character.
          Type: I{string (single char)}
          Default: =

        - validvars: List of variables valid within block.
          Type: I{sequence}
          Default: ()

        - value_validator: Value validator
          Type: A function that takes three args:
          current block name, var and value and validates them.
          In case value is invalid, XYZValueError must be raised.
          Otherwise function must return required value, possibly modified.
          Default: None

        - count: How many blocks to parse. If count <= 0 - will parse
          all available.
          Type: integer
          Default: 0

        - list_separator: Character to separate elements in list
          Type: I{string (single char)}
          Default: ,

        - macrochar: Macro character (None to disable macros)
          Type: I{string (single char)}
          Default: &

        - var_transform: A function which is called with variable name
          as single argument, and which returns new variable object
          or raises XYZValueError
        """

        super(BlockParser, self).__init__()

        if opt and not isinstance(opt, dict):
            raise XYZValueError(_(u"Invalid opt type: %s. "\
                                u"Dictionary expected.") % type(opt))

        # set_opt() (inherited from BaseParser) appears to expose each
        # option as an attribute (self.comment, self.assignchar, ... are
        # read later, e.g. in parse()) -- confirm against BaseParser.
        self.opt = opt or self.DEFAULT_OPT
        self.set_opt(self.DEFAULT_OPT, self.opt)

        # Mutable parsing state; reset again at the start of parse().
        self._default_data = None
        self._state = self.STATE_INIT
        self._parsed_obj = None    # ParsedData being filled for current block
        self._varname = None       # variable currently awaiting its value
        self._macroname = None     # macro currently awaiting its value
        self._sdata = None
        self._result = {}          # block name -> ParsedData
        self._current_list = []    # values accumulated for current variable
        self._macros = {}          # macro name -> stored value
        self._lexer = None
        self._openblock = u"{"
        self._closeblock = u"}"
        self._parsed = 0           # number of blocks parsed so far
        self._tok_type = None      # token type reported by the lexer

        # State -> handler dispatch table used by parse().
        self._parse_table = {
            self.STATE_INIT: self._process_state_init,
            self.STATE_BLOCK_OPEN: self._process_state_block_open,
            self.STATE_VARIABLE: self._process_state_variable,
            self.STATE_ASSIGN: self._process_state_assign,
            self.STATE_VALUE: self._process_state_value,
            self.STATE_LIST_VALUE: self._process_state_list_value,
            self.STATE_DELIM: self._process_state_delim,
        }
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    def parse(self, source, default_data=None):
        """
        Parse blocks of text and return a dict of L{ParsedData} objects
        or raise L{libxyz.exceptions.ParseError} exception

        @param source: Source data
        @param default_data: Dictionary containing L{libxyz.parser.ParsedData}
                             objects with default values.
        """

        self._cleanup()

        self._result = default_data or {}
        self._parsed = 0
        self._default_data = default_data

        # Characters the lexer must report as standalone tokens.
        _tokens = (self._openblock,
                   self._closeblock,
                   self.assignchar,
                   self.delimiter,
                   self.list_separator,
                   )

        self._lexer = Lexer(source, _tokens, self.comment, self.macrochar)
        self._sdata = self._lexer.sdata

        try:
            while True:
                _res = self._lexer.lexer()

                if _res is None:
                    # Lexer exhausted the source.
                    break
                else:
                    self._tok_type, _val = _res

                    # We're only interested in LF in DELIM or LIST_VALUE
                    # states
                    if _val == "\n" and \
                       self._state not in (self.STATE_DELIM, self.STATE_LIST_VALUE):
                        continue
                    else:
                        # Dispatch the token to the current state's handler.
                        self._parse_table[self._state](_val)
        except LexerError, e:
            self.error(str(e))

        # Defined outside this view; presumably verifies parsing ended at
        # a block boundary -- confirm.
        self._check_complete()

        return self._result
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _process_state_init(self, word):
    """
    Handle a token in the INIT state: the token is a block name.
    Reuse a supplied default ParsedData object when available,
    otherwise start a fresh one, then expect the opening brace.
    """
    _defaults = self._default_data
    if _defaults and word in _defaults:
        _obj = _defaults[word]
    else:
        _obj = ParsedData(word)
    self._parsed_obj = _obj
    self._state = self.STATE_BLOCK_OPEN
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _process_state_block_open(self, word):
    """
    Handle a token in the BLOCK_OPEN state: only the opening
    brace is acceptable here.
    """
    if word == self._openblock:
        self._state = self.STATE_VARIABLE
    else:
        self.error(msg=(word, self._openblock),
                   etype=self.error_unexpected)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _process_state_variable(self, word):
    """
    Handle a token in the VARIABLE state: either the block is
    closed, a macro name is defined, or a variable name is read.
    """
    if word == self._closeblock:
        self._complete_block()
        return
    if self._tok_type == self._lexer.TOKEN_MACRO:
        # Macro definition: remember the name, its value follows
        self._macroname = word
    else:
        # Validate the variable name against the allowed set/regexp
        if self.validvars and word not in self.validvars:
            self.error(_(u"Unknown variable %s") % word)
        elif self.varre.match(word) is None:
            self.error(_(u"Invalid variable name: %s") % word)
        if callable(self.var_transform):
            # Optional user-supplied name transformation
            try:
                self._varname = self.var_transform(word)
            except XYZValueError, e:
                self.error(_(u"Variable transformation error: %s") %
                           unicode(e))
        else:
            self._varname = word
    self._state = self.STATE_ASSIGN
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _process_state_assign(self, word):
    """
    Handle a token in the ASSIGN state: only the assignment
    character is valid here. Escaping is enabled for the
    value tokens that follow.
    """
    if word == self.assignchar:
        self._state = self.STATE_VALUE
        self._lexer.escaping_on()
    else:
        self.error(msg=(word, self.assignchar),
                   etype=self.error_unexpected)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _process_state_list_value(self, word):
    """
    Handle a token in the LIST_VALUE state: either the list
    continues (separator seen) or the accumulated value is
    committed to the macro table or to the parsed object.
    """
    if word == self.list_separator:
        # More list elements follow
        self._state = self.STATE_VALUE
        return
    # A single element stays scalar, several become a tuple
    if len(self._current_list) == 1:
        _value = self._current_list[0]
    else:
        _value = tuple(self._current_list)
    # Macro
    if self._macroname:
        self._macros[self._macroname] = _value
        self._macroname = None
    # Variable
    else:
        if self.value_validator:
            try:
                _value = self.value_validator(self._parsed_obj.name,
                                              self._varname, _value)
            except XYZValueError, e:
                self.error(_(u"Invalid value: %s") % str(e))
        self._parsed_obj[self._varname] = _value
        self._varname = None
    self._current_list = []
    self._lexer.escaping_off()
    self._state = self.STATE_DELIM
    # Re-feed the terminating token so the DELIM state can process it
    self._lexer.unget(word)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _process_state_value(self, word):
    """
    Handle a token in the VALUE state. A macro token is expanded
    to its previously stored value, a plain word is taken as-is;
    either way the value is accumulated as a list element.
    """
    if self._tok_type == self._lexer.TOKEN_MACRO:
        try:
            _expanded = self._macros[word]
        except KeyError:
            self.error(_(u"Undefined macro %s") % word)
        else:
            self._current_list.append(_expanded)
    else:
        self._current_list.append(word)
    self._state = self.STATE_LIST_VALUE
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _process_state_delim(self, word):
    """
    Handle a token in the DELIM state: either the block is
    closed or a delimiter separates two variable definitions.
    """
    if word == self._closeblock:
        self._complete_block()
    elif word == self.delimiter:
        self._state = self.STATE_VARIABLE
    else:
        self.error(msg=(word, self.delimiter),
                   etype=self.error_unexpected)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _complete_block(self):
    """
    Finish the current block: store the parsed object into the
    result, reset per-block state and stop the lexer once the
    requested number of blocks has been parsed.
    """
    _obj = self._parsed_obj
    if _obj:
        self._result[_obj.name] = _obj
    self._parsed += 1
    self._cleanup()
    if 0 < self.count == self._parsed:
        self._lexer.done()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _cleanup(self):
    """
    Reset all per-parse variables to their initial state.
    """
    self._state = self.STATE_INIT
    self._parsed_obj = None
    self._varname = None
    self._macroname = None
    self._default_data = None
    self._in_comment = False
    self._in_quote = False
    self._current_list = []
    self._macros = {}
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def _check_complete(self):
    """
    Verify parser consistency after the source reaches EOF and
    raise a parse error when the input ended in the middle of a
    construct.
    """
    _error = False
    _message = None
    if self._in_quote:
        _error, _message = True, _(u"Unterminated quote")
    if self._state != self.STATE_INIT:
        _error = True
        if self._state != self.STATE_BLOCK_OPEN:
            _message = _(u"Unclosed block")
        else:
            _message = None
    if self._lexer.get_idt():
        _error, _message = True, None
    if _error:
        self.error(_message)
|
syhpoon/xyzcmd
|
libxyz/parser/block.py
|
Python
|
gpl-3.0
| 11,515
|
# CC150 8.1
# Design the data structure for a generic deck of cards.
class suit():
    """A card suit identified by a numeric value (CC150 8.1).

    NOTE(review): the original lowercase name ``suit`` is kept for
    backward compatibility; PEP 8 would call it ``Suit``.
    """

    def __init__(self, v):
        """Store the suit's numeric identifier.

        :param v: numeric identifier of the suit
        """
        self.value = v

    def __repr__(self):
        # Debug-friendly representation; purely additive, existing
        # callers are unaffected.
        return "%s(%r)" % (type(self).__name__, self.value)
|
cyandterry/Python-Study
|
Ninja/Concept_Implement/OOD.py
|
Python
|
mit
| 134
|
# -*- coding: utf-8 -*-
#
# test_dumping.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Tests for topology hl_api dumping functions.
NOTE: These tests only test whether the code runs, it does not check
whether the results produced are correct.
"""
import unittest
import nest
import nest.topology as topo
import sys
import os
import os.path
class PlottingTestCase(unittest.TestCase):
    def nest_tmpdir(self):
        """Loads temporary directory path from the environment variable, returns current directory otherwise"""
        return os.environ.get('NEST_DATA_PATH', '.')

    def test_DumpNodes(self):
        """Test dumping nodes."""
        layer_spec = {'elements': 'iaf_neuron', 'rows': 3, 'columns': 3,
                      'extent': [2., 2.], 'edge_wrap': True}
        nest.ResetKernel()
        layer = topo.CreateLayer(layer_spec)
        out_file = os.path.join(self.nest_tmpdir(), 'test_DumpNodes.out.lyr')
        topo.DumpLayerNodes(layer, out_file)
        self.assertTrue(True)

    def test_DumpNodes2(self):
        """Test dumping nodes, two layers."""
        layer_spec = {'elements': 'iaf_neuron', 'rows': 3, 'columns': 3,
                      'extent': [2., 2.], 'edge_wrap': True}
        nest.ResetKernel()
        layer = topo.CreateLayer(layer_spec)
        out_file = os.path.join(self.nest_tmpdir(), 'test_DumpNodes2.out.lyr')
        topo.DumpLayerNodes(layer * 2, out_file)
        self.assertTrue(True)

    def test_DumpConns(self):
        """Test dumping connections."""
        layer_spec = {'elements': 'iaf_neuron', 'rows': 3, 'columns': 3,
                      'extent': [2., 2.], 'edge_wrap': True}
        conn_spec = {'connection_type': 'divergent',
                     'mask': {'circular': {'radius': 1.}}}
        nest.ResetKernel()
        layer = topo.CreateLayer(layer_spec)
        topo.ConnectLayers(layer, layer, conn_spec)
        out_file = os.path.join(self.nest_tmpdir(), 'test_DumpConns.out.cnn')
        topo.DumpLayerConnections(layer, 'static_synapse', out_file)
        self.assertTrue(True)

    def test_DumpConns2(self):
        """Test dumping connections, 2 layers."""
        layer_spec = {'elements': 'iaf_neuron', 'rows': 3, 'columns': 3,
                      'extent': [2., 2.], 'edge_wrap': True}
        conn_spec = {'connection_type': 'divergent',
                     'mask': {'circular': {'radius': 1.}}}
        nest.ResetKernel()
        layer = topo.CreateLayer(layer_spec)
        topo.ConnectLayers(layer, layer, conn_spec)
        out_file = os.path.join(self.nest_tmpdir(), 'test_DumpConns2.out.cnn')
        topo.DumpLayerConnections(layer * 2, 'static_synapse', out_file)
        self.assertTrue(True)
def suite():
    """Build the test suite for this module.

    Uses ``unittest.TestLoader`` because ``unittest.makeSuite`` is
    deprecated since Python 3.11 and removed in 3.13.
    """
    return unittest.TestLoader().loadTestsFromTestCase(PlottingTestCase)
if __name__ == "__main__":
    # Run the dumping tests verbosely, then show any matplotlib figures
    # if matplotlib happens to be installed.
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(suite())
    try:
        import matplotlib.pyplot as plt
        plt.show()
    except ImportError:
        # matplotlib is optional -- silently skip plotting
        pass
|
kristoforcarlson/nest-simulator-fork
|
topology/pynest/tests/test_dumping.py
|
Python
|
gpl-2.0
| 3,483
|
###############################################################################
##
## Copyright (C) 2013-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
import re
import six
# TODO:
# suffix matches
# args + kwargs
# uuid converter
# multiple URI patterns per decorated object
# classes: Pattern, EndpointPattern, ..
class Pattern:
    """
    A WAMP URI Pattern.

    Wraps either an exact URI or a wildcard URI pattern with named
    (and optionally type-converted) components, together with the kind
    of target it decorates (endpoint, handler or exception).
    """

    URI_TARGET_ENDPOINT = 1
    URI_TARGET_HANDLER = 2
    URI_TARGET_EXCEPTION = 3

    URI_TYPE_EXACT = 1
    URI_TYPE_PREFIX = 2
    URI_TYPE_WILDCARD = 3

    _URI_COMPONENT = re.compile(r"^[a-z][a-z0-9_]*$")
    _URI_NAMED_COMPONENT = re.compile(r"^<([a-z][a-z0-9_]*)>$")
    _URI_NAMED_CONVERTED_COMPONENT = re.compile(r"^<([a-z][a-z0-9_]*):([a-z]*)>$")

    def __init__(self, uri, target):
        """
        Constructor for WAMP URI pattern.

        :param uri: The URI or URI pattern, e.g. `"com.myapp.product.<product:int>.update"`.
        :type uri: str

        :param target: The target for this pattern: a procedure endpoint (a callable),
                       an event handler (a callable) or an exception (a class).
        """
        assert(type(uri) == six.text_type)
        assert(target in [Pattern.URI_TARGET_ENDPOINT,
                          Pattern.URI_TARGET_HANDLER,
                          Pattern.URI_TARGET_EXCEPTION])
        components = uri.split('.')
        pl = []   # regex pieces, one per URI component
        nc = {}   # name -> converter for named components
        # BUGFIX: the component index was initialized to 0 but never
        # incremented, so the "'suffix' must be the last component" check
        # below always compared against position 0. Use enumerate instead.
        for i, component in enumerate(components):
            # <name:type> component with an explicit converter
            match = Pattern._URI_NAMED_CONVERTED_COMPONENT.match(component)
            if match:
                ctype = match.groups()[1]
                if ctype not in ['string', 'int', 'suffix']:
                    raise Exception("invalid URI")
                # a suffix converter is only valid on the last component
                if ctype == 'suffix' and i != len(components) - 1:
                    raise Exception("invalid URI")
                name = match.groups()[0]
                # duplicate component names are not allowed
                if name in nc:
                    raise Exception("invalid URI")
                if ctype in ['string', 'suffix']:
                    nc[name] = str
                elif ctype == 'int':
                    nc[name] = int
                else:
                    # should not arrive here
                    raise Exception("logic error")
                pl.append("(?P<{0}>[a-z0-9_]+)".format(name))
                continue
            # <name> component with the implicit string converter
            match = Pattern._URI_NAMED_COMPONENT.match(component)
            if match:
                name = match.groups()[0]
                if name in nc:
                    raise Exception("invalid URI")
                nc[name] = str
                pl.append("(?P<{0}>[a-z][a-z0-9_]*)".format(name))
                continue
            # plain component
            match = Pattern._URI_COMPONENT.match(component)
            if match:
                pl.append(component)
                continue
            raise Exception("invalid URI")
        if nc:
            # URI pattern with named components -> wildcard match.
            # BUGFIX: use a raw string for the dot separator; "\." in a
            # normal string literal is an invalid escape sequence
            # (SyntaxWarning on modern Python). The compiled regex is
            # unchanged.
            self._type = Pattern.URI_TYPE_WILDCARD
            p = "^" + r"\.".join(pl) + "$"
            self._pattern = re.compile(p)
            self._names = nc
        else:
            # exact URI
            self._type = Pattern.URI_TYPE_EXACT
            self._pattern = None
            self._names = None
        self._uri = uri
        self._target = target

    def uri(self):
        """
        Returns the original URI (pattern) for this pattern.

        :returns: str -- The URI (pattern), e.g. `"com.myapp.product.<product:int>.update"`.
        """
        return self._uri

    def match(self, uri):
        """
        Match the given (fully qualified) URI according to this pattern
        and return extracted args and kwargs.

        :param uri: The URI to match, e.g. `"com.myapp.product.123456.update"`.
        :type uri: str

        :returns: tuple -- A tuple `(args, kwargs)`
        :raises Exception: when a wildcard pattern does not match the URI.
        """
        args = []
        kwargs = {}
        if self._type == Pattern.URI_TYPE_EXACT:
            return args, kwargs
        elif self._type == Pattern.URI_TYPE_WILDCARD:
            match = self._pattern.match(uri)
            if match:
                # run each captured component through its converter
                for key in self._names:
                    val = match.group(key)
                    val = self._names[key](val)
                    kwargs[key] = val
                return args, kwargs
            else:
                raise Exception("no match")

    def is_endpoint(self):
        """
        Check if this pattern is for a procedure endpoint.

        :returns: bool -- `True`, iff this pattern is for a procedure endpoint.
        """
        return self._target == Pattern.URI_TARGET_ENDPOINT

    def is_handler(self):
        """
        Check if this pattern is for an event handler.

        :returns: bool -- `True`, iff this pattern is for an event handler.
        """
        return self._target == Pattern.URI_TARGET_HANDLER

    def is_exception(self):
        """
        Check if this pattern is for an exception.

        :returns: bool -- `True`, iff this pattern is for an exception.
        """
        return self._target == Pattern.URI_TARGET_EXCEPTION
|
hlzz/dotfiles
|
graphics/VTK-7.0.0/ThirdParty/AutobahnPython/autobahn/wamp/uri.py
|
Python
|
bsd-3-clause
| 5,528
|
import angr
import logging
l = logging.getLogger(name=__name__)
def convert_prot(prot):
    """
    Convert from a windows memory protection constant to an angr bitmask
    """
    # https://msdn.microsoft.com/en-us/library/windows/desktop/aa366786(v=vs.85).aspx
    # Flags are checked in this exact order; the first matching bit wins,
    # mirroring the original if-chain.
    flag_table = (
        (0x10, 4),
        (0x20, 5),
        (0x40, 7),
        (0x80, 7),
        (0x01, 0),
        (0x02, 1),
        (0x04, 3),
        (0x08, 3),
    )
    for win_flag, angr_mask in flag_table:
        if prot & win_flag:
            return angr_mask
    raise angr.errors.SimValueError("Unknown windows memory protection constant: %#x" % prot)
def deconvert_prot(prot):
    """
    Convert from a angr bitmask to a windows memory protection constant
    """
    # write-only (2) and write+execute-without-read (6) have no windows
    # equivalent
    if prot in (2, 6):
        raise angr.errors.SimValueError("Invalid memory protection for windows process")
    win_consts = (0x01, 0x02, None, 0x04, 0x10, 0x20, None, 0x40)
    return win_consts[prot]
# https://msdn.microsoft.com/en-us/library/windows/desktop/aa366890(v=vs.85).aspx
class VirtualAlloc(angr.SimProcedure):
    """SimProcedure model of the win32 VirtualAlloc API: reserves and/or
    commits a region of pages in the simulated process address space."""

    def run(self, lpAddress, dwSize, flAllocationType, flProtect):
        l.debug("VirtualAlloc(%s, %s, %s, %s)", lpAddress, dwSize, flAllocationType, flProtect)
        # The requested base address must be concrete
        addrs = self.state.solver.eval_upto(lpAddress, 2)
        if len(addrs) != 1:
            raise angr.errors.SimValueError("VirtualAlloc can't handle symbolic lpAddress")
        addr = addrs[0]
        # round down to a page boundary
        addr &= ~0xfff
        # A symbolic size is capped to the configured maximum allocation
        size = self.state.solver.max_int(dwSize)
        if dwSize.symbolic and size > self.state.libc.max_variable_size:
            l.warning('symbolic VirtualAlloc dwSize %s has maximum %#x, greater than state.libc.max_variable_size %#x',
                      dwSize, size, self.state.libc.max_variable_size)
            size = self.state.libc.max_variable_size
        # Allocation type and protection must also be concrete
        flagss = self.state.solver.eval_upto(flAllocationType, 2)
        if len(flagss) != 1:
            raise angr.errors.SimValueError("VirtualAlloc can't handle symbolic flAllocationType")
        flags = flagss[0]
        prots = self.state.solver.eval_upto(flProtect, 2)
        if len(prots) != 1:
            raise angr.errors.SimValueError("VirtualAlloc can't handle symbolic flProtect")
        prot = prots[0]
        angr_prot = convert_prot(prot)
        # 0x00080000 = MEM_RESET, 0x1000000 = MEM_RESET_UNDO
        if flags & 0x00080000 or flags & 0x1000000:
            l.warning("VirtualAlloc with MEM_RESET and MEM_RESET_UNDO are not supported")
            return addr
        if flags & 0x00002000 or addr == 0: # MEM_RESERVE
            if addr == 0:
                # No address requested: search for a free region
                l.debug("...searching for address")
                while True:
                    addr = self.allocate_memory(size)
                    try:
                        self.state.memory.map_region(addr, size, angr_prot, init_zero=True)
                    except angr.errors.SimMemoryError:
                        # candidate region already mapped -- try the next one
                        continue
                    else:
                        l.debug("...found %#x", addr)
                        break
            else:
                try:
                    self.state.memory.map_region(addr, size, angr_prot, init_zero=True)
                except angr.errors.SimMemoryError:
                    # explicit address collides with an existing mapping
                    l.debug("...failed, bad address")
                    return 0
        if flags & 0x00001000: # MEM_COMMIT
            # we don't really emulate commit. we just check to see if the region was allocated.
            try:
                self.state.memory.permissions(addr)
            except angr.errors.SimMemoryError:
                l.debug("...not reserved")
                return 0
        # if we got all the way to the end, nothing failed! success!
        return addr

    def allocate_memory(self, size):
        # Bump-allocate from the mmap base, keeping it page-aligned
        addr = self.state.heap.mmap_base
        new_base = addr + size
        if new_base & 0xfff:
            new_base = (new_base & ~0xfff) + 0x1000
        self.state.heap.mmap_base = new_base
        return addr
|
iamahuman/angr
|
angr/procedures/win32/VirtualAlloc.py
|
Python
|
bsd-2-clause
| 3,923
|
"""Support for the (unofficial) Tado API."""
from datetime import timedelta
import logging
import urllib
from PyTado.interface import Tado
import voluptuous as vol
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.helpers.dispatcher import dispatcher_send
from homeassistant.util import Throttle
from .const import CONF_FALLBACK, DATA
_LOGGER = logging.getLogger(__name__)

DOMAIN = "tado"

# Dispatcher signal name, formatted with (sensor_type, sensor_id)
SIGNAL_TADO_UPDATE_RECEIVED = "tado_update_received_{}_{}"

# Platforms loaded for each configured Tado account
TADO_COMPONENTS = ["sensor", "climate", "water_heater"]

# Throttle on TadoConnector.update(); background polling runs every
# SCAN_INTERVAL
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=10)

SCAN_INTERVAL = timedelta(seconds=15)

# YAML config: a list of accounts, each with credentials and the
# fall-back-to-smart-schedule flag
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.All(
            cv.ensure_list,
            [
                {
                    vol.Required(CONF_USERNAME): cv.string,
                    vol.Required(CONF_PASSWORD): cv.string,
                    vol.Optional(CONF_FALLBACK, default=True): cv.boolean,
                }
            ],
        )
    },
    extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
    """Set up of the Tado component."""
    acc_list = config[DOMAIN]

    api_data_list = []

    for acc in acc_list:
        username = acc[CONF_USERNAME]
        password = acc[CONF_PASSWORD]
        fallback = acc[CONF_FALLBACK]

        tadoconnector = TadoConnector(hass, username, password, fallback)
        # Skip accounts that fail to authenticate/connect
        if not tadoconnector.setup():
            continue

        # Do first update
        tadoconnector.update()

        api_data_list.append(tadoconnector)
        # Poll for updates in the background
        hass.helpers.event.track_time_interval(
            # we're using here tadoconnector as a parameter of lambda
            # to capture actual value instead of closuring of latest value
            lambda now, tc=tadoconnector: tc.update(),
            SCAN_INTERVAL,
        )

    hass.data[DOMAIN] = {}
    hass.data[DOMAIN][DATA] = api_data_list

    # Load components
    for component in TADO_COMPONENTS:
        load_platform(
            hass, component, DOMAIN, {}, config,
        )

    return True
class TadoConnector:
    """An object to store the Tado data."""

    def __init__(self, hass, username, password, fallback):
        """Initialize Tado Connector.

        :param hass: Home Assistant instance
        :param username: Tado account user name
        :param password: Tado account password
        :param fallback: fall back to Smart Schedule when overlays end
        """
        self.hass = hass
        self._username = username
        self._password = password
        self._fallback = fallback

        self.device_id = None
        self.tado = None
        self.zones = None
        self.devices = None
        # Latest state, keyed by sensor type ("zone"/"device") then id
        self.data = {
            "zone": {},
            "device": {},
        }

    @property
    def fallback(self):
        """Return fallback flag to Smart Schedule."""
        return self._fallback

    def setup(self):
        """Connect to Tado and fetch the zones.

        Returns False (and logs) when the connection fails.
        """
        try:
            self.tado = Tado(self._username, self._password)
        except (RuntimeError, urllib.error.HTTPError) as exc:
            _LOGGER.error("Unable to connect: %s", exc)
            return False

        self.tado.setDebugging(True)

        # Load zones and devices
        self.zones = self.tado.getZones()
        self.devices = self.tado.getMe()["homes"]
        # NOTE(review): only the first home is used -- confirm multi-home
        # accounts are out of scope.
        self.device_id = self.devices[0]["id"]
        return True

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    def update(self):
        """Update the registered zones."""
        for zone in self.zones:
            self.update_sensor("zone", zone["id"])
        for device in self.devices:
            self.update_sensor("device", device["id"])

    def update_sensor(self, sensor_type, sensor):
        """Update the internal data from Tado and notify listeners."""
        _LOGGER.debug("Updating %s %s", sensor_type, sensor)
        try:
            if sensor_type == "zone":
                data = self.tado.getState(sensor)
            elif sensor_type == "device":
                data = self.tado.getDevices()[0]
            else:
                _LOGGER.debug("Unknown sensor: %s", sensor_type)
                return
        except RuntimeError:
            _LOGGER.error(
                "Unable to connect to Tado while updating %s %s", sensor_type, sensor,
            )
            return

        self.data[sensor_type][sensor] = data

        # Notify platform entities listening for this sensor
        _LOGGER.debug("Dispatching update to %s %s: %s", sensor_type, sensor, data)
        dispatcher_send(
            self.hass, SIGNAL_TADO_UPDATE_RECEIVED.format(sensor_type, sensor)
        )

    def get_capabilities(self, zone_id):
        """Return the capabilities of the devices."""
        return self.tado.getCapabilities(zone_id)

    def reset_zone_overlay(self, zone_id):
        """Reset the zone back to the default operation."""
        self.tado.resetZoneOverlay(zone_id)
        self.update_sensor("zone", zone_id)

    def set_zone_overlay(
        self,
        zone_id,
        overlay_mode,
        temperature=None,
        duration=None,
        device_type="HEATING",
        mode=None,
    ):
        """Set a zone overlay."""
        _LOGGER.debug(
            "Set overlay for zone %s: mode=%s, temp=%s, duration=%s, type=%s, mode=%s",
            zone_id,
            overlay_mode,
            temperature,
            duration,
            device_type,
            mode,
        )
        try:
            self.tado.setZoneOverlay(
                zone_id, overlay_mode, temperature, duration, device_type, "ON", mode
            )
        except urllib.error.HTTPError as exc:
            _LOGGER.error("Could not set zone overlay: %s", exc.read())

        # Refresh local state so entities reflect the new overlay
        self.update_sensor("zone", zone_id)

    def set_zone_off(self, zone_id, overlay_mode, device_type="HEATING"):
        """Set a zone to off."""
        try:
            self.tado.setZoneOverlay(
                zone_id, overlay_mode, None, None, device_type, "OFF"
            )
        except urllib.error.HTTPError as exc:
            _LOGGER.error("Could not set zone overlay: %s", exc.read())

        self.update_sensor("zone", zone_id)
|
postlund/home-assistant
|
homeassistant/components/tado/__init__.py
|
Python
|
apache-2.0
| 6,029
|
from django.http.response import HttpResponseRedirect
from backend.logic.themes.themes import get_page
from backend.views import view
from django.shortcuts import render
__author__ = 'sdvoynikov'
@view(path=r'^$')
def index(_):
    # Redirect the bare root URL to the home page.
    # NOTE(review): a second function also named `index` is defined just
    # below and rebinds this module attribute; registration appears to
    # happen at decoration time, but confirm the `view` decorator does
    # not rely on unique function names.
    return HttpResponseRedirect('/home/')
@view(path=r'^home/$')
def index(request):
    """Render the home page with the eight most-downloaded themes."""
    # get_page returns (themes, ads, promos); only themes are used here
    themes, _, _ = get_page(order='downloads', nameFilter='', page=0, count=8, adCount=0, promoCount=0)
    return render(request, 'home/index.html', {
        'themes': themes
    })
|
y-a-r-g/idea-color-themes
|
backend/views/home.py
|
Python
|
mit
| 502
|
# -*- encoding: utf-8 -*-
#
# Copyright 2014-2015 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import shlex
import cotyledon
from cotyledon import oslo_config_glue
from oslo_config import cfg
from oslo_log import log
from oslo_privsep import priv_context
from ceilometer.polling import manager
from ceilometer import service
from ceilometer import utils
LOG = log.getLogger(__name__)
class MultiChoicesOpt(cfg.Opt):
    """CLI option accepting several values restricted to a set of choices."""

    def __init__(self, name, choices=None, **kwargs):
        super(MultiChoicesOpt, self).__init__(
            name, type=DeduplicatedCfgList(choices), **kwargs)
        self.choices = choices

    def _get_argparse_kwargs(self, group, **kwargs):
        """Extends the base argparse keyword dict for multi choices options."""
        # NOTE(review): the incoming **kwargs are discarded -- the dict is
        # rebuilt entirely from the superclass. Confirm this is intended.
        kwargs = super(MultiChoicesOpt, self)._get_argparse_kwargs(group)
        # accept one or more values on the command line
        kwargs['nargs'] = '+'
        choices = kwargs.get('choices', self.choices)
        if choices:
            kwargs['choices'] = choices
        return kwargs
class DeduplicatedCfgList(cfg.types.List):
    """List config type that removes duplicate values and optionally
    restricts them to a fixed set of valid choices."""

    def __init__(self, choices=None, **kwargs):
        super(DeduplicatedCfgList, self).__init__(**kwargs)
        self.choices = choices or []

    def __call__(self, *args, **kwargs):
        parsed = super(DeduplicatedCfgList, self).__call__(*args, **kwargs)
        unique = set(parsed)
        if len(unique) != len(parsed):
            LOG.warning("Duplicated values: %s found in CLI options, "
                        "auto de-duplicated", parsed)
            parsed = list(unique)
        if self.choices and not (unique <= set(self.choices)):
            raise Exception('Valid values are %s, but found %s'
                            % (self.choices, parsed))
        return parsed
# CLI options of the polling agent; the namespaces select which groups
# of pollsters get loaded.
CLI_OPTS = [
    MultiChoicesOpt('polling-namespaces',
                    default=['compute', 'central'],
                    dest='polling_namespaces',
                    help='Polling namespace(s) to be used while '
                         'resource polling')
]
def _prepare_config():
    """Build a fresh ConfigOpts with the polling CLI options registered
    and standard service preparation applied."""
    conf = cfg.ConfigOpts()
    conf.register_cli_opts(CLI_OPTS)
    service.prepare_service(conf=conf)
    return conf
def create_polling_service(worker_id, conf=None):
    """Factory for the polling AgentManager, run as a cotyledon service.

    :param worker_id: worker id assigned by cotyledon
    :param conf: prepared config; built here when None (Windows path,
                 where config cannot be pickled into the subprocess)
    """
    if conf is None:
        conf = _prepare_config()
    conf.log_opt_values(LOG, log.DEBUG)
    return manager.AgentManager(worker_id, conf, conf.polling_namespaces)
def main():
    """Entry point: run the polling agent under a cotyledon ServiceManager."""
    sm = cotyledon.ServiceManager()

    # On Windows, we can only initialize conf objects in the subprocess.
    # As a consequence, we can't use oslo_config_glue.setup() on Windows,
    # because cotyledon.ServiceManager objects are not picklable.
    if os.name == 'nt':
        sm.add(create_polling_service)
    else:
        conf = _prepare_config()
        # Privileged operations are delegated through oslo.privsep
        priv_context.init(root_helper=shlex.split(utils._get_root_helper()))
        oslo_config_glue.setup(sm, conf)
        sm.add(create_polling_service, args=(conf,))
    sm.run()
|
openstack/ceilometer
|
ceilometer/cmd/polling.py
|
Python
|
apache-2.0
| 3,469
|
import numpy as np
import pandas as pd
from sklearn import cross_validation
from sklearn.cross_validation import KFold
from sklearn.ensemble import RandomForestClassifier
from sklearn.grid_search import GridSearchCV, RandomizedSearchCV
from sklearn.metrics import confusion_matrix, accuracy_score
from sklearn.preprocessing import PolynomialFeatures
# For optimal performance, set degree to 3. For speed, set to 1
# Module-level transformer shared by the trial/test functions below.
polynomial_features = PolynomialFeatures(interaction_only=False, include_bias=True, degree=1)
from utilities import (convert_to_words, print_full, get_position_stats, combine_csv, resolve_acc_gyro,
blank_filter, concat_data)
# TODO: duplicate
def update_df(df, index, new_values, reach=8):
#print new_values # This is the value at the index (i.e. the row when the stand_up event was 1)
# need to take this list and lay it over the values in the df at that index
# TODO: catch indexing error
for x in range(0,reach):
amount = reach - x
i = index - (amount*20)
df.loc[i, 'state'] = new_values[x]
for y in range(0,reach):
amount = reach - y
i = index + (amount*20)
df.loc[i, 'state'] = new_values[y+reach]
return df
def trial(df_train, test_data):
    """
    Train a random forest on df_train's 'state' labels and predict the
    state for test_data; prints the predictions/stats and returns
    test_data with a 'state' column added.

    NOTE(review): if df_train contains NaN values, `rf`, `X_train` and
    `y_train` are never defined and rf.fit() below raises NameError --
    confirm whether that path should abort explicitly instead.
    """
    my_test_data = test_data.drop(['avg_stand'], axis=1)
    y = df_train['state'].values
    X = df_train.drop(['avg_stand', 'stand', 'state', 'index'], axis=1)
    if X.isnull().values.any() == False:
        rf = RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',
            max_depth=None, max_features='auto', max_leaf_nodes=None,
            min_samples_leaf=8, min_samples_split=4,
            min_weight_fraction_leaf=0.0, n_estimators=500, n_jobs=-1,
            oob_score=False, random_state=None, verbose=0,
            warm_start=False)
        X = polynomial_features.fit_transform(X)
        X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.1)
    else:
        print "Found NaN values"
    rf.fit(X_train, y_train)
    # Apply the same polynomial expansion to the test features
    p_test_data = polynomial_features.fit_transform(my_test_data)
    rf_pred2 = rf.predict(p_test_data)
    print rf_pred2
    test_data['state'] = rf_pred2
    final_prediction = convert_to_words(rf_pred2)
    print_full(final_prediction)
    get_position_stats(final_prediction)
    return test_data
def trial_standup(df_train, test_data):
    """
    Train a random forest on df_train's 'avg_stand' labels and predict
    stand-up events for test_data; prints the predictions/stats and
    returns test_data with an 'avg_stand' column added.

    NOTE(review): as in trial(), a NaN-containing df_train leaves `rf`
    and the train/test splits undefined and rf.fit() raises NameError.
    """
    y = df_train['avg_stand'].values
    X = df_train.drop(['avg_stand', 'stand', 'state', 'index'], axis=1)
    if X.isnull().values.any() == False:
        rf = RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',
            max_depth=None, max_features='auto', max_leaf_nodes=None,
            min_samples_leaf=8, min_samples_split=4,
            min_weight_fraction_leaf=0.0, n_estimators=500, n_jobs=-1,
            oob_score=False, random_state=None, verbose=0,
            warm_start=False)
        X = polynomial_features.fit_transform(X)
        X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.1)
    else:
        print "Found NaN values"
    rf.fit(X_train, y_train)
    p_test_data = polynomial_features.fit_transform(test_data)
    rf_pred2 = rf.predict(p_test_data)
    print rf_pred2
    test_data['avg_stand'] = rf_pred2
    final_prediction = convert_to_words(rf_pred2)
    print_full(final_prediction)
    get_position_stats(final_prediction)
    # Now we have the estimated stand_up values, we use them to create a new feature
    # in the original df
    # rf_pred3 = rf_pred2.astype(int)
    return test_data
def test_model(df_train):
    """check model accuracy"""
    # Train on the 'state' labels; all remaining columns are features
    y = df_train['state'].values
    X = df_train.drop(['state', 'index'], axis=1)
    if X.isnull().values.any() == False:
        rf = RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',
            max_depth=None, max_features='auto', max_leaf_nodes=None,
            min_samples_leaf=8, min_samples_split=4,
            min_weight_fraction_leaf=0.0, n_estimators=500, n_jobs=-1,
            oob_score=False, random_state=None, verbose=0,
            warm_start=False)
        X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.1)
        rf.fit(X_train, y_train)
        rf_pred = rf.predict(X_test)
        # 10-fold cross-validated accuracy on the full data
        rf_scores = cross_validation.cross_val_score(
            rf, X, df_train.state, cv=10, scoring='accuracy')
        print 'rf prediction: {}'.format(accuracy_score(y_test, rf_pred))
        print("Random Forest Accuracy: %0.2f (+/- %0.2f)" % (rf_scores.mean(), rf_scores.std() * 2))
        importances = rf.feature_importances_
        std = np.std([tree.feature_importances_ for tree in rf.estimators_],
                     axis=0)
        indices = np.argsort(importances)[::-1]
        # Print the feature ranking
        print("Feature ranking:")
        for f in range(X.shape[1]):
            print("%d. feature %s (%f)" % (f + 1, X.columns[indices[f]], importances[indices[f]]))
def test_model_stand(df_train):
"""check model accuracy"""
y = df_train['avg_stand'].values
X = df_train.drop(['avg_stand', 'stand', 'state', 'index'], axis=1)
if X.isnull().values.any() == False:
rf = RandomForestClassifier(bootstrap=True, class_weight=None, criterion='gini',
max_depth=None, max_features='auto', max_leaf_nodes=None,
min_samples_leaf=8, min_samples_split=4,
min_weight_fraction_leaf=0.0, n_estimators=500, n_jobs=-1,
oob_score=False, random_state=None, verbose=0,
warm_start=False)
X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.1)
rf.fit(X_train, y_train)
rf_pred = rf.predict(X_test)
rf_scores = cross_validation.cross_val_score(
rf, X, df_train.state, cv=10, scoring='accuracy')
print 'rf prediction: {}'.format(accuracy_score(y_test, rf_pred))
print("Random Forest Accuracy: %0.2f (+/- %0.2f)" % (rf_scores.mean(), rf_scores.std() * 2))
importances = rf.feature_importances_
std = np.std([tree.feature_importances_ for tree in rf.estimators_],
axis=0)
indices = np.argsort(importances)[::-1]
# Print the feature ranking
print("Feature ranking:")
for f in range(X.shape[1]):
print("%d. feature %s (%f)" % (f + 1, X.columns[indices[f]], importances[indices[f]]))
|
ChristopherGS/sensor_readings
|
ML_Sandbox/algorithm_tests.py
|
Python
|
bsd-3-clause
| 6,621
|
from __future__ import absolute_import
from __future__ import print_function
import argparse
import os
import sys
import ruamel
from aetros import api
from aetros.utils import read_home_config
class InitCommand:
    """CLI command: create/link an AETROS model and write aetros.yml."""

    def __init__(self, logger):
        self.logger = logger
        self.client = None
        self.registered = False
        self.active = True

    def main(self, args):
        """Parse CLI arguments, create the model via the AETROS API and
        write the linking aetros.yml file into the target directory."""
        import aetros.const
        parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
                                         prog=aetros.const.__prog__ + ' init')
        parser.add_argument('name', help="Model name")
        parser.add_argument('directory', nargs='?', help="Directory, default in current.")
        parser.add_argument('--organisation', '-o', help="Create the model in the organisation instead of the user account.")
        parser.add_argument('--space', '-s', help="Create the model in given space. If space does not exist, create it.")
        parser.add_argument('--private', action='store_true', help="Make the model private. Example: aetros init my-model --private")
        parser.add_argument('--force', '-f', action='store_true', help="Force overwriting of already existing configuration file.")
        home_config = read_home_config()
        parsed_args = parser.parse_args(args)
        if not parsed_args.name:
            parser.print_help()
            sys.exit(1)
        # Resolve the target directory, creating it when necessary
        path = os.getcwd()
        if parsed_args.directory:
            path = os.path.abspath(parsed_args.directory)
        if os.path.exists(path) and not os.path.isdir(path):
            # NOTE(review): only writes the message -- execution continues;
            # confirm a sys.exit(1) is not intended here.
            sys.stderr.write('Path already exist and is not a directory: ' + path)
        if not os.path.exists(path):
            os.makedirs(path)
        yaml = ruamel.yaml.YAML()
        config = {}
        # Refuse to overwrite an existing model link unless --force
        if os.path.exists(path+'/aetros.yml'):
            with open(path+'/aetros.yml', 'r') as f:
                config = yaml.load(f)
            if isinstance(config, dict) and 'model' in config and not parsed_args.force:
                print("failed: aetros.yml already exists in with a linked model to " + config['model']+ '. Use -f to force.')
                sys.exit(1)
        if not parsed_args.private:
            print("Warning: creating public model. Use --private to create private models.")
        if '/' in parsed_args.name:
            sys.stderr.write('No / allowed in name. Use -o if thie model should be created in an organisation.')
            sys.exit(1)
        # Create (or look up) the model on the server
        response = api.create_model(parsed_args.name or (os.path.basename(os.getcwd())), parsed_args.organisation, parsed_args.space, parsed_args.private)
        name = response['name']
        if response['already_exists']:
            print("Notice: Model already exists remotely.")
        # Persist the link into aetros.yml
        config['model'] = name
        with open(path + '/aetros.yml', 'w+') as f:
            yaml.dump(config, f)
        print("aetros.yml created and linked with model " + name + ' in ' + path)
        print("Open AETROS Trainer to see the model at https://" + home_config['host'] + '/model/' + name)
        # Print follow-up git instructions for the user
        git_remote_url = 'git@%s:%s.git' % (home_config['host'], name)
        print("Use git to store your source code. Each model has its own Git repository.")
        print("  $ cd " + path)
        print("  $ git init")
        print("  $ git remote add origin " + git_remote_url)
        print("  $ git add .")
        print("  $ git commit -m 'first commit'")
        print("  $ git push origin master")
|
aetros/aetros-cli
|
aetros/commands/InitCommand.py
|
Python
|
mit
| 3,478
|
# RGB color tuples
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
LIGHT_BLUE = (168, 201, 251)
DARK_BLUE = (111, 105, 214)

# Player facing/movement states
RIGHT = 'right'
LEFT = 'left'
STAND = 'stand'

# Sprite size and per-frame movement, in pixels
SIZE = 16
MOVE_DISTANCE = 4

# Background scroll speeds (forward / backward), in pixels per frame
STANDARD_SCROLL = 10
STANDARD_BACKWARD_SCROLL = -5
|
nylo-andry/run-jump-damn
|
constants.py
|
Python
|
mit
| 227
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Scientific Package. This package holds all simulators, and
# analysers necessary to run brain-simulations. You can use it stand alone or
# in conjunction with TheVirtualBrain-Framework Package. See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
Created on Mar 20, 2013
.. moduleauthor:: Bogdan Neacsa <bogdan.neacsa@codemart.ro>
"""
if __name__ == "__main__":
    # Set up the console test environment before importing TVB modules
    from tvb.tests.library import setup_test_console_env
    setup_test_console_env()
import numpy
import unittest
from tvb.datatypes import spectral, time_series
from tvb.tests.library.base_testcase import BaseTestCase
class SpectralTest(BaseTestCase):
    """
    Tests the defaults for `tvb.datatypes.spectral` module.
    """
    def test_fourierspectrum(self):
        """Check FourierSpectrum defaults and summary info for a random 10x10 source."""
        data = numpy.random.random((10, 10))
        ts = time_series.TimeSeries(data=data)
        dt = spectral.FourierSpectrum(source=ts,
                                      segment_length=100)
        dt.configure()
        summary_info = dt.summary_info
        self.assertEqual(summary_info['Frequency step'], 0.01)
        self.assertEqual(summary_info['Maximum frequency'], 0.5)
        self.assertEqual(summary_info['Segment length'], 100)
        self.assertEqual(summary_info['Windowing function'], '')
        self.assertEqual(summary_info['Source'], '')
        self.assertEqual(summary_info['Spectral type'], 'FourierSpectrum')
        self.assertTrue(dt.aggregation_functions is None)
        self.assertEqual(dt.normalised_average_power.shape, (0, ))
        self.assertEqual(dt.segment_length, 100.0)
        self.assertEqual(dt.shape, (0, ))
        self.assertTrue(dt.source is not None)
        self.assertEqual(dt.windowing_function, '')
    def test_waveletcoefficients(self):
        """Check WaveletCoefficients defaults and summary info for explicit parameters."""
        data = numpy.random.random((10, 10))
        ts = time_series.TimeSeries(data=data)
        dt = spectral.WaveletCoefficients(source=ts,
                                          mother='morlet',
                                          sample_period=7.8125,
                                          frequencies=[0.008, 0.028, 0.048, 0.068],
                                          normalisation="energy",
                                          q_ratio=5.0,
                                          array_data=numpy.random.random((10, 10)),)
        dt.configure()
        summary_info = dt.summary_info
        self.assertEqual(summary_info['Maximum frequency'], 0.068)
        self.assertEqual(summary_info['Minimum frequency'], 0.008)
        self.assertEqual(summary_info['Normalisation'], 'energy')
        self.assertEqual(summary_info['Number of scales'], 4)
        self.assertEqual(summary_info['Q-ratio'], 5.0)
        self.assertEqual(summary_info['Sample period'], 7.8125)
        self.assertEqual(summary_info['Spectral type'], 'WaveletCoefficients')
        self.assertEqual(summary_info['Wavelet type'], 'morlet')
        self.assertEqual(dt.q_ratio, 5.0)
        self.assertEqual(dt.sample_period, 7.8125)
        self.assertEqual(dt.shape, (10, 10))
        self.assertTrue(dt.source is not None)
    def test_coherencespectrum(self):
        """Check CoherenceSpectrum defaults and summary info (no configure() call needed)."""
        data = numpy.random.random((10, 10))
        ts = time_series.TimeSeries(data=data)
        dt = spectral.CoherenceSpectrum(source=ts,
                                        nfft = 4,
                                        array_data = numpy.random.random((10, 10)),
                                        frequency = numpy.random.random((10,)))
        summary_info = dt.summary_info
        self.assertEqual(summary_info['Number of frequencies'], 10)
        self.assertEqual(summary_info['Spectral type'], 'CoherenceSpectrum')
        self.assertEqual(summary_info['FFT length (time-points)'], 4)
        self.assertEqual(summary_info['Source'], '')
        self.assertEqual(dt.nfft, 4)
        self.assertEqual(dt.shape, (10, 10))
        self.assertTrue(dt.source is not None)
    def test_complexcoherence(self):
        """Check ComplexCoherenceSpectrum defaults and summary info."""
        data = numpy.random.random((10, 10))
        ts = time_series.TimeSeries(data=data)
        dt = spectral.ComplexCoherenceSpectrum(source=ts,
                                               array_data = numpy.random.random((10, 10)),
                                               cross_spectrum = numpy.random.random((10, 10)),
                                               epoch_length = 10,
                                               segment_length = 5)
        summary_info = dt.summary_info
        self.assertEqual(summary_info['Frequency step'], 0.2)
        self.assertEqual(summary_info['Maximum frequency'], 0.5)
        self.assertEqual(summary_info['Source'], '')
        self.assertEqual(summary_info['Spectral type'], 'ComplexCoherenceSpectrum')
        self.assertTrue(dt.aggregation_functions is None)
        self.assertEqual(dt.epoch_length, 10)
        self.assertEqual(dt.segment_length, 5)
        self.assertEqual(dt.shape, (10, 10))
        self.assertTrue(dt.source is not None)
        self.assertEqual(dt.windowing_function, '')
def suite():
    """Assemble every test case of this module into a single test suite."""
    collected = unittest.TestSuite()
    collected.addTest(unittest.makeSuite(SpectralTest))
    return collected
if __name__ == "__main__":
    # So you can run tests from this package individually.
    TEST_RUNNER = unittest.TextTestRunner()
    TEST_SUITE = suite()
    TEST_RUNNER.run(TEST_SUITE)
|
stuart-knock/tvb-library
|
tvb/tests/library/datatypes/spectral_test.py
|
Python
|
gpl-2.0
| 6,667
|
import numpy as np
from scipy.signal import fftconvolve
from .Filter import Filter
from . import Tools
class Filter_Rect_LinSpaced(Filter) :
    """
    This class define a function of time expanded using linearly spaced rectangular basis functions.
    A filter f(t) is defined in the form f(t) = sum_j b_j*rect_j(t),
    where b_j is a set of coefficient and rect_j is a set of rectangular basis functions.
    All the rectangular functions have the same width.
    """
    def __init__(self, length=1000.0, nbBins=30) :
        """
        length : ms, total temporal support of the filter.
        nbBins : number of equal-width rectangular basis functions.
        """
        Filter.__init__(self)
        self.p_length = length     # ms, filter length
        self.p_nbBins = nbBins     # integer, define the number of bins
        # Coefficients b_j that define the shape of the filter f(t)
        self.filter_coeff = np.zeros(1)   # values of bins
        # Auxiliary variables that can be computed using the parameters above
        self.bins = []      # ms, vector defining the rectangular basis functions for f(t)
        self.support = []   # ms, centers of bins used to define the filter
        # Initialize
        self.computeBins()            # using meta parameters self.metaparam_subthreshold define bins and support.
        self.setFilter_toZero()       # initialize filter to 0
    #############################################################################
    # Set functions
    #############################################################################
    def setFilter_Function(self, f):
        """
        Given a function of time f(t), the bins of the filer are initialized accordingly.
        For example, if f(t) is an exponential function, the filter will approximate an exponential using rectangular basis functions
        """
        self.computeBins()
        # One coefficient per basis function: f evaluated at each bin center.
        self.filter_coeff = f(self.support)
    def setFilter_Coefficients(self, coeff):
        """
        Set the coefficients of the filter (i.e. the values that define the magnitude of each rectangular function)
        """
        self.computeBins()
        if len(coeff) == self.p_nbBins :
            self.filter_coeff = coeff
        else :
            print ("Error, the number of coefficients do not match the number of basis functions!")
    #############################################################################
    # Get functions
    #############################################################################
    def getInterpolatedFilter(self, dt) :
        """
        Given a particular dt, the function compute and return the support t and f(t).
        """
        self.computeBins()
        bins_i = Tools.timeToIndex(self.bins, dt)
        if self.p_nbBins == len(self.filter_coeff) :
            # Piecewise-constant expansion: each coefficient is repeated over its bin's index range.
            filter_interpol = np.zeros( (bins_i[-1] - bins_i[0]) )
            for i in range(len(self.filter_coeff)) :
                lb = int(bins_i[i])
                ub = int(bins_i[i+1])
                filter_interpol[lb:ub] = self.filter_coeff[i]
            filter_interpol_support = np.arange(len(filter_interpol))*dt
            return (filter_interpol_support, filter_interpol)
        else :
            # NOTE(review): returns None implicitly on mismatch -- callers should check.
            print ("Error: value of the filter coefficients does not match the number of basis functions!")
    def getNbOfBasisFunctions(self) :
        """
        Return the number of rectangular basis functions used to define the filter.
        """
        self.computeBins()
        return int(self.p_nbBins)
    def getLength(self):
        # Total temporal support (ms): the right edge of the last bin.
        return self.bins[-1]
    #############################################################################
    # Functions to compute convolutions
    #############################################################################
    def convolution_Spiketrain_basisfunctions(self, spks, T, dt):
        """
        Convolve a spike train with each rectangular basis function.
        spks : spike times (ms); T : total duration (ms); dt : time step (ms).
        Returns a (T/dt, nbBins) matrix with one column per basis function.
        """
        T_i = int(T/dt)
        bins_i = Tools.timeToIndex(self.bins, dt)
        spks_i = Tools.timeToIndex(spks, dt)
        nb_bins = self.getNbOfBasisFunctions()
        X = np.zeros( (T_i, nb_bins) )
        # Filter the spike train with the first rectangular function (for the other simply shift the solution
        tmp = np.zeros( T_i + bins_i[-1] + 1)
        for s in spks_i :
            lb = s + bins_i[0]
            ub = s + bins_i[1]
            tmp[lb:ub] += 1
        tmp = tmp[:T_i]
        # Fill the matrix by shifting the vector tmp
        for l in np.arange(nb_bins) :
            tmp_shifted = np.concatenate( ( np.zeros( int(bins_i[l]) ), tmp) )
            X[:,l] = tmp_shifted[:T_i]
        return X
    def convolution_ContinuousSignal_basisfunctions(self, I, dt):
        """
        Convolve a continuous signal I with each rectangular basis function.
        FFT-convolves with the first bin's window, then shifts the result for the
        remaining bins (all bins share the same width).
        Returns a (len(I), nbBins) matrix with one column per basis function.
        """
        T_i = len(I)
        bins_i = Tools.timeToIndex(self.bins, dt)
        bins_l = self.getNbOfBasisFunctions()
        X = np.zeros( (T_i, bins_l) )
        I_tmp = np.array(I,dtype='float64')
        window = np.ones( bins_i[1] - bins_i[0])
        window = np.array(window,dtype='float64')
        F_star_I = fftconvolve(window, I_tmp, mode='full')*dt
        F_star_I = np.array(F_star_I[:T_i], dtype='double')
        for l in np.arange(bins_l) :
            F_star_I_shifted = np.concatenate( ( np.zeros( int(bins_i[l]) ), F_star_I) )
            X[:,l] = np.array(F_star_I_shifted[:T_i], dtype='double')
        return X
    ########################################################################################
    # AUXILIARY METHODS USED BY THIS PARTICULAR IMPLEMENTATION OF FILTER
    ########################################################################################
    def computeBins(self) :
        """
        This function compute bins and support given the metaparameters.
        """
        # nbBins+1 linearly spaced edges; support holds the center of each bin.
        self.bins = np.linspace(0.0, self.p_length, self.p_nbBins+1)
        self.support = np.array( [ (self.bins[i]+self.bins[i+1])/2 for i in range(len(self.bins)-1) ])
    def setMetaParameters(self, length=1000.0, nbBins=10):
        """
        Set the parameters defining the rectangular basis functions.
        Attention, each time meta parameters are changes, the value of the filer is reset to 0.
        """
        self.p_length = length
        self.p_nbBins = nbBins
        self.computeBins()
        self.setFilter_toZero()
|
awakenting/gif_fitting
|
fitgif/Filter_Rect_LinSpaced.py
|
Python
|
gpl-3.0
| 6,768
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Nico Epp and Ralf Funk
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pickle
import requests
import time
from http.server import HTTPServer, BaseHTTPRequestHandler
from typing import List
from .base import TEST_CONFIG
from ..data_sets.base import Request
def _str(req: 'Request', dict_name: str) -> 'List[str]':
    """Render the named dict attribute of *req* as sorted "key: value" lines."""
    mapping = getattr(req, dict_name)
    # Dict keys are unique, so sorting keys is equivalent to sorting the items.
    return ['{}: {}'.format(key, mapping[key]) for key in sorted(mapping)]
class RequestHandler(BaseHTTPRequestHandler):
    """Receives proxied requests and compares them against the expected originals."""
    def _get_from_data_server(self, ds_url: str, req_class: str, req_n: int) -> Request:
        """Fetch and unpickle the expected Request object from the test data server."""
        self.log_message('{:25s} | ds_url {} | req_class {} | req_n {}'.format(
            '_get_from_data_server', ds_url, req_class, req_n))
        t_start = time.perf_counter()
        try:
            resp = requests.get(
                'http://{}:{}/{}/{}/{}'.format(
                    TEST_CONFIG['DATA_SERVER_ADDRESS'],
                    TEST_CONFIG['DATA_SERVER_PORT'],
                    ds_url,
                    req_class,
                    req_n),
                timeout=TEST_CONFIG['REQ_TIMEOUT'])
        except (requests.ConnectionError, requests.Timeout) as err:
            # Normalize transport errors so _process_req can answer 404 uniformly.
            raise ValueError(err)
        t_end = time.perf_counter()
        self.log_message('{:25s} | response code {} in {:5.3f} seconds'.format(
            '_get_from_data_server',
            resp.status_code,
            t_end - t_start))
        if not resp.ok:
            raise ValueError('status_code {}'.format(resp.status_code))
        # NOTE(review): unpickling network data is only acceptable because the data
        # server is part of this test harness -- never do this with untrusted input.
        req = pickle.loads(resp.content)
        if not isinstance(req, Request):
            raise ValueError('pickled content is of class {}'.format(req.__class__))
        return req
    def _process_req(self):
        """Rebuild the obtained request, fetch the expected one and compare them.

        Responds 200 when the requests match, 400 on mismatch or a bad body,
        404 when the expected request cannot be retrieved.
        """
        self.log_message('{:25s} | START {}'.format('_process_req', '-' * 10))
        # build obtained Request object
        parts = self.path.split('?', maxsplit=1)
        obt_req = Request(method=self.command, url=parts[0])
        obt_req.headers = self.headers.as_string()
        if len(parts) > 1:
            obt_req.query_params = parts[1]
        if self.command == 'POST':
            try:
                body_bytes = self.rfile.read(
                    int(self.headers.get('content-length', 0)))
                obt_req.body_params = body_bytes.decode()
            except ValueError as err:
                self.log_message('{:25s} | {}'.format('_process_req', err))
                self.send_response(400)  # 400: bad request
                self.end_headers()
                return
        # get expected Request object
        try:
            # The proxy test adds x-proxy-test-* headers identifying the expected
            # request; pop them so they do not show up in the comparison below.
            ds_url = obt_req.headers.pop('x-proxy-test-ds-url')
            req_class = obt_req.headers.pop('x-proxy-test-req-class')
            req_n = int(obt_req.headers.pop('x-proxy-test-req-n'))
            exp_req = self._get_from_data_server(ds_url, req_class, req_n)
        except (KeyError, IndexError, ValueError) as err:
            self.log_message('{:25s} | {}'.format('_process_req', err))
            self.send_response(404)
            self.end_headers()
            return
        # this header gives differences
        obt_req.headers.pop('content-length', None)
        exp_req.headers.pop('content-length', None)
        # compare Request objects
        if obt_req == exp_req:
            self.log_message('{:25s} | requests are equal'.format('_process_req'))
        else:
            # Log a field-by-field diff so mismatches are easy to debug.
            self.log_message('{:25s} | ERROR requests not equal'.format('_process_req'))
            if obt_req.method != exp_req.method:
                self.log_message('{:25s} | diff method'.format('_process_req'))
                self.log_message('{:25s} | obtained {}'.format('_process_req', obt_req.method))
                self.log_message('{:25s} | expected {}'.format('_process_req', exp_req.method))
            if obt_req.url != exp_req.url:
                self.log_message('{:25s} | diff url'.format('_process_req'))
                self.log_message('{:25s} | obtained {}'.format('_process_req', obt_req.url))
                self.log_message('{:25s} | expected {}'.format('_process_req', exp_req.url))
            if obt_req.headers != exp_req.headers:
                self.log_message('{:25s} | diff headers'.format('_process_req'))
                self.log_message('{:25s} | obtained {:3d} {}'.format(
                    '_process_req', len(obt_req.headers), _str(obt_req, 'headers')))
                self.log_message('{:25s} | expected {:3d} {}'.format(
                    '_process_req', len(exp_req.headers), _str(exp_req, 'headers')))
            if obt_req.query_params != exp_req.query_params:
                self.log_message('{:25s} | diff query_params'.format('_process_req'))
                self.log_message('{:25s} | obtained {:3d} {}'.format(
                    '_process_req', len(obt_req.query_params), _str(obt_req, 'query_params')))
                self.log_message('{:25s} | expected {:3d} {}'.format(
                    '_process_req', len(exp_req.query_params), _str(exp_req, 'query_params')))
            if obt_req.body_params != exp_req.body_params:
                self.log_message('{:25s} | diff body_params'.format('_process_req'))
                self.log_message('{:25s} | obtained {:3d} {}'.format(
                    '_process_req', len(obt_req.body_params), _str(obt_req, 'body_params')))
                self.log_message('{:25s} | expected {:3d} {}'.format(
                    '_process_req', len(exp_req.body_params), _str(exp_req, 'body_params')))
            self.send_response(400)  # 400: bad request
            self.end_headers()
            return
        self.log_message('{:25s} | END {}'.format('_process_req', '-' * 10))
        self.send_response(200)
        self.end_headers()
    def do_GET(self):
        # GET and POST share the same comparison logic.
        self._process_req()
    def do_POST(self):
        self._process_req()
def run():
    """Start the destination HTTP server and serve requests until interrupted."""
    httpd = HTTPServer(
        (TEST_CONFIG['DESTINATION_ADDRESS'], TEST_CONFIG['DESTINATION_PORT']),
        RequestHandler)
    print('starting server on {} ...'.format(httpd.server_address))
    httpd.serve_forever()
|
nico-ralf-ii-fpuna/paper
|
waf/test_2_waf_speed/destination.py
|
Python
|
mpl-2.0
| 6,276
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import re
from django import template
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse, NoReverseMatch
# Template-tag registry for the filters defined in this module.
register = template.Library()
def create_process_link(match):
    """Render a '#&lt;uuid&gt;' process mention as a link to the process detail page."""
    uuid = match.group(0).rsplit('#', 1)[-1]
    try:
        url = reverse('process_detail', kwargs={'uuid': uuid})
    except NoReverseMatch:
        # Unknown uuid: emit a dead anchor instead of breaking the render.
        url = '#'
    return '<a href="{0}">{1}</a>'.format(url, uuid)
def create_sample_link(match):
    """Render a '#&lt;uuid&gt;' sample mention as a link to the sample detail page."""
    uuid = match.group(0).rsplit('#', 1)[-1]
    try:
        url = reverse('sample_detail', kwargs={'uuid': uuid})
    except NoReverseMatch:
        # Unknown uuid: emit a dead anchor instead of breaking the render.
        url = '#'
    return '<a href="{0}">{1}</a>'.format(url, uuid)
def create_literature_link(match):
    """Render a '#lit-&lt;pk&gt;' mention as a link to the literature detail redirector."""
    pk = match.group(0).rsplit('#lit-', 1)[-1]
    try:
        url = reverse('literature_detail_redirector', kwargs={'pk': pk})
    except NoReverseMatch:
        # Unknown pk: emit a dead anchor instead of breaking the render.
        url = '#'
    return '<a href="{0}">literature_{1}</a>'.format(url, pk)
def convert_to_links(update):
    """Replace #-prefixed sample/process/literature mentions with HTML links."""
    substitutions = (
        (r"#s\w+", create_sample_link),      # links samples
        (r"#p\w+", create_process_link),     # links processes
        (r"#lit-\w+", create_literature_link),  # links literature
    )
    for pattern, replacement in substitutions:
        update = re.sub(pattern, replacement, update)
    return mark_safe(update)
# Mark the output as safe HTML and expose the filter to Django templates.
convert_to_links.is_safe = True
register.filter('convert_to_links', convert_to_links)
|
wbg-optronix-lab/emergence-lab
|
project_management/templatetags/dashboard_tags.py
|
Python
|
mit
| 1,405
|
# Copyright 2012 Nebula, Inc.
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from neutronclient.common import exceptions as neutron_exc
from horizon import exceptions
from horizon import forms
from horizon.utils import memoized
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard import policy
from openstack_dashboard.utils import filters
# URL name of the "allocate floating IP" view; attached to the ip_id widget
# as an add-item link in AssociateIPAction.__init__.
ALLOCATE_URL = "horizon:project:floating_ips:allocate"
class AssociateIPAction(workflows.Action):
    """Workflow action selecting a floating IP and the port to associate it with."""
    use_required_attribute = False
    ip_id = forms.ThemableDynamicTypedChoiceField(
        label=_("IP Address"),
        coerce=filters.get_int_or_uuid,
        empty_value=None
    )
    instance_id = forms.ThemableChoiceField(
        label=_("Port to be associated")
    )
    class Meta(object):
        name = _("IP Address")
        help_text = _("Select the IP address you wish to associate with "
                      "the selected instance or port.")
    def __init__(self, *args, **kwargs):
        super(AssociateIPAction, self).__init__(*args, **kwargs)
        # If AssociateIP is invoked from instance menu, instance_id parameter
        # is passed in URL. In Neutron based Floating IP implementation
        # an association target is not an instance but a port, so we need
        # to get an association target based on a received instance_id
        # and set the initial value of instance_id ChoiceField.
        q_instance_id = self.request.GET.get('instance_id')
        q_port_id = self.request.GET.get('port_id')
        if policy.check((("network", "create_floatingip"),),
                        request=self.request):
            self.fields['ip_id'].widget.add_item_link = ALLOCATE_URL
        if q_instance_id:
            targets = self._get_target_list(q_instance_id)
            # Setting the initial value here is required to avoid a situation
            # where instance_id passed in the URL is used as the initial value
            # unexpectedly. (This always happens if the form is invoked from
            # the instance table.)
            if targets:
                self.initial['instance_id'] = targets[0].id
            else:
                self.initial['instance_id'] = ''
        elif q_port_id:
            # Invoked with a port id: pre-select the matching association target.
            targets = self._get_target_list()
            for target in targets:
                if target.port_id == q_port_id:
                    self.initial['instance_id'] = target.id
                    break
    def populate_ip_id_choices(self, request, context):
        """Build the choice list of floating IPs not yet bound to a port."""
        ips = []
        redirect = reverse('horizon:project:floating_ips:index')
        try:
            ips = api.neutron.tenant_floating_ip_list(self.request)
        except neutron_exc.ConnectionFailed:
            exceptions.handle(self.request, redirect=redirect)
        except Exception:
            exceptions.handle(self.request,
                              _('Unable to retrieve floating IP addresses.'),
                              redirect=redirect)
        # Only IPs without a port are candidates for association.
        options = sorted([(ip.id, ip.ip) for ip in ips if not ip.port_id])
        if options:
            options.insert(0, ("", _("Select an IP address")))
        else:
            options = [("", _("No floating IP addresses allocated"))]
        return options
    @memoized.memoized_method
    def _get_target_list(self, instance_id=None):
        """Return association targets, optionally limited to a single instance."""
        targets = []
        try:
            if instance_id:
                targets = api.neutron.floating_ip_target_list_by_instance(
                    self.request, instance_id)
            else:
                targets = api.neutron.floating_ip_target_list(self.request)
        except Exception:
            redirect = reverse('horizon:project:floating_ips:index')
            exceptions.handle(self.request,
                              _('Unable to retrieve instance list.'),
                              redirect=redirect)
        return targets
    # TODO(amotoki): [drop-nova-network] Rename instance_id to port_id
    def populate_instance_id_choices(self, request, context):
        """Build the choice list of ports that can receive the floating IP."""
        q_instance_id = self.request.GET.get('instance_id')
        # The reason of specifying an empty tuple when q_instance_id is None
        # is to make memoized_method _get_target_list work. Two calls of
        # _get_target_list from here and __init__ must have a same arguments.
        params = (q_instance_id, ) if q_instance_id else ()
        targets = self._get_target_list(*params)
        instances = sorted([(target.id, target.name) for target in targets],
                           # Sort FIP targets by server name for easy browsing
                           key=lambda x: x[1])
        if instances:
            instances.insert(0, ("", _("Select a port")))
        else:
            instances = (("", _("No ports available")),)
        return instances
class AssociateIP(workflows.Step):
    """Workflow step that resolves the chosen floating IP id to its address."""
    action_class = AssociateIPAction
    contributes = ("ip_id", "instance_id", "ip_address")
    def contribute(self, data, context):
        # Look the selected id up in the form's choices so the workflow can
        # show the human-readable IP address in its status messages.
        context = super(AssociateIP, self).contribute(data, context)
        selected_ip = data.get('ip_id', None)
        if selected_ip:
            choices = dict(self.action.fields['ip_id'].choices)
            context["ip_address"] = choices.get(selected_ip, None)
        return context
class IPAssociationWorkflow(workflows.Workflow):
    """Workflow that associates a floating IP address with an instance port."""
    slug = "ip_association"
    name = _("Manage Floating IP Associations")
    finalize_button_name = _("Associate")
    success_message = _('IP address %s associated.')
    failure_message = _('Unable to associate IP address %s.')
    success_url = "horizon:project:floating_ips:index"
    default_steps = (AssociateIP,)
    def format_status_message(self, message):
        # Substitute the IP address into the '%s'-style message templates above.
        if "%s" in message:
            return message % self.context.get('ip_address',
                                              _('unknown IP address'))
        else:
            return message
    def handle(self, request, data):
        """Perform the association; return False (with an error shown) on failure."""
        try:
            api.neutron.floating_ip_associate(request,
                                              data['ip_id'],
                                              data['instance_id'])
        except neutron_exc.Conflict:
            # The port already holds a floating IP: report a specific message.
            msg = _('The requested instance port is already'
                    ' associated with another floating IP.')
            exceptions.handle(request, msg)
            self.failure_message = msg
            return False
        except Exception:
            exceptions.handle(request)
            return False
        return True
|
ChameleonCloud/horizon
|
openstack_dashboard/dashboards/project/floating_ips/workflows.py
|
Python
|
apache-2.0
| 7,094
|
"""
input: a loaded image;
output: [[x,y],[width,height]] of the detected mouth area
"""
import cv
def findmouth(img):
    """Detect the mouth region of the largest face in a loaded image.

    Returns the Haar detection for the best mouth candidate
    ([[x, y, width, height], neighbours]) or the legacy sentinel 2 when no
    face or no plausible mouth is found (kept for existing callers).
    """
    # INITIALIZE: loading the classifiers
    haarFace = cv.Load('haarcascade_frontalface_default.xml')
    haarMouth = cv.Load('haarcascade_mouth.xml')
    # running the classifiers
    storage = cv.CreateMemStorage()
    detectedFace = cv.HaarDetectObjects(img, haarFace, storage)
    detectedMouth = cv.HaarDetectObjects(img, haarMouth, storage)
    # FACE: keep the largest detected face as the reference face
    maxFaceSize = 0
    maxFace = 0
    if detectedFace:
        for face in detectedFace:  # face: [0][0]: x; [0][1]: y; [0][2]: width; [0][3]: height
            if face[0][3] * face[0][2] > maxFaceSize:
                maxFaceSize = face[0][3] * face[0][2]
                maxFace = face
    if maxFace == 0:  # did not detect face
        return 2
    def mouth_in_lower_face(mouth, face):
        # A plausible mouth sits in the lower 2/5 of the face, its lower edge
        # stays above the face's lower edge, and its horizontal center is close
        # to the face's center (within 1/10 of the face width).
        if (mouth[0][1] > face[0][1] + face[0][3] * 3 / float(5)
            and mouth[0][1] + mouth[0][3] < face[0][1] + face[0][3]
            and abs((mouth[0][0] + mouth[0][2] / float(2))
                    - (face[0][0] + face[0][2] / float(2))) < face[0][2] / float(10)):
            return True
        else:
            return False
    # MOUTH: keep the largest plausible candidate. Use an explicit sentinel
    # instead of the previous UnboundLocalError-based control flow.
    maxMouthSize = 0
    maxMouth = None
    if detectedMouth:
        for mouth in detectedMouth:
            if mouth_in_lower_face(mouth, maxFace) and mouth[0][3] * mouth[0][2] > maxMouthSize:
                maxMouthSize = mouth[0][3] * mouth[0][2]
                maxMouth = mouth
    if maxMouth is None:  # no plausible mouth found
        return 2
    return maxMouth
|
divija96/Emotion-Detection
|
code/mouthdetection.py
|
Python
|
gpl-3.0
| 1,760
|
#!/usr/bin/env python
# Copyright 2011 Google Inc. All Rights Reserved.
"""This is the GRR client for thread pools."""
import pickle
import threading
import time
import logging
from grr.client import client
# pylint: disable=unused-import
# Make sure we load the client plugins
from grr.client import client_plugins
# pylint: enable=unused-import
from grr.client import comms
from grr.client import vfs
from grr.lib import config_lib
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import startup
# Command-line flags controlling the size and behavior of the client pool.
flags.DEFINE_integer("nrclients", 1,
                     "Number of clients to start")
# Fix: the two help-string fragments previously concatenated to
# "...certificates forthe client pool." (missing space).
flags.DEFINE_string("cert_file", "",
                    "Path to a file that stores all certificates for "
                    "the client pool.")
flags.DEFINE_bool("enroll_only", False,
                  "If specified, the script will enroll all clients and exit.")
class PoolGRRClient(client.GRRClient, threading.Thread):
  """A GRR client for running in pool mode.

  Each instance is a daemon thread wrapping one GRR client with its own key.
  """
  def __init__(self, *args, **kw):
    """Constructor."""
    super(PoolGRRClient, self).__init__(*args, **kw)
    self.daemon = True  # do not block process exit on this thread
    self.stop = False   # set by Stop() to end the Run loop
    # Is this client already enrolled?
    self.enrolled = False
    self.common_name = self.client.communicator.common_name
    self.private_key = self.client.communicator.private_key
  def Run(self):
    """Drive the underlying client until Stop() is called."""
    for status in self.client.Run():
      # if the status is 200 we assume we have successfully enrolled.
      if status.code == 200:
        self.enrolled = True
      # Thread should stop now.
      if self.stop:
        break
  def Stop(self):
    # Signal Run() to break out of its loop after the next status.
    self.stop = True
  def run(self):
    # threading.Thread entry point; delegates to the GRR-style Run().
    self.Run()
def CreateClientPool(n):
  """Create n clients to run in a pool.

  Reuses private keys pickled in --cert_file when available; otherwise
  generates fresh keys. Blocks until interrupted (or, with --enroll_only,
  until all n clients have enrolled), then stops the pool and saves keys.
  """
  clients = []
  # Load previously stored clients.
  try:
    fd = open(flags.FLAGS.cert_file, "rb")
    certificates = pickle.load(fd)
    fd.close()
    for certificate in certificates:
      clients.append(PoolGRRClient(private_key=certificate))
    clients_loaded = True
  except (IOError, EOFError):
    # No usable cert file: fall through and generate keys below.
    clients_loaded = False
  if clients_loaded and len(clients) < n:
    raise RuntimeError("Loaded %d clients, but expected %d." %
                       (len(clients), n))
  while len(clients) < n:
    # Generate a new RSA key pair for each client.
    key = rdfvalue.PEMPrivateKey.GenKey(bits=comms.ClientCommunicator.BITS)
    clients.append(PoolGRRClient(private_key=key))
  # Start all the clients now.
  for c in clients:
    c.start()
  start_time = time.time()
  try:
    if flags.FLAGS.enroll_only:
      while True:
        time.sleep(1)
        enrolled = len([x for x in clients if x.enrolled])
        if enrolled == n:
          logging.info("All clients enrolled, exiting.")
          break
        else:
          logging.info("%s: Enrolled %d/%d clients.", int(time.time()),
                       enrolled, n)
    else:
      # Normal mode: idle until interrupted; the client threads do the work.
      try:
        while True:
          time.sleep(100)
      except KeyboardInterrupt:
        pass
  finally:
    # Stop all pool clients.
    for cl in clients:
      cl.Stop()
    # Note: code below is going to be executed after SIGTERM is sent to this
    # process.
    logging.info("Pool done in %s seconds.",
                 time.time() - start_time)
    # The way benchmarking is supposed to work is that we execute poolclient with
    # --enroll_only flag, it dumps the certificates to the flags.FLAGS.cert_file.
    # Then, all further poolclient invocations just read private keys back
    # from that file. Therefore if private keys were loaded from
    # flags.FLAGS.cert_file, then there's no need to rewrite it again with the
    # same data.
    if not clients_loaded:
      logging.info("Saving certificates.")
      with open(flags.FLAGS.cert_file, "wb") as fd:
        pickle.dump([x.private_key for x in clients], fd)
def CheckLocation():
  """Abort unless a configured control URL points at a test/staging server."""
  urls = config_lib.CONFIG["Client.control_urls"]
  if any("staging" in u or "localhost" in u for u in urls):
    # At least one safe (non-production) endpoint is configured.
    return
  logging.error("Poolclient should only be run against test or staging.")
  exit()
def main(unused_argv):
  """Entry point: configure the pool client context and start the client pool."""
  config_lib.CONFIG.AddContext(
      "PoolClient Context",
      "Context applied when we run the pool client.")
  startup.ClientInit()
  # Discard any config writebacks: pool clients must not persist state.
  config_lib.CONFIG.SetWriteBack("/dev/null")
  CheckLocation()
  # Let the OS handler also handle sleuthkit requests since sleuthkit is not
  # thread safe.
  tsk = rdfvalue.PathSpec.PathType.TSK
  os = rdfvalue.PathSpec.PathType.OS
  vfs.VFS_HANDLERS[tsk] = vfs.VFS_HANDLERS[os]
  CreateClientPool(flags.FLAGS.nrclients)
# Delegate flag parsing and process startup to the GRR flags helper.
if __name__ == "__main__":
  flags.StartMain(main)
|
ojengwa/grr
|
client/poolclient.py
|
Python
|
apache-2.0
| 4,657
|
import numpy as np
import pylab as py
# A new data type
# A new data type
fulldata = np.dtype([('Name',np.str_,20),('Period',np.float32),('H',np.float32),('Sigma_m',np.float32),('V',np.float32),('I',np.float32)])
# Read the catalogue columns: name, period and H/V/I photometry (Sigma_m is read but unused below).
N,P,H,Sm,V,I = np.loadtxt('data.txt',unpack=True,usecols=[0,1,2,3,4,5],dtype=fulldata)
# Reddening-free Wesenheit-style magnitude W = H - 0.41*(V-I)
# (coefficient 0.41 presumably from the source paper -- confirm).
mw = H - 0.41*(V-I)
# Period-magnitude diagram: log-period axis, magnitudes increase downwards.
py.scatter(P,mw)
py.xscale('log')
py.xlim(2,200)
py.xlabel('Period [days]')
py.ylabel('W [mag]')
py.gca().invert_yaxis()
py.show()
exit()
|
wilmarcardonac/hypermcmc
|
data/figure1GEpaper.py
|
Python
|
gpl-2.0
| 453
|
from rest_framework.views import APIView
from rest_framework.response import Response
class DrfJsonEchoViewClass(APIView):
    """Echo the POSTed request payload back to the caller."""
    def post(self, request, *args, **kwargs):
        # brute-force compatibility with drf 2.x and 3.x
        try:
            payload = request.data   # DRF >= 3.0
        except AttributeError:
            payload = request.DATA   # DRF 2.x
        echoed = payload if payload else '* No data received.'
        return Response({'Request Payload': echoed})
|
cralston0/gzip-encoding
|
django/drf_views.py
|
Python
|
bsd-3-clause
| 565
|
# -*- coding: utf-8 -*-
#= DESCRIZIONE =================================================================
# 2 7 -2 serpidi
# Questo script non permette ad un player di andare verso una direzione se non
# ha un permesso speciale di Chadyne oppure non uccide il mob di guardia
#= IMPORT ======================================================================
from src.database import database
from src.enums import TO
from src.mob import Mob
from src.room import Room
from src.commands.command_say import command_say
#= COSTANTI ====================================================================
# Prototype code of the black serpide guard mob; live instances carry a
# '#<n>' suffix, stripped before comparison in before_west below.
SERPIDE_NERO_PROTO_CODE = "serpidi_mob_serpidenero_36"
#= FUNZIONI ====================================================================
def before_west(entity, from_room, direction, to_room, running, behavioured):
    """Block westward movement while the black serpide guard is in the room.

    Returns True (veto the movement) after making the guard speak; returns
    None implicitly when no guard is present, letting the movement proceed.
    """
    num_players = 0
    serpide_nero = None
    # Scan the room: find the guard mob and count the players present.
    for contenuto in from_room.iter_contains():
        if contenuto.IS_MOB:
            if contenuto.code.split("#")[0] == SERPIDE_NERO_PROTO_CODE:
                # Presumably splits a single guard off any stacked mob group
                # -- confirm split_entity semantics.
                serpide_nero = contenuto.split_entity(1)
        elif contenuto.IS_PLAYER:
            num_players += 1
    if serpide_nero:
        # Personal line when a single player is present, generic one otherwise
        # (spoken text is intentionally kept in Italian).
        if num_players == 1:
            to_say = "Ehi! Non ti è concesso passare da qui!"
        else:
            to_say = "Per andare verso questa direzione ci vuole un permesso speciale che non avrai mai!!!"
        command_say(serpide_nero, to_say)
        return True
#- End of function -
|
Onirik79/aaritmud
|
data/proto_rooms/serpidi/serpidi_room_8653_01.py
|
Python
|
gpl-2.0
| 1,462
|
# -*- coding: utf-8 -*-
# One-line project description reused in the texinfo document tuple below.
DESCRIPTION = (
    'A flask extension that provides one application programming interface ' +
    'to read and write data in different excel file formats' +
    ''
)
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Flask-Excel'
copyright = '2015-2020 Onni Software Ltd.'
author = 'C.W.'
# The short X.Y version
version = '0.0.7'
# The full version, including alpha/beta/rc tags
release = '0.0.7'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode',]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Extension configuration -------------------------------------------------
# -- Options for intersphinx extension ---------------------------------------
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/3/': None}
# TODO: html_theme not configurable upstream
import sys # noqa
import os # noqa
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes']
# NOTE: overrides the 'alabaster' theme assigned above with the bundled theme.
html_theme = 'flask_small'
# TODO: DESCRIPTION not configurable upstream
texinfo_documents = [
    ('index', 'Flask-Excel',
     'Flask-Excel Documentation',
     'Onni Software Ltd.', 'Flask-Excel',
     DESCRIPTION,
     'Miscellaneous'),
]
intersphinx_mapping.update({
    'pyexcel': ('http://pyexcel.readthedocs.io/en/latest/', None),
    'flask': ('https://flask.palletsprojects.com/en/1.1.x/', None)
})
# Root document of the docs tree (explicit for older Sphinx versions).
master_doc = "index"
|
chfw/Flask-Excel
|
docs/source/conf.py
|
Python
|
bsd-3-clause
| 3,383
|
__author__ = 'tom1231'
from PyQt4.QtGui import *
from BAL.Interface.DeviceFrame import DeviceFrame, EX_DEV, SMOOTHER
from lxml.etree import SubElement
class VelocitySmoother(DeviceFrame):
    """Configurator frame for the velocity_smoother launch include.

    Holds the differential-drive topic names and smoothing limits as
    strings, round-trips them through a plain dict, renders editable and
    read-only Qt rows, and writes the matching <arg> elements for
    velocity_smoother.launch.
    """

    # Serialized line-edit settings, in display order.  Each name is used
    # three ways: as the dict key in toDict()/fromDict(), as the backing
    # '_<name>' attribute, and as the QLineEdit attribute created by
    # showDetails() -- which is what lets add() read the widgets back
    # generically.  robotFeedback is excluded: it is a combo box.
    _TEXT_FIELDS = (
        'diffDriverTopic',
        'smoothTopic',
        'diffDriverFeedback',
        'diffDriverOdometryFeedback',
        'speedLimitLinear',
        'speedLimitAngular',
        'accelerationLimitLinear',
        'accelerationLimitAngular',
        'frequency',
        'deceleration',
    )

    # Row captions, parallel to _TEXT_FIELDS.
    _LABELS = (
        'Differential drive topic: ',
        'Differential drive smooth topic: ',
        'Differential drive end robot topic: ',
        'Differential odometry topic: ',
        'Differential speed limit linear: ',
        'Differential speed limit angular: ',
        'Differential acceleration limit linear: ',
        'Differential acceleration limit angular: ',
        'Frequency rate: ',
        'Deceleration rate: ',
    )

    def __init__(self, frame, data):
        DeviceFrame.__init__(self, EX_DEV, frame, data)
        self._diffDriverTopic = 'diff/command'
        self._smoothTopic = 'diff/smooth_command'
        self._diffDriverFeedback = 'diff/command'
        self._diffDriverOdometryFeedback = 'diff/odometry'
        self._speedLimitLinear = '0.8'
        self._speedLimitAngular = '5.4'
        self._accelerationLimitLinear = '0.3'
        self._accelerationLimitAngular = '3.5'
        self._frequency = '20.0'
        self._deceleration = '1.0'
        # Combo index kept as a string: '0' none, '1' odometry,
        # '2' end robot commands (the default).
        self._robotFeedback = '2'

    def getName(self):
        return 'velocity_smoother'

    def toDict(self):
        """Serialize the current settings to a plain dict."""
        data = dict()
        data['type'] = SMOOTHER
        for field in self._TEXT_FIELDS:
            data[field] = getattr(self, '_' + field)
        data['robotFeedback'] = self._robotFeedback
        return data

    def fromDict(self, data):
        """Restore settings from a dict produced by toDict()."""
        for field in self._TEXT_FIELDS:
            setattr(self, '_' + field, data[field])
        self._robotFeedback = data['robotFeedback']

    def add(self):
        """Validate the device name, then pull edited values from the widgets."""
        if not self.nameIsValid():
            error = QErrorMessage()
            error.setWindowTitle("Same name error")
            error.showMessage("Name already taken.")
            error.exec_()
            self._isValid = False
            return
        for field in self._TEXT_FIELDS:
            setattr(self, '_' + field, str(getattr(self, field).text()))
        self._robotFeedback = str(self.robotFeedback.itemData(self.robotFeedback.currentIndex()).toString())
        self._isValid = True

    def showDetails(self, items=None):
        """Create the editable widget rows inside the parent frame."""
        for field in self._TEXT_FIELDS:
            setattr(self, field, QLineEdit(getattr(self, '_' + field)))
        self.robotFeedback = QComboBox()
        self.robotFeedback.addItem('None', '0')
        self.robotFeedback.addItem('Odometry', '1')
        self.robotFeedback.addItem('End robot commands', '2')
        self.robotFeedback.setCurrentIndex(int(self._robotFeedback))
        layout = self._frame.layout()
        for label, field in zip(self._LABELS, self._TEXT_FIELDS):
            layout.addRow(QLabel(label), getattr(self, field))
        layout.addRow(QLabel('Feedback mode: '), self.robotFeedback)

    def printDetails(self):
        """Render the current settings as read-only label rows."""
        layout = self._frame.layout()
        for label, field in zip(self._LABELS, self._TEXT_FIELDS):
            layout.addRow(QLabel(label), QLabel(getattr(self, '_' + field)))
        feedback_names = {'0': 'None', '1': 'Odometry'}
        robotFeedbackText = feedback_names.get(self._robotFeedback, 'End robot commands')
        layout.addRow(QLabel('Feedback mode: '), QLabel(robotFeedbackText))

    def saveToFile(self, parent):
        """Append the launch-file <include> and its <arg> children to *parent*."""
        element = SubElement(parent, 'include', {
            'file': '$(find ric_board)/scripts/velocity_smoother.launch'
        })
        # NOTE(review): raw_cmd_vel_topic is fed from the *smooth* topic and
        # smooth_cmd_vel_topic from the drive topic; this mirrors the original
        # wiring exactly -- confirm against velocity_smoother.launch.
        launch_args = (
            ('raw_cmd_vel_topic', self._smoothTopic),
            ('smooth_cmd_vel_topic', self._diffDriverTopic),
            ('robot_cmd_vel_topic', self._diffDriverFeedback),
            ('odom_topic', self._diffDriverOdometryFeedback),
            ('SPEED_LIM_V', self._speedLimitLinear),
            ('SPEED_LIM_W', self._speedLimitAngular),
            ('ACCEL_LIM_V', self._accelerationLimitLinear),
            ('ACCEL_LIM_W', self._accelerationLimitAngular),
            ('FREQUENCY', self._frequency),
            ('DECEL_FACTOR', self._deceleration),
            ('ROBOT_FEEDBACK', self._robotFeedback),
        )
        for arg_name, arg_value in launch_args:
            SubElement(element, 'arg', {'name': arg_name, 'value': arg_value})
|
robotican/ric
|
ric_board/scripts/RiCConfigurator/BAL/Devices/velocitySmoother.py
|
Python
|
bsd-3-clause
| 8,523
|
#!/usr/bin/env python
#
# Copyright 2016 timercrack
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from pydatacoll.utils.read_config import *
def get_logger(logger_name='main'):
    """Return a logger writing to both a per-name log file and the console.

    Handlers are attached only on the first call for a given name; later
    calls return the already-configured logger unchanged.
    """
    logger = logging.getLogger(logger_name)
    if logger.handlers:
        # Already configured by an earlier call.
        return logger
    log_file = os.path.join(app_dir.user_log_dir, '{}.log'.format(logger_name))
    if not os.path.exists(app_dir.user_log_dir):
        os.makedirs(app_dir.user_log_dir)
    fmt = config.get('LOG', 'format',
                     fallback="%(asctime)s %(name)s [%(levelname)s] %(message)s")
    formatter = logging.Formatter(fmt)
    logger.setLevel(config.get('LOG', 'level', fallback='ERROR'))
    # File first, then console -- same order as the original wiring.
    for handler in (logging.FileHandler(log_file), logging.StreamHandler()):
        handler.setFormatter(formatter)
        logger.addHandler(handler)
    return logger
|
timercrack/pydatacoll
|
pydatacoll/utils/logger.py
|
Python
|
apache-2.0
| 1,464
|
from threading import Timer
class RepeatedTimer(object):
    """Periodically invoke *function* every *interval* seconds.

    Unlike ``threading.Timer``, which fires exactly once, this wrapper
    re-arms a fresh Timer from inside the callback, so the function keeps
    firing until :meth:`stop` is called.  The next tick is scheduled
    *before* the callback runs, so a slow callback does not skew the
    schedule by its own runtime.
    """

    def __init__(self, interval, function, *args, **kwargs):
        # type: (float, callable, tuple, dict) -> None
        super(RepeatedTimer, self).__init__()
        self.name = None        # optional thread name applied to each Timer
        self.daemon = False     # daemon flag applied to each Timer
        self.interval = interval
        self.function = function
        self.args = args
        self.kwargs = kwargs
        self.is_running = False
        # Created but never started; start() replaces it with a live Timer.
        self._timer = Timer(self.interval, self._run)

    def _run(self):
        # Re-arm first, then run the callback (keeps the period stable).
        self.is_running = False
        self.start()
        self.function(*self.args, **self.kwargs)

    def start(self):
        """Arm the timer; a no-op if it is already running."""
        if not self.is_running:
            self._timer = Timer(self.interval, self._run)
            # 'is not None' (identity), not '!= None' -- fixes the idiom.
            if self.name is not None:
                # Assign the attributes directly; Thread.setName/setDaemon
                # are deprecated aliases removed in newer Pythons.
                self._timer.name = self.name
            self._timer.daemon = self.daemon
            self._timer.start()
            self.is_running = True

    def stop(self):
        """Cancel the pending tick and halt rescheduling."""
        self._timer.cancel()
        self.is_running = False

    def setName(self, name):
        """Set the thread name used for the current and future Timers."""
        self.name = name
        self._timer.name = self.name

    def setDaemon(self, boolean):
        """Set the daemon flag used for the current and future Timers.

        Raises RuntimeError if the current Timer thread has already been
        started (same restriction as ``threading.Thread.daemon``).
        """
        self.daemon = boolean
        self._timer.daemon = self.daemon
|
FlorianSauer/PyDrop
|
PyDrop/RepeatedTimer.py
|
Python
|
mit
| 1,207
|
import os
import pathlib
from unittest import mock
import pytest
from mopidy.internal import xdg
@pytest.fixture
def environ():
    """Run each test against an empty os.environ, restoring it afterwards."""
    env_patcher = mock.patch.dict(os.environ, clear=True)
    yield env_patcher.start()
    env_patcher.stop()
def test_cache_dir_default(environ):
    """Without XDG variables, the cache dir falls back to ~/.cache."""
    expected = pathlib.Path("~/.cache").expanduser()
    assert xdg.get_dirs()["XDG_CACHE_DIR"] == expected
def test_cache_dir_from_env(environ):
    """XDG_CACHE_HOME overrides the default cache dir."""
    os.environ["XDG_CACHE_HOME"] = "/foo/bar"
    expected = pathlib.Path("/foo/bar")
    assert xdg.get_dirs()["XDG_CACHE_DIR"] == expected
def test_config_dir_default(environ):
    """Without XDG variables, the config dir falls back to ~/.config."""
    expected = pathlib.Path("~/.config").expanduser()
    assert xdg.get_dirs()["XDG_CONFIG_DIR"] == expected
def test_config_dir_from_env(environ):
    """XDG_CONFIG_HOME overrides the default config dir."""
    os.environ["XDG_CONFIG_HOME"] = "/foo/bar"
    expected = pathlib.Path("/foo/bar")
    assert xdg.get_dirs()["XDG_CONFIG_DIR"] == expected
def test_data_dir_default(environ):
    """Without XDG variables, the data dir falls back to ~/.local/share."""
    expected = pathlib.Path("~/.local/share").expanduser()
    assert xdg.get_dirs()["XDG_DATA_DIR"] == expected
def test_data_dir_from_env(environ):
    """XDG_DATA_HOME overrides the default data dir."""
    os.environ["XDG_DATA_HOME"] = "/foo/bar"
    expected = pathlib.Path("/foo/bar")
    assert xdg.get_dirs()["XDG_DATA_DIR"] == expected
def test_user_dirs(environ, tmpdir):
    """Entries in user-dirs.dirs are parsed; unset user dirs stay absent."""
    os.environ["XDG_CONFIG_HOME"] = str(tmpdir)
    dirs_file = os.path.join(str(tmpdir), "user-dirs.dirs")
    with open(dirs_file, "wb") as fh:
        fh.write(b"# Some comments\n")
        fh.write(b'XDG_MUSIC_DIR="$HOME/Music2"\n')
    result = xdg.get_dirs()
    assert result["XDG_MUSIC_DIR"] == pathlib.Path("~/Music2").expanduser()
    assert "XDG_DOWNLOAD_DIR" not in result
def test_user_dirs_when_no_dirs_file(environ, tmpdir):
    """No user-dirs.dirs file means no XDG user dirs in the result."""
    os.environ["XDG_CONFIG_HOME"] = str(tmpdir)
    result = xdg.get_dirs()
    for key in ("XDG_MUSIC_DIR", "XDG_DOWNLOAD_DIR"):
        assert key not in result
|
jcass77/mopidy
|
tests/internal/test_xdg.py
|
Python
|
apache-2.0
| 1,750
|
"""publication_manager URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
    # Route /admin/ to the built-in Django admin site (the only route so far).
    url(r'^admin/', admin.site.urls),
]
|
vivitc/publication-manager
|
publication_manager/urls.py
|
Python
|
apache-2.0
| 775
|
from django.conf import settings
from django.utils.hashcompat import sha_constructor
# Fall back to a delimiter derived from SECRET_KEY when the project does not
# configure PHASED_SECRET_DELIMITER explicitly.
if not hasattr(settings, 'PHASED_SECRET_DELIMITER'):
    settings.PHASED_SECRET_DELIMITER = sha_constructor(getattr(settings, 'SECRET_KEY', '')).hexdigest()
# quoting the secret delimiter to make sure Debug Toolbar doesn't render it
settings.PHASED_SECRET_DELIMITER = '"%s"' % settings.PHASED_SECRET_DELIMITER
|
OmarIthawi/django-phased
|
phased/models.py
|
Python
|
bsd-3-clause
| 397
|
#!/usr/bin/python
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# Copyright 2007 Seth Vidal
import sys
import os
sys.path.insert(0,'/usr/share/yum-cli/')
import yum
import yum.Errors
from utils import YumUtilBase
from yum import _
import logging
import rpmUtils
# Name of the plugin that keeps -debuginfo packages in sync on updates;
# installed automatically unless --no-debuginfo-plugin is given.
plugin_autodebuginfo_package_name = "yum-plugin-auto-update-debug-info"
class DebugInfoInstall(YumUtilBase):
    """yum-utils command: install the -debuginfo packages that match the
    named installed packages, plus debuginfo for their .so dependencies.

    NOTE: Python 2 code (print statements, `except ..., e` syntax).
    """
    NAME = 'debuginfo-install'
    VERSION = '1.0'
    USAGE = """
    debuginfo-install: Install debuginfo packages and their dependencies based on
    the name of the non-debug package
    debuginfo-install [options] package1 [package2] [package..]"""
    def __init__(self):
        # Registers the one extra CLI option, then runs the whole command
        # immediately from the constructor (see main()).
        YumUtilBase.__init__(self,
                             DebugInfoInstall.NAME,
                             DebugInfoInstall.VERSION,
                             DebugInfoInstall.USAGE)
        self.logger = logging.getLogger("yum.verbose.cli.debuginfoinstall")
        self.optparser = self.getOptionParser()
        opts = self.optparser
        # Add util commandline options to the yum-cli ones
        if hasattr(self, 'getOptionGroup'):
            opts = self.getOptionGroup()
        opts.add_option("", "--no-debuginfo-plugin",
                action="store_true",
                help="Turn off automatic installation/update of the yum debuginfo plugin")
        self.main()
    def doUtilConfigSetup(self, *args, **kwargs):
        """ We override this to get our extra option out. """
        opts = YumUtilBase.doUtilConfigSetup(self, *args, **kwargs)
        self.no_debuginfo_plugin = opts.no_debuginfo_plugin
        return opts
    def main(self):
        """Drive the whole command: parse args, lock, enable -debuginfo
        repos, build the transaction and run it.  Exits the process."""
        # Parse the commandline option and setup the basics.
        opts = self.doUtilConfigSetup()
        # Check if there is anything to do.
        if len(self.cmds) < 1:
            print self.optparser.format_help()
            sys.exit(0)
        if os.geteuid() != 0:
            print >> sys.stderr, "You must be root to run this command."
            sys.exit(1)
        try:
            self.doLock()
        except yum.Errors.LockError, e:
            self.logger.critical("Another application is holding the yum lock, cannot continue")
            sys.exit(1)
        # enable the -debuginfo repos for enabled primary repos
        repos = {}
        for repo in self.repos.listEnabled():
            repos[repo.id] = repo
        for repoid in repos:
            di = '%s-debuginfo' % repoid
            # Skip repos whose -debuginfo twin is already enabled.
            if di in repos:
                continue
            repo = repos[repoid]
            for r in self.repos.findRepos(di):
                self.logger.log(yum.logginglevels.INFO_2,
                                _('enabling %s') % r.id)
                r.enable()
                # Note: This is shared with auto-update-debuginfo
                # Copy trust/cost settings from the parent repo onto the
                # newly enabled -debuginfo repo.
                for opt in ['repo_gpgcheck', 'gpgcheck', 'cost',
                            'skip_if_unavailable']:
                    if hasattr(r, opt):
                        setattr(r, opt, getattr(repo, opt))
        # Setup yum (Ts, RPM db, Repo & Sack)
        self.doUtilYumSetup()
        self.debugInfo_main()
        if hasattr(self, 'doUtilBuildTransaction'):
            errc = self.doUtilBuildTransaction()
            if errc:
                sys.exit(errc)
        else:
            try:
                self.buildTransaction()
            except yum.Errors.YumBaseError, e:
                self.logger.critical("Error building transaction: %s" % e)
                sys.exit(1)
        if len(self.tsInfo) < 1:
            print 'No debuginfo packages available to install'
            self.doUnlock()
            sys.exit()
        sys.exit(self.doUtilTransaction())
    def di_try_install(self, po):
        """Queue the -debuginfo package matching installed package *po*,
        falling back to the source RPM's name/version when no exact
        per-binary debuginfo package exists."""
        if po.name.endswith('-debuginfo'): # Wildcard matches produce this
            return
        di_name = '%s-debuginfo' % po.name
        if self.pkgSack.searchNevra(name=di_name, arch=po.arch):
            test_name = di_name
            ver, rel = po.version, po.release
        else:
            srpm_data = rpmUtils.miscutils.splitFilename(po.sourcerpm) # take the srpmname
            srpm_name, ver, rel = srpm_data[0], srpm_data[1], srpm_data[2]
            test_name = '%s-debuginfo' % srpm_name
        self.install(name=test_name, arch=po.arch, version=ver, release=rel)
    def debugInfo_main(self):
        """for each package specified, walk the package's list of deps and install
        all the -debuginfo pkgs that match it and its debuginfo"""
        # for each pkg
        # add that debuginfo to the ts
        # look through that pkgs' deps
        # add all the debuginfos for the pkgs providing those deps
        for pkgglob in self.cmds:
            e, m, u = self.rpmdb.matchPackageNames([pkgglob])
            for po in e + m:
                try:
                    self.di_try_install(po)
                except yum.Errors.InstallError, e:
                    self.logger.critical('Could not find debuginfo for main pkg: %s' % po)
                # do each of its deps
                for (n,f,v) in po.requires:
                    # rpmlib() pseudo-deps have no packages behind them.
                    if n.startswith('rpmlib'):
                        continue
                    # Only follow shared-library deps; other requires do
                    # not map to useful debuginfo.
                    if n.find('.so') != -1:
                        for pkgtup in self.rpmdb.whatProvides(n,f,v):
                            deppo = self.rpmdb.searchPkgTuple(pkgtup)[0]
                            try:
                                self.di_try_install(deppo)
                            except yum.Errors.InstallError, e:
                                self.logger.critical('Could not find debuginfo pkg for dependency package %s' % deppo)
        # This is kinda hacky, accessing the option from the plugins code
        # but I'm not sure of a better way of doing it
        if not self.no_debuginfo_plugin and self.tsInfo:
            try:
                self.install(pattern=plugin_autodebuginfo_package_name)
            except yum.Errors.InstallError, e:
                self.logger.critical('Could not find auto debuginfo plugin')
if __name__ == '__main__':
    import locale
    # This test needs to be before locale.getpreferredencoding() as that
    # does setlocale(LC_CTYPE, "")
    try:
        locale.setlocale(locale.LC_ALL, '')
    except locale.Error, ex:
        # default to C locale if we get a failure.
        print >> sys.stderr, 'Failed to set locale, defaulting to C'
        os.environ['LC_ALL'] = 'C'
        locale.setlocale(locale.LC_ALL, 'C')
    if True: # not sys.stdout.isatty():
        import codecs
        # Wrap stdout so unicode output is encoded with the locale's
        # preferred encoding, replacing unencodable characters.
        sys.stdout = codecs.getwriter(locale.getpreferredencoding())(sys.stdout)
        sys.stdout.errors = 'replace'
    # Constructing the util runs the entire command (see __init__).
    util = DebugInfoInstall()
|
bbradbury/yum-utils
|
debuginfo-install.py
|
Python
|
gpl-2.0
| 7,411
|
# -*- coding: utf-8 -*-
# System modules
import logging
import os
# External modules
# Internal modules
from . import utils
from . import interfaces
from . import numericalschemes
from . import numericalmodel
from . import equations
from . import test_data
from . import test_flow
def runtest(module, verbose=False):
    """Configure logging verbosity, then run one test module's run()."""
    if verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        # Discard all logging output by sending it to the null device.
        logging.basicConfig(filename=os.devnull)
    module.run()
# run all tests
def runall(verbose=False):
    """Run every test module in sequence, then print a trailing newline."""
    modules = (utils, interfaces, numericalschemes, numericalmodel, equations)
    for test_module in modules:
        runtest(module=test_module, verbose=verbose)
    print()
|
nobodyinperson/python3-numericalmodel
|
tests/__init__.py
|
Python
|
gpl-3.0
| 771
|
#! /usr/bin/env python3
"""
Show and Tell

Script entry point: create the database schema for the mapped models,
then serve the application locally with Bottle (debug mode).
"""
import showandtell
from bottle import run
# Create the Database
showandtell.db.Base.metadata.create_all()
# Serve on localhost:8080; debug=True is for development use.
run(host='localhost', port=8080, debug=True)
|
ColoradoSchoolOfMines/show-and-tell
|
app.py
|
Python
|
gpl-3.0
| 202
|
'''
Berkelium extension demo
========================
Check http://github.com/kivy/kivy-berkelium for more information.
You must have berkelium-1.2 extension installed before running the demo
'''
from kivy.uix.scatter import Scatter
from kivy.uix.floatlayout import FloatLayout
from kivy.app import App
from kivy.ext import load
# Load the berkelium extension, requiring API version 1.2.
berkelium = load('berkelium', (1, 2))
# Pages to open -- one scatter-wrapped browser widget per URL.
urls = (
    'http://kivy.org',
    'http://www.youtube.com/watch?v=QKh1Rv0PlOQ',
)
class BerkeliumBrowserApp(App):
    '''Demo app: one draggable/scalable browser window per entry in urls.'''

    def build(self):
        layout = FloatLayout()
        browser_size = (1024, 768)
        for address in urls:
            wrapper = Scatter(size=browser_size)
            wrapper.add_widget(berkelium.Webbrowser(url=address, size=browser_size))
            layout.add_widget(wrapper)
        return layout
# Start the demo only when executed directly, not on import.
if __name__ == '__main__':
    BerkeliumBrowserApp().run()
|
kivy/kivy-berkelium
|
demo/main.py
|
Python
|
bsd-3-clause
| 841
|
# Compiled by Charles Harris, dated October 3, 2002
# updated to 2002 values by BasSw, 2006
# Updated to 2006 values by Vincent Davis June 2010
# Updated to 2014 values by Joseph Booker, 2015
"""
Fundamental Physical Constants
------------------------------
These constants are taken from CODATA Recommended Values of the Fundamental
Physical Constants 2014.
Object
------
physical_constants : dict
A dictionary containing physical constants. Keys are the names of physical
constants, values are tuples (value, units, precision).
Functions
---------
value(key):
Returns the value of the physical constant(key).
unit(key):
Returns the units of the physical constant(key).
precision(key):
Returns the relative precision of the physical constant(key).
find(sub):
Prints or returns list of keys containing the string sub, default is all.
Source
------
The values of the constants provided at this site are recommended for
international use by CODATA and are the latest available. Termed the "2014
CODATA recommended values," they are generally recognized worldwide for use in
all fields of science and technology. The values became available on 25 June
2015 and replaced the 2010 CODATA set. They are based on all of the data
available through 31 December 2014. The 2014 adjustment was carried out under
the auspices of the CODATA Task Group on Fundamental Constants. Also available
is an introduction to the constants for non-experts at
https://physics.nist.gov/cuu/Constants/introduction.html
References
----------
Theoretical and experimental publications relevant to the fundamental constants
and closely related precision measurements published since the mid 1980s, but
also including many older papers of particular interest, some of which date
back to the 1800s. To search bibliography visit
https://physics.nist.gov/cuu/Constants/
"""
from __future__ import division, print_function, absolute_import
import warnings
from math import pi, sqrt
__all__ = ['physical_constants', 'value', 'unit', 'precision', 'find',
'ConstantWarning']
"""
Source: https://physics.nist.gov/cuu/Constants/
The values of the constants provided at the above site are recommended for
international use by CODATA and are the latest available. Termed the "2006
CODATA recommended values", they are generally recognized worldwide for use
in all fields of science and technology. The values became available in March
2007 and replaced the 2002 CODATA set. They are based on all of the data
available through 31 December 2006. The 2006 adjustment was carried out under
the auspices of the CODATA Task Group on Fundamental Constants.
"""
#
# Source: https://physics.nist.gov/cuu/Constants/
#
# Quantity Value Uncertainty Unit
# ---------------------------------------------------- --------------------- -------------------- -------------
txt2002 = """\
Wien displacement law constant 2.897 7685e-3 0.000 0051e-3 m K
atomic unit of 1st hyperpolarizablity 3.206 361 51e-53 0.000 000 28e-53 C^3 m^3 J^-2
atomic unit of 2nd hyperpolarizablity 6.235 3808e-65 0.000 0011e-65 C^4 m^4 J^-3
atomic unit of electric dipole moment 8.478 353 09e-30 0.000 000 73e-30 C m
atomic unit of electric polarizablity 1.648 777 274e-41 0.000 000 016e-41 C^2 m^2 J^-1
atomic unit of electric quadrupole moment 4.486 551 24e-40 0.000 000 39e-40 C m^2
atomic unit of magn. dipole moment 1.854 801 90e-23 0.000 000 16e-23 J T^-1
atomic unit of magn. flux density 2.350 517 42e5 0.000 000 20e5 T
deuteron magn. moment 0.433 073 482e-26 0.000 000 038e-26 J T^-1
deuteron magn. moment to Bohr magneton ratio 0.466 975 4567e-3 0.000 000 0050e-3
deuteron magn. moment to nuclear magneton ratio 0.857 438 2329 0.000 000 0092
deuteron-electron magn. moment ratio -4.664 345 548e-4 0.000 000 050e-4
deuteron-proton magn. moment ratio 0.307 012 2084 0.000 000 0045
deuteron-neutron magn. moment ratio -0.448 206 52 0.000 000 11
electron gyromagn. ratio 1.760 859 74e11 0.000 000 15e11 s^-1 T^-1
electron gyromagn. ratio over 2 pi 28 024.9532 0.0024 MHz T^-1
electron magn. moment -928.476 412e-26 0.000 080e-26 J T^-1
electron magn. moment to Bohr magneton ratio -1.001 159 652 1859 0.000 000 000 0038
electron magn. moment to nuclear magneton ratio -1838.281 971 07 0.000 000 85
electron magn. moment anomaly 1.159 652 1859e-3 0.000 000 0038e-3
electron to shielded proton magn. moment ratio -658.227 5956 0.000 0071
electron to shielded helion magn. moment ratio 864.058 255 0.000 010
electron-deuteron magn. moment ratio -2143.923 493 0.000 023
electron-muon magn. moment ratio 206.766 9894 0.000 0054
electron-neutron magn. moment ratio 960.920 50 0.000 23
electron-proton magn. moment ratio -658.210 6862 0.000 0066
magn. constant 12.566 370 614...e-7 0 N A^-2
magn. flux quantum 2.067 833 72e-15 0.000 000 18e-15 Wb
muon magn. moment -4.490 447 99e-26 0.000 000 40e-26 J T^-1
muon magn. moment to Bohr magneton ratio -4.841 970 45e-3 0.000 000 13e-3
muon magn. moment to nuclear magneton ratio -8.890 596 98 0.000 000 23
muon-proton magn. moment ratio -3.183 345 118 0.000 000 089
neutron gyromagn. ratio 1.832 471 83e8 0.000 000 46e8 s^-1 T^-1
neutron gyromagn. ratio over 2 pi 29.164 6950 0.000 0073 MHz T^-1
neutron magn. moment -0.966 236 45e-26 0.000 000 24e-26 J T^-1
neutron magn. moment to Bohr magneton ratio -1.041 875 63e-3 0.000 000 25e-3
neutron magn. moment to nuclear magneton ratio -1.913 042 73 0.000 000 45
neutron to shielded proton magn. moment ratio -0.684 996 94 0.000 000 16
neutron-electron magn. moment ratio 1.040 668 82e-3 0.000 000 25e-3
neutron-proton magn. moment ratio -0.684 979 34 0.000 000 16
proton gyromagn. ratio 2.675 222 05e8 0.000 000 23e8 s^-1 T^-1
proton gyromagn. ratio over 2 pi 42.577 4813 0.000 0037 MHz T^-1
proton magn. moment 1.410 606 71e-26 0.000 000 12e-26 J T^-1
proton magn. moment to Bohr magneton ratio 1.521 032 206e-3 0.000 000 015e-3
proton magn. moment to nuclear magneton ratio 2.792 847 351 0.000 000 028
proton magn. shielding correction 25.689e-6 0.015e-6
proton-neutron magn. moment ratio -1.459 898 05 0.000 000 34
shielded helion gyromagn. ratio 2.037 894 70e8 0.000 000 18e8 s^-1 T^-1
shielded helion gyromagn. ratio over 2 pi 32.434 1015 0.000 0028 MHz T^-1
shielded helion magn. moment -1.074 553 024e-26 0.000 000 093e-26 J T^-1
shielded helion magn. moment to Bohr magneton ratio -1.158 671 474e-3 0.000 000 014e-3
shielded helion magn. moment to nuclear magneton ratio -2.127 497 723 0.000 000 025
shielded helion to proton magn. moment ratio -0.761 766 562 0.000 000 012
shielded helion to shielded proton magn. moment ratio -0.761 786 1313 0.000 000 0033
shielded helion gyromagn. ratio 2.037 894 70e8 0.000 000 18e8 s^-1 T^-1
shielded helion gyromagn. ratio over 2 pi 32.434 1015 0.000 0028 MHz T^-1
shielded proton magn. moment 1.410 570 47e-26 0.000 000 12e-26 J T^-1
shielded proton magn. moment to Bohr magneton ratio 1.520 993 132e-3 0.000 000 016e-3
shielded proton magn. moment to nuclear magneton ratio 2.792 775 604 0.000 000 030
{220} lattice spacing of silicon 192.015 5965e-12 0.000 0070e-12 m"""
txt2006 = """\
lattice spacing of silicon 192.015 5762 e-12 0.000 0050 e-12 m
alpha particle-electron mass ratio 7294.299 5365 0.000 0031
alpha particle mass 6.644 656 20 e-27 0.000 000 33 e-27 kg
alpha particle mass energy equivalent 5.971 919 17 e-10 0.000 000 30 e-10 J
alpha particle mass energy equivalent in MeV 3727.379 109 0.000 093 MeV
alpha particle mass in u 4.001 506 179 127 0.000 000 000 062 u
alpha particle molar mass 4.001 506 179 127 e-3 0.000 000 000 062 e-3 kg mol^-1
alpha particle-proton mass ratio 3.972 599 689 51 0.000 000 000 41
Angstrom star 1.000 014 98 e-10 0.000 000 90 e-10 m
atomic mass constant 1.660 538 782 e-27 0.000 000 083 e-27 kg
atomic mass constant energy equivalent 1.492 417 830 e-10 0.000 000 074 e-10 J
atomic mass constant energy equivalent in MeV 931.494 028 0.000 023 MeV
atomic mass unit-electron volt relationship 931.494 028 e6 0.000 023 e6 eV
atomic mass unit-hartree relationship 3.423 177 7149 e7 0.000 000 0049 e7 E_h
atomic mass unit-hertz relationship 2.252 342 7369 e23 0.000 000 0032 e23 Hz
atomic mass unit-inverse meter relationship 7.513 006 671 e14 0.000 000 011 e14 m^-1
atomic mass unit-joule relationship 1.492 417 830 e-10 0.000 000 074 e-10 J
atomic mass unit-kelvin relationship 1.080 9527 e13 0.000 0019 e13 K
atomic mass unit-kilogram relationship 1.660 538 782 e-27 0.000 000 083 e-27 kg
atomic unit of 1st hyperpolarizability 3.206 361 533 e-53 0.000 000 081 e-53 C^3 m^3 J^-2
atomic unit of 2nd hyperpolarizability 6.235 380 95 e-65 0.000 000 31 e-65 C^4 m^4 J^-3
atomic unit of action 1.054 571 628 e-34 0.000 000 053 e-34 J s
atomic unit of charge 1.602 176 487 e-19 0.000 000 040 e-19 C
atomic unit of charge density 1.081 202 300 e12 0.000 000 027 e12 C m^-3
atomic unit of current 6.623 617 63 e-3 0.000 000 17 e-3 A
atomic unit of electric dipole mom. 8.478 352 81 e-30 0.000 000 21 e-30 C m
atomic unit of electric field 5.142 206 32 e11 0.000 000 13 e11 V m^-1
atomic unit of electric field gradient 9.717 361 66 e21 0.000 000 24 e21 V m^-2
atomic unit of electric polarizability 1.648 777 2536 e-41 0.000 000 0034 e-41 C^2 m^2 J^-1
atomic unit of electric potential 27.211 383 86 0.000 000 68 V
atomic unit of electric quadrupole mom. 4.486 551 07 e-40 0.000 000 11 e-40 C m^2
atomic unit of energy 4.359 743 94 e-18 0.000 000 22 e-18 J
atomic unit of force 8.238 722 06 e-8 0.000 000 41 e-8 N
atomic unit of length 0.529 177 208 59 e-10 0.000 000 000 36 e-10 m
atomic unit of mag. dipole mom. 1.854 801 830 e-23 0.000 000 046 e-23 J T^-1
atomic unit of mag. flux density 2.350 517 382 e5 0.000 000 059 e5 T
atomic unit of magnetizability 7.891 036 433 e-29 0.000 000 027 e-29 J T^-2
atomic unit of mass 9.109 382 15 e-31 0.000 000 45 e-31 kg
atomic unit of momentum 1.992 851 565 e-24 0.000 000 099 e-24 kg m s^-1
atomic unit of permittivity 1.112 650 056... e-10 (exact) F m^-1
atomic unit of time 2.418 884 326 505 e-17 0.000 000 000 016 e-17 s
atomic unit of velocity 2.187 691 2541 e6 0.000 000 0015 e6 m s^-1
Avogadro constant 6.022 141 79 e23 0.000 000 30 e23 mol^-1
Bohr magneton 927.400 915 e-26 0.000 023 e-26 J T^-1
Bohr magneton in eV/T 5.788 381 7555 e-5 0.000 000 0079 e-5 eV T^-1
Bohr magneton in Hz/T 13.996 246 04 e9 0.000 000 35 e9 Hz T^-1
Bohr magneton in inverse meters per tesla 46.686 4515 0.000 0012 m^-1 T^-1
Bohr magneton in K/T 0.671 7131 0.000 0012 K T^-1
Bohr radius 0.529 177 208 59 e-10 0.000 000 000 36 e-10 m
Boltzmann constant 1.380 6504 e-23 0.000 0024 e-23 J K^-1
Boltzmann constant in eV/K 8.617 343 e-5 0.000 015 e-5 eV K^-1
Boltzmann constant in Hz/K 2.083 6644 e10 0.000 0036 e10 Hz K^-1
Boltzmann constant in inverse meters per kelvin 69.503 56 0.000 12 m^-1 K^-1
characteristic impedance of vacuum 376.730 313 461... (exact) ohm
classical electron radius 2.817 940 2894 e-15 0.000 000 0058 e-15 m
Compton wavelength 2.426 310 2175 e-12 0.000 000 0033 e-12 m
Compton wavelength over 2 pi 386.159 264 59 e-15 0.000 000 53 e-15 m
conductance quantum 7.748 091 7004 e-5 0.000 000 0053 e-5 S
conventional value of Josephson constant 483 597.9 e9 (exact) Hz V^-1
conventional value of von Klitzing constant 25 812.807 (exact) ohm
Cu x unit 1.002 076 99 e-13 0.000 000 28 e-13 m
deuteron-electron mag. mom. ratio -4.664 345 537 e-4 0.000 000 039 e-4
deuteron-electron mass ratio 3670.482 9654 0.000 0016
deuteron g factor 0.857 438 2308 0.000 000 0072
deuteron mag. mom. 0.433 073 465 e-26 0.000 000 011 e-26 J T^-1
deuteron mag. mom. to Bohr magneton ratio 0.466 975 4556 e-3 0.000 000 0039 e-3
deuteron mag. mom. to nuclear magneton ratio 0.857 438 2308 0.000 000 0072
deuteron mass 3.343 583 20 e-27 0.000 000 17 e-27 kg
deuteron mass energy equivalent 3.005 062 72 e-10 0.000 000 15 e-10 J
deuteron mass energy equivalent in MeV 1875.612 793 0.000 047 MeV
deuteron mass in u 2.013 553 212 724 0.000 000 000 078 u
deuteron molar mass 2.013 553 212 724 e-3 0.000 000 000 078 e-3 kg mol^-1
deuteron-neutron mag. mom. ratio -0.448 206 52 0.000 000 11
deuteron-proton mag. mom. ratio 0.307 012 2070 0.000 000 0024
deuteron-proton mass ratio 1.999 007 501 08 0.000 000 000 22
deuteron rms charge radius 2.1402 e-15 0.0028 e-15 m
electric constant 8.854 187 817... e-12 (exact) F m^-1
electron charge to mass quotient -1.758 820 150 e11 0.000 000 044 e11 C kg^-1
electron-deuteron mag. mom. ratio -2143.923 498 0.000 018
electron-deuteron mass ratio 2.724 437 1093 e-4 0.000 000 0012 e-4
electron g factor -2.002 319 304 3622 0.000 000 000 0015
electron gyromag. ratio 1.760 859 770 e11 0.000 000 044 e11 s^-1 T^-1
electron gyromag. ratio over 2 pi 28 024.953 64 0.000 70 MHz T^-1
electron mag. mom. -928.476 377 e-26 0.000 023 e-26 J T^-1
electron mag. mom. anomaly 1.159 652 181 11 e-3 0.000 000 000 74 e-3
electron mag. mom. to Bohr magneton ratio -1.001 159 652 181 11 0.000 000 000 000 74
electron mag. mom. to nuclear magneton ratio -1838.281 970 92 0.000 000 80
electron mass 9.109 382 15 e-31 0.000 000 45 e-31 kg
electron mass energy equivalent 8.187 104 38 e-14 0.000 000 41 e-14 J
electron mass energy equivalent in MeV 0.510 998 910 0.000 000 013 MeV
electron mass in u 5.485 799 0943 e-4 0.000 000 0023 e-4 u
electron molar mass 5.485 799 0943 e-7 0.000 000 0023 e-7 kg mol^-1
electron-muon mag. mom. ratio 206.766 9877 0.000 0052
electron-muon mass ratio 4.836 331 71 e-3 0.000 000 12 e-3
electron-neutron mag. mom. ratio 960.920 50 0.000 23
electron-neutron mass ratio 5.438 673 4459 e-4 0.000 000 0033 e-4
electron-proton mag. mom. ratio -658.210 6848 0.000 0054
electron-proton mass ratio 5.446 170 2177 e-4 0.000 000 0024 e-4
electron-tau mass ratio 2.875 64 e-4 0.000 47 e-4
electron to alpha particle mass ratio 1.370 933 555 70 e-4 0.000 000 000 58 e-4
electron to shielded helion mag. mom. ratio 864.058 257 0.000 010
electron to shielded proton mag. mom. ratio -658.227 5971 0.000 0072
electron volt 1.602 176 487 e-19 0.000 000 040 e-19 J
electron volt-atomic mass unit relationship 1.073 544 188 e-9 0.000 000 027 e-9 u
electron volt-hartree relationship 3.674 932 540 e-2 0.000 000 092 e-2 E_h
electron volt-hertz relationship 2.417 989 454 e14 0.000 000 060 e14 Hz
electron volt-inverse meter relationship 8.065 544 65 e5 0.000 000 20 e5 m^-1
electron volt-joule relationship 1.602 176 487 e-19 0.000 000 040 e-19 J
electron volt-kelvin relationship 1.160 4505 e4 0.000 0020 e4 K
electron volt-kilogram relationship 1.782 661 758 e-36 0.000 000 044 e-36 kg
elementary charge 1.602 176 487 e-19 0.000 000 040 e-19 C
elementary charge over h 2.417 989 454 e14 0.000 000 060 e14 A J^-1
Faraday constant 96 485.3399 0.0024 C mol^-1
Faraday constant for conventional electric current 96 485.3401 0.0048 C_90 mol^-1
Fermi coupling constant 1.166 37 e-5 0.000 01 e-5 GeV^-2
fine-structure constant 7.297 352 5376 e-3 0.000 000 0050 e-3
first radiation constant 3.741 771 18 e-16 0.000 000 19 e-16 W m^2
first radiation constant for spectral radiance 1.191 042 759 e-16 0.000 000 059 e-16 W m^2 sr^-1
hartree-atomic mass unit relationship 2.921 262 2986 e-8 0.000 000 0042 e-8 u
hartree-electron volt relationship 27.211 383 86 0.000 000 68 eV
Hartree energy 4.359 743 94 e-18 0.000 000 22 e-18 J
Hartree energy in eV 27.211 383 86 0.000 000 68 eV
hartree-hertz relationship 6.579 683 920 722 e15 0.000 000 000 044 e15 Hz
hartree-inverse meter relationship 2.194 746 313 705 e7 0.000 000 000 015 e7 m^-1
hartree-joule relationship 4.359 743 94 e-18 0.000 000 22 e-18 J
hartree-kelvin relationship 3.157 7465 e5 0.000 0055 e5 K
hartree-kilogram relationship 4.850 869 34 e-35 0.000 000 24 e-35 kg
helion-electron mass ratio 5495.885 2765 0.000 0052
helion mass 5.006 411 92 e-27 0.000 000 25 e-27 kg
helion mass energy equivalent 4.499 538 64 e-10 0.000 000 22 e-10 J
helion mass energy equivalent in MeV 2808.391 383 0.000 070 MeV
helion mass in u 3.014 932 2473 0.000 000 0026 u
helion molar mass 3.014 932 2473 e-3 0.000 000 0026 e-3 kg mol^-1
helion-proton mass ratio 2.993 152 6713 0.000 000 0026
hertz-atomic mass unit relationship 4.439 821 6294 e-24 0.000 000 0064 e-24 u
hertz-electron volt relationship 4.135 667 33 e-15 0.000 000 10 e-15 eV
hertz-hartree relationship 1.519 829 846 006 e-16 0.000 000 000010e-16 E_h
hertz-inverse meter relationship 3.335 640 951... e-9 (exact) m^-1
hertz-joule relationship 6.626 068 96 e-34 0.000 000 33 e-34 J
hertz-kelvin relationship 4.799 2374 e-11 0.000 0084 e-11 K
hertz-kilogram relationship 7.372 496 00 e-51 0.000 000 37 e-51 kg
inverse fine-structure constant 137.035 999 679 0.000 000 094
inverse meter-atomic mass unit relationship 1.331 025 0394 e-15 0.000 000 0019 e-15 u
inverse meter-electron volt relationship 1.239 841 875 e-6 0.000 000 031 e-6 eV
inverse meter-hartree relationship 4.556 335 252 760 e-8 0.000 000 000 030 e-8 E_h
inverse meter-hertz relationship 299 792 458 (exact) Hz
inverse meter-joule relationship 1.986 445 501 e-25 0.000 000 099 e-25 J
inverse meter-kelvin relationship 1.438 7752 e-2 0.000 0025 e-2 K
inverse meter-kilogram relationship 2.210 218 70 e-42 0.000 000 11 e-42 kg
inverse of conductance quantum 12 906.403 7787 0.000 0088 ohm
Josephson constant 483 597.891 e9 0.012 e9 Hz V^-1
joule-atomic mass unit relationship 6.700 536 41 e9 0.000 000 33 e9 u
joule-electron volt relationship 6.241 509 65 e18 0.000 000 16 e18 eV
joule-hartree relationship 2.293 712 69 e17 0.000 000 11 e17 E_h
joule-hertz relationship 1.509 190 450 e33 0.000 000 075 e33 Hz
joule-inverse meter relationship 5.034 117 47 e24 0.000 000 25 e24 m^-1
joule-kelvin relationship 7.242 963 e22 0.000 013 e22 K
joule-kilogram relationship 1.112 650 056... e-17 (exact) kg
kelvin-atomic mass unit relationship 9.251 098 e-14 0.000 016 e-14 u
kelvin-electron volt relationship 8.617 343 e-5 0.000 015 e-5 eV
kelvin-hartree relationship 3.166 8153 e-6 0.000 0055 e-6 E_h
kelvin-hertz relationship 2.083 6644 e10 0.000 0036 e10 Hz
kelvin-inverse meter relationship 69.503 56 0.000 12 m^-1
kelvin-joule relationship 1.380 6504 e-23 0.000 0024 e-23 J
kelvin-kilogram relationship 1.536 1807 e-40 0.000 0027 e-40 kg
kilogram-atomic mass unit relationship 6.022 141 79 e26 0.000 000 30 e26 u
kilogram-electron volt relationship 5.609 589 12 e35 0.000 000 14 e35 eV
kilogram-hartree relationship 2.061 486 16 e34 0.000 000 10 e34 E_h
kilogram-hertz relationship 1.356 392 733 e50 0.000 000 068 e50 Hz
kilogram-inverse meter relationship 4.524 439 15 e41 0.000 000 23 e41 m^-1
kilogram-joule relationship 8.987 551 787... e16 (exact) J
kilogram-kelvin relationship 6.509 651 e39 0.000 011 e39 K
lattice parameter of silicon 543.102 064 e-12 0.000 014 e-12 m
Loschmidt constant (273.15 K, 101.325 kPa) 2.686 7774 e25 0.000 0047 e25 m^-3
mag. constant 12.566 370 614... e-7 (exact) N A^-2
mag. flux quantum 2.067 833 667 e-15 0.000 000 052 e-15 Wb
molar gas constant 8.314 472 0.000 015 J mol^-1 K^-1
molar mass constant 1 e-3 (exact) kg mol^-1
molar mass of carbon-12 12 e-3 (exact) kg mol^-1
molar Planck constant 3.990 312 6821 e-10 0.000 000 0057 e-10 J s mol^-1
molar Planck constant times c 0.119 626 564 72 0.000 000 000 17 J m mol^-1
molar volume of ideal gas (273.15 K, 100 kPa) 22.710 981 e-3 0.000 040 e-3 m^3 mol^-1
molar volume of ideal gas (273.15 K, 101.325 kPa) 22.413 996 e-3 0.000 039 e-3 m^3 mol^-1
molar volume of silicon 12.058 8349 e-6 0.000 0011 e-6 m^3 mol^-1
Mo x unit 1.002 099 55 e-13 0.000 000 53 e-13 m
muon Compton wavelength 11.734 441 04 e-15 0.000 000 30 e-15 m
muon Compton wavelength over 2 pi 1.867 594 295 e-15 0.000 000 047 e-15 m
muon-electron mass ratio 206.768 2823 0.000 0052
muon g factor -2.002 331 8414 0.000 000 0012
muon mag. mom. -4.490 447 86 e-26 0.000 000 16 e-26 J T^-1
muon mag. mom. anomaly 1.165 920 69 e-3 0.000 000 60 e-3
muon mag. mom. to Bohr magneton ratio -4.841 970 49 e-3 0.000 000 12 e-3
muon mag. mom. to nuclear magneton ratio -8.890 597 05 0.000 000 23
muon mass 1.883 531 30 e-28 0.000 000 11 e-28 kg
muon mass energy equivalent 1.692 833 510 e-11 0.000 000 095 e-11 J
muon mass energy equivalent in MeV 105.658 3668 0.000 0038 MeV
muon mass in u 0.113 428 9256 0.000 000 0029 u
muon molar mass 0.113 428 9256 e-3 0.000 000 0029 e-3 kg mol^-1
muon-neutron mass ratio 0.112 454 5167 0.000 000 0029
muon-proton mag. mom. ratio -3.183 345 137 0.000 000 085
muon-proton mass ratio 0.112 609 5261 0.000 000 0029
muon-tau mass ratio 5.945 92 e-2 0.000 97 e-2
natural unit of action 1.054 571 628 e-34 0.000 000 053 e-34 J s
natural unit of action in eV s 6.582 118 99 e-16 0.000 000 16 e-16 eV s
natural unit of energy 8.187 104 38 e-14 0.000 000 41 e-14 J
natural unit of energy in MeV 0.510 998 910 0.000 000 013 MeV
natural unit of length 386.159 264 59 e-15 0.000 000 53 e-15 m
natural unit of mass 9.109 382 15 e-31 0.000 000 45 e-31 kg
natural unit of momentum 2.730 924 06 e-22 0.000 000 14 e-22 kg m s^-1
natural unit of momentum in MeV/c 0.510 998 910 0.000 000 013 MeV/c
natural unit of time 1.288 088 6570 e-21 0.000 000 0018 e-21 s
natural unit of velocity 299 792 458 (exact) m s^-1
neutron Compton wavelength 1.319 590 8951 e-15 0.000 000 0020 e-15 m
neutron Compton wavelength over 2 pi 0.210 019 413 82 e-15 0.000 000 000 31 e-15 m
neutron-electron mag. mom. ratio 1.040 668 82 e-3 0.000 000 25 e-3
neutron-electron mass ratio 1838.683 6605 0.000 0011
neutron g factor -3.826 085 45 0.000 000 90
neutron gyromag. ratio 1.832 471 85 e8 0.000 000 43 e8 s^-1 T^-1
neutron gyromag. ratio over 2 pi 29.164 6954 0.000 0069 MHz T^-1
neutron mag. mom. -0.966 236 41 e-26 0.000 000 23 e-26 J T^-1
neutron mag. mom. to Bohr magneton ratio -1.041 875 63 e-3 0.000 000 25 e-3
neutron mag. mom. to nuclear magneton ratio -1.913 042 73 0.000 000 45
neutron mass 1.674 927 211 e-27 0.000 000 084 e-27 kg
neutron mass energy equivalent 1.505 349 505 e-10 0.000 000 075 e-10 J
neutron mass energy equivalent in MeV 939.565 346 0.000 023 MeV
neutron mass in u 1.008 664 915 97 0.000 000 000 43 u
neutron molar mass 1.008 664 915 97 e-3 0.000 000 000 43 e-3 kg mol^-1
neutron-muon mass ratio 8.892 484 09 0.000 000 23
neutron-proton mag. mom. ratio -0.684 979 34 0.000 000 16
neutron-proton mass ratio 1.001 378 419 18 0.000 000 000 46
neutron-tau mass ratio 0.528 740 0.000 086
neutron to shielded proton mag. mom. ratio -0.684 996 94 0.000 000 16
Newtonian constant of gravitation 6.674 28 e-11 0.000 67 e-11 m^3 kg^-1 s^-2
Newtonian constant of gravitation over h-bar c 6.708 81 e-39 0.000 67 e-39 (GeV/c^2)^-2
nuclear magneton 5.050 783 24 e-27 0.000 000 13 e-27 J T^-1
nuclear magneton in eV/T 3.152 451 2326 e-8 0.000 000 0045 e-8 eV T^-1
nuclear magneton in inverse meters per tesla 2.542 623 616 e-2 0.000 000 064 e-2 m^-1 T^-1
nuclear magneton in K/T 3.658 2637 e-4 0.000 0064 e-4 K T^-1
nuclear magneton in MHz/T 7.622 593 84 0.000 000 19 MHz T^-1
Planck constant 6.626 068 96 e-34 0.000 000 33 e-34 J s
Planck constant in eV s 4.135 667 33 e-15 0.000 000 10 e-15 eV s
Planck constant over 2 pi 1.054 571 628 e-34 0.000 000 053 e-34 J s
Planck constant over 2 pi in eV s 6.582 118 99 e-16 0.000 000 16 e-16 eV s
Planck constant over 2 pi times c in MeV fm 197.326 9631 0.000 0049 MeV fm
Planck length 1.616 252 e-35 0.000 081 e-35 m
Planck mass 2.176 44 e-8 0.000 11 e-8 kg
Planck mass energy equivalent in GeV 1.220 892 e19 0.000 061 e19 GeV
Planck temperature 1.416 785 e32 0.000 071 e32 K
Planck time 5.391 24 e-44 0.000 27 e-44 s
proton charge to mass quotient 9.578 833 92 e7 0.000 000 24 e7 C kg^-1
proton Compton wavelength 1.321 409 8446 e-15 0.000 000 0019 e-15 m
proton Compton wavelength over 2 pi 0.210 308 908 61 e-15 0.000 000 000 30 e-15 m
proton-electron mass ratio 1836.152 672 47 0.000 000 80
proton g factor 5.585 694 713 0.000 000 046
proton gyromag. ratio 2.675 222 099 e8 0.000 000 070 e8 s^-1 T^-1
proton gyromag. ratio over 2 pi 42.577 4821 0.000 0011 MHz T^-1
proton mag. mom. 1.410 606 662 e-26 0.000 000 037 e-26 J T^-1
proton mag. mom. to Bohr magneton ratio 1.521 032 209 e-3 0.000 000 012 e-3
proton mag. mom. to nuclear magneton ratio 2.792 847 356 0.000 000 023
proton mag. shielding correction 25.694 e-6 0.014 e-6
proton mass 1.672 621 637 e-27 0.000 000 083 e-27 kg
proton mass energy equivalent 1.503 277 359 e-10 0.000 000 075 e-10 J
proton mass energy equivalent in MeV 938.272 013 0.000 023 MeV
proton mass in u 1.007 276 466 77 0.000 000 000 10 u
proton molar mass 1.007 276 466 77 e-3 0.000 000 000 10 e-3 kg mol^-1
proton-muon mass ratio 8.880 243 39 0.000 000 23
proton-neutron mag. mom. ratio -1.459 898 06 0.000 000 34
proton-neutron mass ratio 0.998 623 478 24 0.000 000 000 46
proton rms charge radius 0.8768 e-15 0.0069 e-15 m
proton-tau mass ratio 0.528 012 0.000 086
quantum of circulation 3.636 947 5199 e-4 0.000 000 0050 e-4 m^2 s^-1
quantum of circulation times 2 7.273 895 040 e-4 0.000 000 010 e-4 m^2 s^-1
Rydberg constant 10 973 731.568 527 0.000 073 m^-1
Rydberg constant times c in Hz 3.289 841 960 361 e15 0.000 000 000 022 e15 Hz
Rydberg constant times hc in eV 13.605 691 93 0.000 000 34 eV
Rydberg constant times hc in J 2.179 871 97 e-18 0.000 000 11 e-18 J
Sackur-Tetrode constant (1 K, 100 kPa) -1.151 7047 0.000 0044
Sackur-Tetrode constant (1 K, 101.325 kPa) -1.164 8677 0.000 0044
second radiation constant 1.438 7752 e-2 0.000 0025 e-2 m K
shielded helion gyromag. ratio 2.037 894 730 e8 0.000 000 056 e8 s^-1 T^-1
shielded helion gyromag. ratio over 2 pi 32.434 101 98 0.000 000 90 MHz T^-1
shielded helion mag. mom. -1.074 552 982 e-26 0.000 000 030 e-26 J T^-1
shielded helion mag. mom. to Bohr magneton ratio -1.158 671 471 e-3 0.000 000 014 e-3
shielded helion mag. mom. to nuclear magneton ratio -2.127 497 718 0.000 000 025
shielded helion to proton mag. mom. ratio -0.761 766 558 0.000 000 011
shielded helion to shielded proton mag. mom. ratio -0.761 786 1313 0.000 000 0033
shielded proton gyromag. ratio 2.675 153 362 e8 0.000 000 073 e8 s^-1 T^-1
shielded proton gyromag. ratio over 2 pi 42.576 3881 0.000 0012 MHz T^-1
shielded proton mag. mom. 1.410 570 419 e-26 0.000 000 038 e-26 J T^-1
shielded proton mag. mom. to Bohr magneton ratio 1.520 993 128 e-3 0.000 000 017 e-3
shielded proton mag. mom. to nuclear magneton ratio 2.792 775 598 0.000 000 030
speed of light in vacuum 299 792 458 (exact) m s^-1
standard acceleration of gravity 9.806 65 (exact) m s^-2
standard atmosphere 101 325 (exact) Pa
Stefan-Boltzmann constant 5.670 400 e-8 0.000 040 e-8 W m^-2 K^-4
tau Compton wavelength 0.697 72 e-15 0.000 11 e-15 m
tau Compton wavelength over 2 pi 0.111 046 e-15 0.000 018 e-15 m
tau-electron mass ratio 3477.48 0.57
tau mass 3.167 77 e-27 0.000 52 e-27 kg
tau mass energy equivalent 2.847 05 e-10 0.000 46 e-10 J
tau mass energy equivalent in MeV 1776.99 0.29 MeV
tau mass in u 1.907 68 0.000 31 u
tau molar mass 1.907 68 e-3 0.000 31 e-3 kg mol^-1
tau-muon mass ratio 16.8183 0.0027
tau-neutron mass ratio 1.891 29 0.000 31
tau-proton mass ratio 1.893 90 0.000 31
Thomson cross section 0.665 245 8558 e-28 0.000 000 0027 e-28 m^2
triton-electron mag. mom. ratio -1.620 514 423 e-3 0.000 000 021 e-3
triton-electron mass ratio 5496.921 5269 0.000 0051
triton g factor 5.957 924 896 0.000 000 076
triton mag. mom. 1.504 609 361 e-26 0.000 000 042 e-26 J T^-1
triton mag. mom. to Bohr magneton ratio 1.622 393 657 e-3 0.000 000 021 e-3
triton mag. mom. to nuclear magneton ratio 2.978 962 448 0.000 000 038
triton mass 5.007 355 88 e-27 0.000 000 25 e-27 kg
triton mass energy equivalent 4.500 387 03 e-10 0.000 000 22 e-10 J
triton mass energy equivalent in MeV 2808.920 906 0.000 070 MeV
triton mass in u 3.015 500 7134 0.000 000 0025 u
triton molar mass 3.015 500 7134 e-3 0.000 000 0025 e-3 kg mol^-1
triton-neutron mag. mom. ratio -1.557 185 53 0.000 000 37
triton-proton mag. mom. ratio 1.066 639 908 0.000 000 010
triton-proton mass ratio 2.993 717 0309 0.000 000 0025
unified atomic mass unit 1.660 538 782 e-27 0.000 000 083 e-27 kg
von Klitzing constant 25 812.807 557 0.000 018 ohm
weak mixing angle 0.222 55 0.000 56
Wien frequency displacement law constant 5.878 933 e10 0.000 010 e10 Hz K^-1
Wien wavelength displacement law constant 2.897 7685 e-3 0.000 0051 e-3 m K"""
txt2010 = """\
{220} lattice spacing of silicon 192.015 5714 e-12 0.000 0032 e-12 m
alpha particle-electron mass ratio 7294.299 5361 0.000 0029
alpha particle mass 6.644 656 75 e-27 0.000 000 29 e-27 kg
alpha particle mass energy equivalent 5.971 919 67 e-10 0.000 000 26 e-10 J
alpha particle mass energy equivalent in MeV 3727.379 240 0.000 082 MeV
alpha particle mass in u 4.001 506 179 125 0.000 000 000 062 u
alpha particle molar mass 4.001 506 179 125 e-3 0.000 000 000 062 e-3 kg mol^-1
alpha particle-proton mass ratio 3.972 599 689 33 0.000 000 000 36
Angstrom star 1.000 014 95 e-10 0.000 000 90 e-10 m
atomic mass constant 1.660 538 921 e-27 0.000 000 073 e-27 kg
atomic mass constant energy equivalent 1.492 417 954 e-10 0.000 000 066 e-10 J
atomic mass constant energy equivalent in MeV 931.494 061 0.000 021 MeV
atomic mass unit-electron volt relationship 931.494 061 e6 0.000 021 e6 eV
atomic mass unit-hartree relationship 3.423 177 6845 e7 0.000 000 0024 e7 E_h
atomic mass unit-hertz relationship 2.252 342 7168 e23 0.000 000 0016 e23 Hz
atomic mass unit-inverse meter relationship 7.513 006 6042 e14 0.000 000 0053 e14 m^-1
atomic mass unit-joule relationship 1.492 417 954 e-10 0.000 000 066 e-10 J
atomic mass unit-kelvin relationship 1.080 954 08 e13 0.000 000 98 e13 K
atomic mass unit-kilogram relationship 1.660 538 921 e-27 0.000 000 073 e-27 kg
atomic unit of 1st hyperpolarizability 3.206 361 449 e-53 0.000 000 071 e-53 C^3 m^3 J^-2
atomic unit of 2nd hyperpolarizability 6.235 380 54 e-65 0.000 000 28 e-65 C^4 m^4 J^-3
atomic unit of action 1.054 571 726 e-34 0.000 000 047 e-34 J s
atomic unit of charge 1.602 176 565 e-19 0.000 000 035 e-19 C
atomic unit of charge density 1.081 202 338 e12 0.000 000 024 e12 C m^-3
atomic unit of current 6.623 617 95 e-3 0.000 000 15 e-3 A
atomic unit of electric dipole mom. 8.478 353 26 e-30 0.000 000 19 e-30 C m
atomic unit of electric field 5.142 206 52 e11 0.000 000 11 e11 V m^-1
atomic unit of electric field gradient 9.717 362 00 e21 0.000 000 21 e21 V m^-2
atomic unit of electric polarizability 1.648 777 2754 e-41 0.000 000 0016 e-41 C^2 m^2 J^-1
atomic unit of electric potential 27.211 385 05 0.000 000 60 V
atomic unit of electric quadrupole mom. 4.486 551 331 e-40 0.000 000 099 e-40 C m^2
atomic unit of energy 4.359 744 34 e-18 0.000 000 19 e-18 J
atomic unit of force 8.238 722 78 e-8 0.000 000 36 e-8 N
atomic unit of length 0.529 177 210 92 e-10 0.000 000 000 17 e-10 m
atomic unit of mag. dipole mom. 1.854 801 936 e-23 0.000 000 041 e-23 J T^-1
atomic unit of mag. flux density 2.350 517 464 e5 0.000 000 052 e5 T
atomic unit of magnetizability 7.891 036 607 e-29 0.000 000 013 e-29 J T^-2
atomic unit of mass 9.109 382 91 e-31 0.000 000 40 e-31 kg
atomic unit of mom.um 1.992 851 740 e-24 0.000 000 088 e-24 kg m s^-1
atomic unit of permittivity 1.112 650 056... e-10 (exact) F m^-1
atomic unit of time 2.418 884 326 502e-17 0.000 000 000 012e-17 s
atomic unit of velocity 2.187 691 263 79 e6 0.000 000 000 71 e6 m s^-1
Avogadro constant 6.022 141 29 e23 0.000 000 27 e23 mol^-1
Bohr magneton 927.400 968 e-26 0.000 020 e-26 J T^-1
Bohr magneton in eV/T 5.788 381 8066 e-5 0.000 000 0038 e-5 eV T^-1
Bohr magneton in Hz/T 13.996 245 55 e9 0.000 000 31 e9 Hz T^-1
Bohr magneton in inverse meters per tesla 46.686 4498 0.000 0010 m^-1 T^-1
Bohr magneton in K/T 0.671 713 88 0.000 000 61 K T^-1
Bohr radius 0.529 177 210 92 e-10 0.000 000 000 17 e-10 m
Boltzmann constant 1.380 6488 e-23 0.000 0013 e-23 J K^-1
Boltzmann constant in eV/K 8.617 3324 e-5 0.000 0078 e-5 eV K^-1
Boltzmann constant in Hz/K 2.083 6618 e10 0.000 0019 e10 Hz K^-1
Boltzmann constant in inverse meters per kelvin 69.503 476 0.000 063 m^-1 K^-1
characteristic impedance of vacuum 376.730 313 461... (exact) ohm
classical electron radius 2.817 940 3267 e-15 0.000 000 0027 e-15 m
Compton wavelength 2.426 310 2389 e-12 0.000 000 0016 e-12 m
Compton wavelength over 2 pi 386.159 268 00 e-15 0.000 000 25 e-15 m
conductance quantum 7.748 091 7346 e-5 0.000 000 0025 e-5 S
conventional value of Josephson constant 483 597.9 e9 (exact) Hz V^-1
conventional value of von Klitzing constant 25 812.807 (exact) ohm
Cu x unit 1.002 076 97 e-13 0.000 000 28 e-13 m
deuteron-electron mag. mom. ratio -4.664 345 537 e-4 0.000 000 039 e-4
deuteron-electron mass ratio 3670.482 9652 0.000 0015
deuteron g factor 0.857 438 2308 0.000 000 0072
deuteron mag. mom. 0.433 073 489 e-26 0.000 000 010 e-26 J T^-1
deuteron mag. mom. to Bohr magneton ratio 0.466 975 4556 e-3 0.000 000 0039 e-3
deuteron mag. mom. to nuclear magneton ratio 0.857 438 2308 0.000 000 0072
deuteron mass 3.343 583 48 e-27 0.000 000 15 e-27 kg
deuteron mass energy equivalent 3.005 062 97 e-10 0.000 000 13 e-10 J
deuteron mass energy equivalent in MeV 1875.612 859 0.000 041 MeV
deuteron mass in u 2.013 553 212 712 0.000 000 000 077 u
deuteron molar mass 2.013 553 212 712 e-3 0.000 000 000 077 e-3 kg mol^-1
deuteron-neutron mag. mom. ratio -0.448 206 52 0.000 000 11
deuteron-proton mag. mom. ratio 0.307 012 2070 0.000 000 0024
deuteron-proton mass ratio 1.999 007 500 97 0.000 000 000 18
deuteron rms charge radius 2.1424 e-15 0.0021 e-15 m
electric constant 8.854 187 817... e-12 (exact) F m^-1
electron charge to mass quotient -1.758 820 088 e11 0.000 000 039 e11 C kg^-1
electron-deuteron mag. mom. ratio -2143.923 498 0.000 018
electron-deuteron mass ratio 2.724 437 1095 e-4 0.000 000 0011 e-4
electron g factor -2.002 319 304 361 53 0.000 000 000 000 53
electron gyromag. ratio 1.760 859 708 e11 0.000 000 039 e11 s^-1 T^-1
electron gyromag. ratio over 2 pi 28 024.952 66 0.000 62 MHz T^-1
electron-helion mass ratio 1.819 543 0761 e-4 0.000 000 0017 e-4
electron mag. mom. -928.476 430 e-26 0.000 021 e-26 J T^-1
electron mag. mom. anomaly 1.159 652 180 76 e-3 0.000 000 000 27 e-3
electron mag. mom. to Bohr magneton ratio -1.001 159 652 180 76 0.000 000 000 000 27
electron mag. mom. to nuclear magneton ratio -1838.281 970 90 0.000 000 75
electron mass 9.109 382 91 e-31 0.000 000 40 e-31 kg
electron mass energy equivalent 8.187 105 06 e-14 0.000 000 36 e-14 J
electron mass energy equivalent in MeV 0.510 998 928 0.000 000 011 MeV
electron mass in u 5.485 799 0946 e-4 0.000 000 0022 e-4 u
electron molar mass 5.485 799 0946 e-7 0.000 000 0022 e-7 kg mol^-1
electron-muon mag. mom. ratio 206.766 9896 0.000 0052
electron-muon mass ratio 4.836 331 66 e-3 0.000 000 12 e-3
electron-neutron mag. mom. ratio 960.920 50 0.000 23
electron-neutron mass ratio 5.438 673 4461 e-4 0.000 000 0032 e-4
electron-proton mag. mom. ratio -658.210 6848 0.000 0054
electron-proton mass ratio 5.446 170 2178 e-4 0.000 000 0022 e-4
electron-tau mass ratio 2.875 92 e-4 0.000 26 e-4
electron to alpha particle mass ratio 1.370 933 555 78 e-4 0.000 000 000 55 e-4
electron to shielded helion mag. mom. ratio 864.058 257 0.000 010
electron to shielded proton mag. mom. ratio -658.227 5971 0.000 0072
electron-triton mass ratio 1.819 200 0653 e-4 0.000 000 0017 e-4
electron volt 1.602 176 565 e-19 0.000 000 035 e-19 J
electron volt-atomic mass unit relationship 1.073 544 150 e-9 0.000 000 024 e-9 u
electron volt-hartree relationship 3.674 932 379 e-2 0.000 000 081 e-2 E_h
electron volt-hertz relationship 2.417 989 348 e14 0.000 000 053 e14 Hz
electron volt-inverse meter relationship 8.065 544 29 e5 0.000 000 18 e5 m^-1
electron volt-joule relationship 1.602 176 565 e-19 0.000 000 035 e-19 J
electron volt-kelvin relationship 1.160 4519 e4 0.000 0011 e4 K
electron volt-kilogram relationship 1.782 661 845 e-36 0.000 000 039 e-36 kg
elementary charge 1.602 176 565 e-19 0.000 000 035 e-19 C
elementary charge over h 2.417 989 348 e14 0.000 000 053 e14 A J^-1
Faraday constant 96 485.3365 0.0021 C mol^-1
Faraday constant for conventional electric current 96 485.3321 0.0043 C_90 mol^-1
Fermi coupling constant 1.166 364 e-5 0.000 005 e-5 GeV^-2
fine-structure constant 7.297 352 5698 e-3 0.000 000 0024 e-3
first radiation constant 3.741 771 53 e-16 0.000 000 17 e-16 W m^2
first radiation constant for spectral radiance 1.191 042 869 e-16 0.000 000 053 e-16 W m^2 sr^-1
hartree-atomic mass unit relationship 2.921 262 3246 e-8 0.000 000 0021 e-8 u
hartree-electron volt relationship 27.211 385 05 0.000 000 60 eV
Hartree energy 4.359 744 34 e-18 0.000 000 19 e-18 J
Hartree energy in eV 27.211 385 05 0.000 000 60 eV
hartree-hertz relationship 6.579 683 920 729 e15 0.000 000 000 033 e15 Hz
hartree-inverse meter relationship 2.194 746 313 708 e7 0.000 000 000 011 e7 m^-1
hartree-joule relationship 4.359 744 34 e-18 0.000 000 19 e-18 J
hartree-kelvin relationship 3.157 7504 e5 0.000 0029 e5 K
hartree-kilogram relationship 4.850 869 79 e-35 0.000 000 21 e-35 kg
helion-electron mass ratio 5495.885 2754 0.000 0050
helion g factor -4.255 250 613 0.000 000 050
helion mag. mom. -1.074 617 486 e-26 0.000 000 027 e-26 J T^-1
helion mag. mom. to Bohr magneton ratio -1.158 740 958 e-3 0.000 000 014 e-3
helion mag. mom. to nuclear magneton ratio -2.127 625 306 0.000 000 025
helion mass 5.006 412 34 e-27 0.000 000 22 e-27 kg
helion mass energy equivalent 4.499 539 02 e-10 0.000 000 20 e-10 J
helion mass energy equivalent in MeV 2808.391 482 0.000 062 MeV
helion mass in u 3.014 932 2468 0.000 000 0025 u
helion molar mass 3.014 932 2468 e-3 0.000 000 0025 e-3 kg mol^-1
helion-proton mass ratio 2.993 152 6707 0.000 000 0025
hertz-atomic mass unit relationship 4.439 821 6689 e-24 0.000 000 0031 e-24 u
hertz-electron volt relationship 4.135 667 516 e-15 0.000 000 091 e-15 eV
hertz-hartree relationship 1.519 829 8460045e-16 0.000 000 0000076e-16 E_h
hertz-inverse meter relationship 3.335 640 951... e-9 (exact) m^-1
hertz-joule relationship 6.626 069 57 e-34 0.000 000 29 e-34 J
hertz-kelvin relationship 4.799 2434 e-11 0.000 0044 e-11 K
hertz-kilogram relationship 7.372 496 68 e-51 0.000 000 33 e-51 kg
inverse fine-structure constant 137.035 999 074 0.000 000 044
inverse meter-atomic mass unit relationship 1.331 025 051 20 e-15 0.000 000 000 94 e-15 u
inverse meter-electron volt relationship 1.239 841 930 e-6 0.000 000 027 e-6 eV
inverse meter-hartree relationship 4.556 335 252 755 e-8 0.000 000 000 023 e-8 E_h
inverse meter-hertz relationship 299 792 458 (exact) Hz
inverse meter-joule relationship 1.986 445 684 e-25 0.000 000 088 e-25 J
inverse meter-kelvin relationship 1.438 7770 e-2 0.000 0013 e-2 K
inverse meter-kilogram relationship 2.210 218 902 e-42 0.000 000 098 e-42 kg
inverse of conductance quantum 12 906.403 7217 0.000 0042 ohm
Josephson constant 483 597.870 e9 0.011 e9 Hz V^-1
joule-atomic mass unit relationship 6.700 535 85 e9 0.000 000 30 e9 u
joule-electron volt relationship 6.241 509 34 e18 0.000 000 14 e18 eV
joule-hartree relationship 2.293 712 48 e17 0.000 000 10 e17 E_h
joule-hertz relationship 1.509 190 311 e33 0.000 000 067 e33 Hz
joule-inverse meter relationship 5.034 117 01 e24 0.000 000 22 e24 m^-1
joule-kelvin relationship 7.242 9716 e22 0.000 0066 e22 K
joule-kilogram relationship 1.112 650 056... e-17 (exact) kg
kelvin-atomic mass unit relationship 9.251 0868 e-14 0.000 0084 e-14 u
kelvin-electron volt relationship 8.617 3324 e-5 0.000 0078 e-5 eV
kelvin-hartree relationship 3.166 8114 e-6 0.000 0029 e-6 E_h
kelvin-hertz relationship 2.083 6618 e10 0.000 0019 e10 Hz
kelvin-inverse meter relationship 69.503 476 0.000 063 m^-1
kelvin-joule relationship 1.380 6488 e-23 0.000 0013 e-23 J
kelvin-kilogram relationship 1.536 1790 e-40 0.000 0014 e-40 kg
kilogram-atomic mass unit relationship 6.022 141 29 e26 0.000 000 27 e26 u
kilogram-electron volt relationship 5.609 588 85 e35 0.000 000 12 e35 eV
kilogram-hartree relationship 2.061 485 968 e34 0.000 000 091 e34 E_h
kilogram-hertz relationship 1.356 392 608 e50 0.000 000 060 e50 Hz
kilogram-inverse meter relationship 4.524 438 73 e41 0.000 000 20 e41 m^-1
kilogram-joule relationship 8.987 551 787... e16 (exact) J
kilogram-kelvin relationship 6.509 6582 e39 0.000 0059 e39 K
lattice parameter of silicon 543.102 0504 e-12 0.000 0089 e-12 m
Loschmidt constant (273.15 K, 100 kPa) 2.651 6462 e25 0.000 0024 e25 m^-3
Loschmidt constant (273.15 K, 101.325 kPa) 2.686 7805 e25 0.000 0024 e25 m^-3
mag. constant 12.566 370 614... e-7 (exact) N A^-2
mag. flux quantum 2.067 833 758 e-15 0.000 000 046 e-15 Wb
molar gas constant 8.314 4621 0.000 0075 J mol^-1 K^-1
molar mass constant 1 e-3 (exact) kg mol^-1
molar mass of carbon-12 12 e-3 (exact) kg mol^-1
molar Planck constant 3.990 312 7176 e-10 0.000 000 0028 e-10 J s mol^-1
molar Planck constant times c 0.119 626 565 779 0.000 000 000 084 J m mol^-1
molar volume of ideal gas (273.15 K, 100 kPa) 22.710 953 e-3 0.000 021 e-3 m^3 mol^-1
molar volume of ideal gas (273.15 K, 101.325 kPa) 22.413 968 e-3 0.000 020 e-3 m^3 mol^-1
molar volume of silicon 12.058 833 01 e-6 0.000 000 80 e-6 m^3 mol^-1
Mo x unit 1.002 099 52 e-13 0.000 000 53 e-13 m
muon Compton wavelength 11.734 441 03 e-15 0.000 000 30 e-15 m
muon Compton wavelength over 2 pi 1.867 594 294 e-15 0.000 000 047 e-15 m
muon-electron mass ratio 206.768 2843 0.000 0052
muon g factor -2.002 331 8418 0.000 000 0013
muon mag. mom. -4.490 448 07 e-26 0.000 000 15 e-26 J T^-1
muon mag. mom. anomaly 1.165 920 91 e-3 0.000 000 63 e-3
muon mag. mom. to Bohr magneton ratio -4.841 970 44 e-3 0.000 000 12 e-3
muon mag. mom. to nuclear magneton ratio -8.890 596 97 0.000 000 22
muon mass 1.883 531 475 e-28 0.000 000 096 e-28 kg
muon mass energy equivalent 1.692 833 667 e-11 0.000 000 086 e-11 J
muon mass energy equivalent in MeV 105.658 3715 0.000 0035 MeV
muon mass in u 0.113 428 9267 0.000 000 0029 u
muon molar mass 0.113 428 9267 e-3 0.000 000 0029 e-3 kg mol^-1
muon-neutron mass ratio 0.112 454 5177 0.000 000 0028
muon-proton mag. mom. ratio -3.183 345 107 0.000 000 084
muon-proton mass ratio 0.112 609 5272 0.000 000 0028
muon-tau mass ratio 5.946 49 e-2 0.000 54 e-2
natural unit of action 1.054 571 726 e-34 0.000 000 047 e-34 J s
natural unit of action in eV s 6.582 119 28 e-16 0.000 000 15 e-16 eV s
natural unit of energy 8.187 105 06 e-14 0.000 000 36 e-14 J
natural unit of energy in MeV 0.510 998 928 0.000 000 011 MeV
natural unit of length 386.159 268 00 e-15 0.000 000 25 e-15 m
natural unit of mass 9.109 382 91 e-31 0.000 000 40 e-31 kg
natural unit of mom.um 2.730 924 29 e-22 0.000 000 12 e-22 kg m s^-1
natural unit of mom.um in MeV/c 0.510 998 928 0.000 000 011 MeV/c
natural unit of time 1.288 088 668 33 e-21 0.000 000 000 83 e-21 s
natural unit of velocity 299 792 458 (exact) m s^-1
neutron Compton wavelength 1.319 590 9068 e-15 0.000 000 0011 e-15 m
neutron Compton wavelength over 2 pi 0.210 019 415 68 e-15 0.000 000 000 17 e-15 m
neutron-electron mag. mom. ratio 1.040 668 82 e-3 0.000 000 25 e-3
neutron-electron mass ratio 1838.683 6605 0.000 0011
neutron g factor -3.826 085 45 0.000 000 90
neutron gyromag. ratio 1.832 471 79 e8 0.000 000 43 e8 s^-1 T^-1
neutron gyromag. ratio over 2 pi 29.164 6943 0.000 0069 MHz T^-1
neutron mag. mom. -0.966 236 47 e-26 0.000 000 23 e-26 J T^-1
neutron mag. mom. to Bohr magneton ratio -1.041 875 63 e-3 0.000 000 25 e-3
neutron mag. mom. to nuclear magneton ratio -1.913 042 72 0.000 000 45
neutron mass 1.674 927 351 e-27 0.000 000 074 e-27 kg
neutron mass energy equivalent 1.505 349 631 e-10 0.000 000 066 e-10 J
neutron mass energy equivalent in MeV 939.565 379 0.000 021 MeV
neutron mass in u 1.008 664 916 00 0.000 000 000 43 u
neutron molar mass 1.008 664 916 00 e-3 0.000 000 000 43 e-3 kg mol^-1
neutron-muon mass ratio 8.892 484 00 0.000 000 22
neutron-proton mag. mom. ratio -0.684 979 34 0.000 000 16
neutron-proton mass difference 2.305 573 92 e-30 0.000 000 76 e-30
neutron-proton mass difference energy equivalent 2.072 146 50 e-13 0.000 000 68 e-13
neutron-proton mass difference energy equivalent in MeV 1.293 332 17 0.000 000 42
neutron-proton mass difference in u 0.001 388 449 19 0.000 000 000 45
neutron-proton mass ratio 1.001 378 419 17 0.000 000 000 45
neutron-tau mass ratio 0.528 790 0.000 048
neutron to shielded proton mag. mom. ratio -0.684 996 94 0.000 000 16
Newtonian constant of gravitation 6.673 84 e-11 0.000 80 e-11 m^3 kg^-1 s^-2
Newtonian constant of gravitation over h-bar c 6.708 37 e-39 0.000 80 e-39 (GeV/c^2)^-2
nuclear magneton 5.050 783 53 e-27 0.000 000 11 e-27 J T^-1
nuclear magneton in eV/T 3.152 451 2605 e-8 0.000 000 0022 e-8 eV T^-1
nuclear magneton in inverse meters per tesla 2.542 623 527 e-2 0.000 000 056 e-2 m^-1 T^-1
nuclear magneton in K/T 3.658 2682 e-4 0.000 0033 e-4 K T^-1
nuclear magneton in MHz/T 7.622 593 57 0.000 000 17 MHz T^-1
Planck constant 6.626 069 57 e-34 0.000 000 29 e-34 J s
Planck constant in eV s 4.135 667 516 e-15 0.000 000 091 e-15 eV s
Planck constant over 2 pi 1.054 571 726 e-34 0.000 000 047 e-34 J s
Planck constant over 2 pi in eV s 6.582 119 28 e-16 0.000 000 15 e-16 eV s
Planck constant over 2 pi times c in MeV fm 197.326 9718 0.000 0044 MeV fm
Planck length 1.616 199 e-35 0.000 097 e-35 m
Planck mass 2.176 51 e-8 0.000 13 e-8 kg
Planck mass energy equivalent in GeV 1.220 932 e19 0.000 073 e19 GeV
Planck temperature 1.416 833 e32 0.000 085 e32 K
Planck time 5.391 06 e-44 0.000 32 e-44 s
proton charge to mass quotient 9.578 833 58 e7 0.000 000 21 e7 C kg^-1
proton Compton wavelength 1.321 409 856 23 e-15 0.000 000 000 94 e-15 m
proton Compton wavelength over 2 pi 0.210 308 910 47 e-15 0.000 000 000 15 e-15 m
proton-electron mass ratio 1836.152 672 45 0.000 000 75
proton g factor 5.585 694 713 0.000 000 046
proton gyromag. ratio 2.675 222 005 e8 0.000 000 063 e8 s^-1 T^-1
proton gyromag. ratio over 2 pi 42.577 4806 0.000 0010 MHz T^-1
proton mag. mom. 1.410 606 743 e-26 0.000 000 033 e-26 J T^-1
proton mag. mom. to Bohr magneton ratio 1.521 032 210 e-3 0.000 000 012 e-3
proton mag. mom. to nuclear magneton ratio 2.792 847 356 0.000 000 023
proton mag. shielding correction 25.694 e-6 0.014 e-6
proton mass 1.672 621 777 e-27 0.000 000 074 e-27 kg
proton mass energy equivalent 1.503 277 484 e-10 0.000 000 066 e-10 J
proton mass energy equivalent in MeV 938.272 046 0.000 021 MeV
proton mass in u 1.007 276 466 812 0.000 000 000 090 u
proton molar mass 1.007 276 466 812 e-3 0.000 000 000 090 e-3 kg mol^-1
proton-muon mass ratio 8.880 243 31 0.000 000 22
proton-neutron mag. mom. ratio -1.459 898 06 0.000 000 34
proton-neutron mass ratio 0.998 623 478 26 0.000 000 000 45
proton rms charge radius 0.8775 e-15 0.0051 e-15 m
proton-tau mass ratio 0.528 063 0.000 048
quantum of circulation 3.636 947 5520 e-4 0.000 000 0024 e-4 m^2 s^-1
quantum of circulation times 2 7.273 895 1040 e-4 0.000 000 0047 e-4 m^2 s^-1
Rydberg constant 10 973 731.568 539 0.000 055 m^-1
Rydberg constant times c in Hz 3.289 841 960 364 e15 0.000 000 000 017 e15 Hz
Rydberg constant times hc in eV 13.605 692 53 0.000 000 30 eV
Rydberg constant times hc in J 2.179 872 171 e-18 0.000 000 096 e-18 J
Sackur-Tetrode constant (1 K, 100 kPa) -1.151 7078 0.000 0023
Sackur-Tetrode constant (1 K, 101.325 kPa) -1.164 8708 0.000 0023
second radiation constant 1.438 7770 e-2 0.000 0013 e-2 m K
shielded helion gyromag. ratio 2.037 894 659 e8 0.000 000 051 e8 s^-1 T^-1
shielded helion gyromag. ratio over 2 pi 32.434 100 84 0.000 000 81 MHz T^-1
shielded helion mag. mom. -1.074 553 044 e-26 0.000 000 027 e-26 J T^-1
shielded helion mag. mom. to Bohr magneton ratio -1.158 671 471 e-3 0.000 000 014 e-3
shielded helion mag. mom. to nuclear magneton ratio -2.127 497 718 0.000 000 025
shielded helion to proton mag. mom. ratio -0.761 766 558 0.000 000 011
shielded helion to shielded proton mag. mom. ratio -0.761 786 1313 0.000 000 0033
shielded proton gyromag. ratio 2.675 153 268 e8 0.000 000 066 e8 s^-1 T^-1
shielded proton gyromag. ratio over 2 pi 42.576 3866 0.000 0010 MHz T^-1
shielded proton mag. mom. 1.410 570 499 e-26 0.000 000 035 e-26 J T^-1
shielded proton mag. mom. to Bohr magneton ratio 1.520 993 128 e-3 0.000 000 017 e-3
shielded proton mag. mom. to nuclear magneton ratio 2.792 775 598 0.000 000 030
speed of light in vacuum 299 792 458 (exact) m s^-1
standard acceleration of gravity 9.806 65 (exact) m s^-2
standard atmosphere 101 325 (exact) Pa
standard-state pressure 100 000 (exact) Pa
Stefan-Boltzmann constant 5.670 373 e-8 0.000 021 e-8 W m^-2 K^-4
tau Compton wavelength 0.697 787 e-15 0.000 063 e-15 m
tau Compton wavelength over 2 pi 0.111 056 e-15 0.000 010 e-15 m
tau-electron mass ratio 3477.15 0.31
tau mass 3.167 47 e-27 0.000 29 e-27 kg
tau mass energy equivalent 2.846 78 e-10 0.000 26 e-10 J
tau mass energy equivalent in MeV 1776.82 0.16 MeV
tau mass in u 1.907 49 0.000 17 u
tau molar mass 1.907 49 e-3 0.000 17 e-3 kg mol^-1
tau-muon mass ratio 16.8167 0.0015
tau-neutron mass ratio 1.891 11 0.000 17
tau-proton mass ratio 1.893 72 0.000 17
Thomson cross section 0.665 245 8734 e-28 0.000 000 0013 e-28 m^2
triton-electron mass ratio 5496.921 5267 0.000 0050
triton g factor 5.957 924 896 0.000 000 076
triton mag. mom. 1.504 609 447 e-26 0.000 000 038 e-26 J T^-1
triton mag. mom. to Bohr magneton ratio 1.622 393 657 e-3 0.000 000 021 e-3
triton mag. mom. to nuclear magneton ratio 2.978 962 448 0.000 000 038
triton mass 5.007 356 30 e-27 0.000 000 22 e-27 kg
triton mass energy equivalent 4.500 387 41 e-10 0.000 000 20 e-10 J
triton mass energy equivalent in MeV 2808.921 005 0.000 062 MeV
triton mass in u 3.015 500 7134 0.000 000 0025 u
triton molar mass 3.015 500 7134 e-3 0.000 000 0025 e-3 kg mol^-1
triton-proton mass ratio 2.993 717 0308 0.000 000 0025
unified atomic mass unit 1.660 538 921 e-27 0.000 000 073 e-27 kg
von Klitzing constant 25 812.807 4434 0.000 0084 ohm
weak mixing angle 0.2223 0.0021
Wien frequency displacement law constant 5.878 9254 e10 0.000 0053 e10 Hz K^-1
Wien wavelength displacement law constant 2.897 7721 e-3 0.000 0026 e-3 m K"""
txt2014 = """\
{220} lattice spacing of silicon 192.015 5714 e-12 0.000 0032 e-12 m
alpha particle-electron mass ratio 7294.299 541 36 0.000 000 24
alpha particle mass 6.644 657 230 e-27 0.000 000 082 e-27 kg
alpha particle mass energy equivalent 5.971 920 097 e-10 0.000 000 073 e-10 J
alpha particle mass energy equivalent in MeV 3727.379 378 0.000 023 MeV
alpha particle mass in u 4.001 506 179 127 0.000 000 000 063 u
alpha particle molar mass 4.001 506 179 127 e-3 0.000 000 000 063 e-3 kg mol^-1
alpha particle-proton mass ratio 3.972 599 689 07 0.000 000 000 36
Angstrom star 1.000 014 95 e-10 0.000 000 90 e-10 m
atomic mass constant 1.660 539 040 e-27 0.000 000 020 e-27 kg
atomic mass constant energy equivalent 1.492 418 062 e-10 0.000 000 018 e-10 J
atomic mass constant energy equivalent in MeV 931.494 0954 0.000 0057 MeV
atomic mass unit-electron volt relationship 931.494 0954 e6 0.000 0057 e6 eV
atomic mass unit-hartree relationship 3.423 177 6902 e7 0.000 000 0016 e7 E_h
atomic mass unit-hertz relationship 2.252 342 7206 e23 0.000 000 0010 e23 Hz
atomic mass unit-inverse meter relationship 7.513 006 6166 e14 0.000 000 0034 e14 m^-1
atomic mass unit-joule relationship 1.492 418 062 e-10 0.000 000 018 e-10 J
atomic mass unit-kelvin relationship 1.080 954 38 e13 0.000 000 62 e13 K
atomic mass unit-kilogram relationship 1.660 539 040 e-27 0.000 000 020 e-27 kg
atomic unit of 1st hyperpolarizability 3.206 361 329 e-53 0.000 000 020 e-53 C^3 m^3 J^-2
atomic unit of 2nd hyperpolarizability 6.235 380 085 e-65 0.000 000 077 e-65 C^4 m^4 J^-3
atomic unit of action 1.054 571 800 e-34 0.000 000 013 e-34 J s
atomic unit of charge 1.602 176 6208 e-19 0.000 000 0098 e-19 C
atomic unit of charge density 1.081 202 3770 e12 0.000 000 0067 e12 C m^-3
atomic unit of current 6.623 618 183 e-3 0.000 000 041 e-3 A
atomic unit of electric dipole mom. 8.478 353 552 e-30 0.000 000 052 e-30 C m
atomic unit of electric field 5.142 206 707 e11 0.000 000 032 e11 V m^-1
atomic unit of electric field gradient 9.717 362 356 e21 0.000 000 060 e21 V m^-2
atomic unit of electric polarizability 1.648 777 2731 e-41 0.000 000 0011 e-41 C^2 m^2 J^-1
atomic unit of electric potential 27.211 386 02 0.000 000 17 V
atomic unit of electric quadrupole mom. 4.486 551 484 e-40 0.000 000 028 e-40 C m^2
atomic unit of energy 4.359 744 650 e-18 0.000 000 054 e-18 J
atomic unit of force 8.238 723 36 e-8 0.000 000 10 e-8 N
atomic unit of length 0.529 177 210 67 e-10 0.000 000 000 12 e-10 m
atomic unit of mag. dipole mom. 1.854 801 999 e-23 0.000 000 011 e-23 J T^-1
atomic unit of mag. flux density 2.350 517 550 e5 0.000 000 014 e5 T
atomic unit of magnetizability 7.891 036 5886 e-29 0.000 000 0090 e-29 J T^-2
atomic unit of mass 9.109 383 56 e-31 0.000 000 11 e-31 kg
atomic unit of mom.um 1.992 851 882 e-24 0.000 000 024 e-24 kg m s^-1
atomic unit of permittivity 1.112 650 056... e-10 (exact) F m^-1
atomic unit of time 2.418 884 326509e-17 0.000 000 000014e-17 s
atomic unit of velocity 2.187 691 262 77 e6 0.000 000 000 50 e6 m s^-1
Avogadro constant 6.022 140 857 e23 0.000 000 074 e23 mol^-1
Bohr magneton 927.400 9994 e-26 0.000 0057 e-26 J T^-1
Bohr magneton in eV/T 5.788 381 8012 e-5 0.000 000 0026 e-5 eV T^-1
Bohr magneton in Hz/T 13.996 245 042 e9 0.000 000 086 e9 Hz T^-1
Bohr magneton in inverse meters per tesla 46.686 448 14 0.000 000 29 m^-1 T^-1
Bohr magneton in K/T 0.671 714 05 0.000 000 39 K T^-1
Bohr radius 0.529 177 210 67 e-10 0.000 000 000 12 e-10 m
Boltzmann constant 1.380 648 52 e-23 0.000 000 79 e-23 J K^-1
Boltzmann constant in eV/K 8.617 3303 e-5 0.000 0050 e-5 eV K^-1
Boltzmann constant in Hz/K 2.083 6612 e10 0.000 0012 e10 Hz K^-1
Boltzmann constant in inverse meters per kelvin 69.503 457 0.000 040 m^-1 K^-1
characteristic impedance of vacuum 376.730 313 461... (exact) ohm
classical electron radius 2.817 940 3227 e-15 0.000 000 0019 e-15 m
Compton wavelength 2.426 310 2367 e-12 0.000 000 0011 e-12 m
Compton wavelength over 2 pi 386.159 267 64 e-15 0.000 000 18 e-15 m
conductance quantum 7.748 091 7310 e-5 0.000 000 0018 e-5 S
conventional value of Josephson constant 483 597.9 e9 (exact) Hz V^-1
conventional value of von Klitzing constant 25 812.807 (exact) ohm
Cu x unit 1.002 076 97 e-13 0.000 000 28 e-13 m
deuteron-electron mag. mom. ratio -4.664 345 535 e-4 0.000 000 026 e-4
deuteron-electron mass ratio 3670.482 967 85 0.000 000 13
deuteron g factor 0.857 438 2311 0.000 000 0048
deuteron mag. mom. 0.433 073 5040 e-26 0.000 000 0036 e-26 J T^-1
deuteron mag. mom. to Bohr magneton ratio 0.466 975 4554 e-3 0.000 000 0026 e-3
deuteron mag. mom. to nuclear magneton ratio 0.857 438 2311 0.000 000 0048
deuteron mass 3.343 583 719 e-27 0.000 000 041 e-27 kg
deuteron mass energy equivalent 3.005 063 183 e-10 0.000 000 037 e-10 J
deuteron mass energy equivalent in MeV 1875.612 928 0.000 012 MeV
deuteron mass in u 2.013 553 212 745 0.000 000 000 040 u
deuteron molar mass 2.013 553 212 745 e-3 0.000 000 000 040 e-3 kg mol^-1
deuteron-neutron mag. mom. ratio -0.448 206 52 0.000 000 11
deuteron-proton mag. mom. ratio 0.307 012 2077 0.000 000 0015
deuteron-proton mass ratio 1.999 007 500 87 0.000 000 000 19
deuteron rms charge radius 2.1413 e-15 0.0025 e-15 m
electric constant 8.854 187 817... e-12 (exact) F m^-1
electron charge to mass quotient -1.758 820 024 e11 0.000 000 011 e11 C kg^-1
electron-deuteron mag. mom. ratio -2143.923 499 0.000 012
electron-deuteron mass ratio 2.724 437 107 484 e-4 0.000 000 000 096 e-4
electron g factor -2.002 319 304 361 82 0.000 000 000 000 52
electron gyromag. ratio 1.760 859 644 e11 0.000 000 011 e11 s^-1 T^-1
electron gyromag. ratio over 2 pi 28 024.951 64 0.000 17 MHz T^-1
electron-helion mass ratio 1.819 543 074 854 e-4 0.000 000 000 088 e-4
electron mag. mom. -928.476 4620 e-26 0.000 0057 e-26 J T^-1
electron mag. mom. anomaly 1.159 652 180 91 e-3 0.000 000 000 26 e-3
electron mag. mom. to Bohr magneton ratio -1.001 159 652 180 91 0.000 000 000 000 26
electron mag. mom. to nuclear magneton ratio -1838.281 972 34 0.000 000 17
electron mass 9.109 383 56 e-31 0.000 000 11 e-31 kg
electron mass energy equivalent 8.187 105 65 e-14 0.000 000 10 e-14 J
electron mass energy equivalent in MeV 0.510 998 9461 0.000 000 0031 MeV
electron mass in u 5.485 799 090 70 e-4 0.000 000 000 16 e-4 u
electron molar mass 5.485 799 090 70 e-7 0.000 000 000 16 e-7 kg mol^-1
electron-muon mag. mom. ratio 206.766 9880 0.000 0046
electron-muon mass ratio 4.836 331 70 e-3 0.000 000 11 e-3
electron-neutron mag. mom. ratio 960.920 50 0.000 23
electron-neutron mass ratio 5.438 673 4428 e-4 0.000 000 0027 e-4
electron-proton mag. mom. ratio -658.210 6866 0.000 0020
electron-proton mass ratio 5.446 170 213 52 e-4 0.000 000 000 52 e-4
electron-tau mass ratio 2.875 92 e-4 0.000 26 e-4
electron to alpha particle mass ratio 1.370 933 554 798 e-4 0.000 000 000 045 e-4
electron to shielded helion mag. mom. ratio 864.058 257 0.000 010
electron to shielded proton mag. mom. ratio -658.227 5971 0.000 0072
electron-triton mass ratio 1.819 200 062 203 e-4 0.000 000 000 084 e-4
electron volt 1.602 176 6208 e-19 0.000 000 0098 e-19 J
electron volt-atomic mass unit relationship 1.073 544 1105 e-9 0.000 000 0066 e-9 u
electron volt-hartree relationship 3.674 932 248 e-2 0.000 000 023 e-2 E_h
electron volt-hertz relationship 2.417 989 262 e14 0.000 000 015 e14 Hz
electron volt-inverse meter relationship 8.065 544 005 e5 0.000 000 050 e5 m^-1
electron volt-joule relationship 1.602 176 6208 e-19 0.000 000 0098 e-19 J
electron volt-kelvin relationship 1.160 452 21 e4 0.000 000 67 e4 K
electron volt-kilogram relationship 1.782 661 907 e-36 0.000 000 011 e-36 kg
elementary charge 1.602 176 6208 e-19 0.000 000 0098 e-19 C
elementary charge over h 2.417 989 262 e14 0.000 000 015 e14 A J^-1
Faraday constant 96 485.332 89 0.000 59 C mol^-1
Faraday constant for conventional electric current 96 485.3251 0.0012 C_90 mol^-1
Fermi coupling constant 1.166 3787 e-5 0.000 0006 e-5 GeV^-2
fine-structure constant 7.297 352 5664 e-3 0.000 000 0017 e-3
first radiation constant 3.741 771 790 e-16 0.000 000 046 e-16 W m^2
first radiation constant for spectral radiance 1.191 042 953 e-16 0.000 000 015 e-16 W m^2 sr^-1
hartree-atomic mass unit relationship 2.921 262 3197 e-8 0.000 000 0013 e-8 u
hartree-electron volt relationship 27.211 386 02 0.000 000 17 eV
Hartree energy 4.359 744 650 e-18 0.000 000 054 e-18 J
Hartree energy in eV 27.211 386 02 0.000 000 17 eV
hartree-hertz relationship 6.579 683 920 711 e15 0.000 000 000 039 e15 Hz
hartree-inverse meter relationship 2.194 746 313 702 e7 0.000 000 000 013 e7 m^-1
hartree-joule relationship 4.359 744 650 e-18 0.000 000 054 e-18 J
hartree-kelvin relationship 3.157 7513 e5 0.000 0018 e5 K
hartree-kilogram relationship 4.850 870 129 e-35 0.000 000 060 e-35 kg
helion-electron mass ratio 5495.885 279 22 0.000 000 27
helion g factor -4.255 250 616 0.000 000 050
helion mag. mom. -1.074 617 522 e-26 0.000 000 014 e-26 J T^-1
helion mag. mom. to Bohr magneton ratio -1.158 740 958 e-3 0.000 000 014 e-3
helion mag. mom. to nuclear magneton ratio -2.127 625 308 0.000 000 025
helion mass 5.006 412 700 e-27 0.000 000 062 e-27 kg
helion mass energy equivalent 4.499 539 341 e-10 0.000 000 055 e-10 J
helion mass energy equivalent in MeV 2808.391 586 0.000 017 MeV
helion mass in u 3.014 932 246 73 0.000 000 000 12 u
helion molar mass 3.014 932 246 73 e-3 0.000 000 000 12 e-3 kg mol^-1
helion-proton mass ratio 2.993 152 670 46 0.000 000 000 29
hertz-atomic mass unit relationship 4.439 821 6616 e-24 0.000 000 0020 e-24 u
hertz-electron volt relationship 4.135 667 662 e-15 0.000 000 025 e-15 eV
hertz-hartree relationship 1.5198298460088 e-16 0.0000000000090e-16 E_h
hertz-inverse meter relationship 3.335 640 951... e-9 (exact) m^-1
hertz-joule relationship 6.626 070 040 e-34 0.000 000 081 e-34 J
hertz-kelvin relationship 4.799 2447 e-11 0.000 0028 e-11 K
hertz-kilogram relationship 7.372 497 201 e-51 0.000 000 091 e-51 kg
inverse fine-structure constant 137.035 999 139 0.000 000 031
inverse meter-atomic mass unit relationship 1.331 025 049 00 e-15 0.000 000 000 61 e-15 u
inverse meter-electron volt relationship 1.239 841 9739 e-6 0.000 000 0076 e-6 eV
inverse meter-hartree relationship 4.556 335 252 767 e-8 0.000 000 000 027 e-8 E_h
inverse meter-hertz relationship 299 792 458 (exact) Hz
inverse meter-joule relationship 1.986 445 824 e-25 0.000 000 024 e-25 J
inverse meter-kelvin relationship 1.438 777 36 e-2 0.000 000 83 e-2 K
inverse meter-kilogram relationship 2.210 219 057 e-42 0.000 000 027 e-42 kg
inverse of conductance quantum 12 906.403 7278 0.000 0029 ohm
Josephson constant 483 597.8525 e9 0.0030 e9 Hz V^-1
joule-atomic mass unit relationship 6.700 535 363 e9 0.000 000 082 e9 u
joule-electron volt relationship 6.241 509 126 e18 0.000 000 038 e18 eV
joule-hartree relationship 2.293 712 317 e17 0.000 000 028 e17 E_h
joule-hertz relationship 1.509 190 205 e33 0.000 000 019 e33 Hz
joule-inverse meter relationship 5.034 116 651 e24 0.000 000 062 e24 m^-1
joule-kelvin relationship 7.242 9731 e22 0.000 0042 e22 K
joule-kilogram relationship 1.112 650 056... e-17 (exact) kg
kelvin-atomic mass unit relationship 9.251 0842 e-14 0.000 0053 e-14 u
kelvin-electron volt relationship 8.617 3303 e-5 0.000 0050 e-5 eV
kelvin-hartree relationship 3.166 8105 e-6 0.000 0018 e-6 E_h
kelvin-hertz relationship 2.083 6612 e10 0.000 0012 e10 Hz
kelvin-inverse meter relationship 69.503 457 0.000 040 m^-1
kelvin-joule relationship 1.380 648 52 e-23 0.000 000 79 e-23 J
kelvin-kilogram relationship 1.536 178 65 e-40 0.000 000 88 e-40 kg
kilogram-atomic mass unit relationship 6.022 140 857 e26 0.000 000 074 e26 u
kilogram-electron volt relationship 5.609 588 650 e35 0.000 000 034 e35 eV
kilogram-hartree relationship 2.061 485 823 e34 0.000 000 025 e34 E_h
kilogram-hertz relationship 1.356 392 512 e50 0.000 000 017 e50 Hz
kilogram-inverse meter relationship 4.524 438 411 e41 0.000 000 056 e41 m^-1
kilogram-joule relationship 8.987 551 787... e16 (exact) J
kilogram-kelvin relationship 6.509 6595 e39 0.000 0037 e39 K
lattice parameter of silicon 543.102 0504 e-12 0.000 0089 e-12 m
Loschmidt constant (273.15 K, 100 kPa) 2.651 6467 e25 0.000 0015 e25 m^-3
Loschmidt constant (273.15 K, 101.325 kPa) 2.686 7811 e25 0.000 0015 e25 m^-3
mag. constant 12.566 370 614... e-7 (exact) N A^-2
mag. flux quantum 2.067 833 831 e-15 0.000 000 013 e-15 Wb
molar gas constant 8.314 4598 0.000 0048 J mol^-1 K^-1
molar mass constant 1 e-3 (exact) kg mol^-1
molar mass of carbon-12 12 e-3 (exact) kg mol^-1
molar Planck constant 3.990 312 7110 e-10 0.000 000 0018 e-10 J s mol^-1
molar Planck constant times c 0.119 626 565 582 0.000 000 000 054 J m mol^-1
molar volume of ideal gas (273.15 K, 100 kPa) 22.710 947 e-3 0.000 013 e-3 m^3 mol^-1
molar volume of ideal gas (273.15 K, 101.325 kPa) 22.413 962 e-3 0.000 013 e-3 m^3 mol^-1
molar volume of silicon 12.058 832 14 e-6 0.000 000 61 e-6 m^3 mol^-1
Mo x unit 1.002 099 52 e-13 0.000 000 53 e-13 m
muon Compton wavelength 11.734 441 11 e-15 0.000 000 26 e-15 m
muon Compton wavelength over 2 pi 1.867 594 308 e-15 0.000 000 042 e-15 m
muon-electron mass ratio 206.768 2826 0.000 0046
muon g factor -2.002 331 8418 0.000 000 0013
muon mag. mom. -4.490 448 26 e-26 0.000 000 10 e-26 J T^-1
muon mag. mom. anomaly 1.165 920 89 e-3 0.000 000 63 e-3
muon mag. mom. to Bohr magneton ratio -4.841 970 48 e-3 0.000 000 11 e-3
muon mag. mom. to nuclear magneton ratio -8.890 597 05 0.000 000 20
muon mass 1.883 531 594 e-28 0.000 000 048 e-28 kg
muon mass energy equivalent 1.692 833 774 e-11 0.000 000 043 e-11 J
muon mass energy equivalent in MeV 105.658 3745 0.000 0024 MeV
muon mass in u 0.113 428 9257 0.000 000 0025 u
muon molar mass 0.113 428 9257 e-3 0.000 000 0025 e-3 kg mol^-1
muon-neutron mass ratio 0.112 454 5167 0.000 000 0025
muon-proton mag. mom. ratio -3.183 345 142 0.000 000 071
muon-proton mass ratio 0.112 609 5262 0.000 000 0025
muon-tau mass ratio 5.946 49 e-2 0.000 54 e-2
natural unit of action 1.054 571 800 e-34 0.000 000 013 e-34 J s
natural unit of action in eV s 6.582 119 514 e-16 0.000 000 040 e-16 eV s
natural unit of energy 8.187 105 65 e-14 0.000 000 10 e-14 J
natural unit of energy in MeV 0.510 998 9461 0.000 000 0031 MeV
natural unit of length 386.159 267 64 e-15 0.000 000 18 e-15 m
natural unit of mass 9.109 383 56 e-31 0.000 000 11 e-31 kg
natural unit of mom.um 2.730 924 488 e-22 0.000 000 034 e-22 kg m s^-1
natural unit of mom.um in MeV/c 0.510 998 9461 0.000 000 0031 MeV/c
natural unit of time 1.288 088 667 12 e-21 0.000 000 000 58 e-21 s
natural unit of velocity 299 792 458 (exact) m s^-1
neutron Compton wavelength 1.319 590 904 81 e-15 0.000 000 000 88 e-15 m
neutron Compton wavelength over 2 pi 0.210 019 415 36 e-15 0.000 000 000 14 e-15 m
neutron-electron mag. mom. ratio 1.040 668 82 e-3 0.000 000 25 e-3
neutron-electron mass ratio 1838.683 661 58 0.000 000 90
neutron g factor -3.826 085 45 0.000 000 90
neutron gyromag. ratio 1.832 471 72 e8 0.000 000 43 e8 s^-1 T^-1
neutron gyromag. ratio over 2 pi 29.164 6933 0.000 0069 MHz T^-1
neutron mag. mom. -0.966 236 50 e-26 0.000 000 23 e-26 J T^-1
neutron mag. mom. to Bohr magneton ratio -1.041 875 63 e-3 0.000 000 25 e-3
neutron mag. mom. to nuclear magneton ratio -1.913 042 73 0.000 000 45
neutron mass 1.674 927 471 e-27 0.000 000 021 e-27 kg
neutron mass energy equivalent 1.505 349 739 e-10 0.000 000 019 e-10 J
neutron mass energy equivalent in MeV 939.565 4133 0.000 0058 MeV
neutron mass in u 1.008 664 915 88 0.000 000 000 49 u
neutron molar mass 1.008 664 915 88 e-3 0.000 000 000 49 e-3 kg mol^-1
neutron-muon mass ratio 8.892 484 08 0.000 000 20
neutron-proton mag. mom. ratio -0.684 979 34 0.000 000 16
neutron-proton mass difference 2.305 573 77 e-30 0.000 000 85 e-30
neutron-proton mass difference energy equivalent 2.072 146 37 e-13 0.000 000 76 e-13
neutron-proton mass difference energy equivalent in MeV 1.293 332 05 0.000 000 48
neutron-proton mass difference in u 0.001 388 449 00 0.000 000 000 51
neutron-proton mass ratio 1.001 378 418 98 0.000 000 000 51
neutron-tau mass ratio 0.528 790 0.000 048
neutron to shielded proton mag. mom. ratio -0.684 996 94 0.000 000 16
Newtonian constant of gravitation 6.674 08 e-11 0.000 31 e-11 m^3 kg^-1 s^-2
Newtonian constant of gravitation over h-bar c 6.708 61 e-39 0.000 31 e-39 (GeV/c^2)^-2
nuclear magneton 5.050 783 699 e-27 0.000 000 031 e-27 J T^-1
nuclear magneton in eV/T 3.152 451 2550 e-8 0.000 000 0015 e-8 eV T^-1
nuclear magneton in inverse meters per tesla 2.542 623 432 e-2 0.000 000 016 e-2 m^-1 T^-1
nuclear magneton in K/T 3.658 2690 e-4 0.000 0021 e-4 K T^-1
nuclear magneton in MHz/T 7.622 593 285 0.000 000 047 MHz T^-1
Planck constant 6.626 070 040 e-34 0.000 000 081 e-34 J s
Planck constant in eV s 4.135 667 662 e-15 0.000 000 025 e-15 eV s
Planck constant over 2 pi 1.054 571 800 e-34 0.000 000 013 e-34 J s
Planck constant over 2 pi in eV s 6.582 119 514 e-16 0.000 000 040 e-16 eV s
Planck constant over 2 pi times c in MeV fm 197.326 9788 0.000 0012 MeV fm
Planck length 1.616 229 e-35 0.000 038 e-35 m
Planck mass 2.176 470 e-8 0.000 051 e-8 kg
Planck mass energy equivalent in GeV 1.220 910 e19 0.000 029 e19 GeV
Planck temperature 1.416 808 e32 0.000 033 e32 K
Planck time 5.391 16 e-44 0.000 13 e-44 s
proton charge to mass quotient 9.578 833 226 e7 0.000 000 059 e7 C kg^-1
proton Compton wavelength 1.321 409 853 96 e-15 0.000 000 000 61 e-15 m
proton Compton wavelength over 2 pi 0.210 308910109e-15 0.000 000 000097e-15 m
proton-electron mass ratio 1836.152 673 89 0.000 000 17
proton g factor 5.585 694 702 0.000 000 017
proton gyromag. ratio 2.675 221 900 e8 0.000 000 018 e8 s^-1 T^-1
proton gyromag. ratio over 2 pi 42.577 478 92 0.000 000 29 MHz T^-1
proton mag. mom. 1.410 606 7873 e-26 0.000 000 0097 e-26 J T^-1
proton mag. mom. to Bohr magneton ratio 1.521 032 2053 e-3 0.000 000 0046 e-3
proton mag. mom. to nuclear magneton ratio 2.792 847 3508 0.000 000 0085
proton mag. shielding correction 25.691 e-6 0.011 e-6
proton mass 1.672 621 898 e-27 0.000 000 021 e-27 kg
proton mass energy equivalent 1.503 277 593 e-10 0.000 000 018 e-10 J
proton mass energy equivalent in MeV 938.272 0813 0.000 0058 MeV
proton mass in u 1.007 276 466 879 0.000 000 000 091 u
proton molar mass 1.007 276 466 879 e-3 0.000 000 000 091 e-3 kg mol^-1
proton-muon mass ratio 8.880 243 38 0.000 000 20
proton-neutron mag. mom. ratio -1.459 898 05 0.000 000 34
proton-neutron mass ratio 0.998 623 478 44 0.000 000 000 51
proton rms charge radius 0.8751 e-15 0.0061 e-15 m
proton-tau mass ratio 0.528 063 0.000 048
quantum of circulation 3.636 947 5486 e-4 0.000 000 0017 e-4 m^2 s^-1
quantum of circulation times 2 7.273 895 0972 e-4 0.000 000 0033 e-4 m^2 s^-1
Rydberg constant 10 973 731.568 508 0.000 065 m^-1
Rydberg constant times c in Hz 3.289 841 960 355 e15 0.000 000 000 019 e15 Hz
Rydberg constant times hc in eV 13.605 693 009 0.000 000 084 eV
Rydberg constant times hc in J 2.179 872 325 e-18 0.000 000 027 e-18 J
Sackur-Tetrode constant (1 K, 100 kPa) -1.151 7084 0.000 0014
Sackur-Tetrode constant (1 K, 101.325 kPa) -1.164 8714 0.000 0014
second radiation constant 1.438 777 36 e-2 0.000 000 83 e-2 m K
shielded helion gyromag. ratio 2.037 894 585 e8 0.000 000 027 e8 s^-1 T^-1
shielded helion gyromag. ratio over 2 pi 32.434 099 66 0.000 000 43 MHz T^-1
shielded helion mag. mom. -1.074 553 080 e-26 0.000 000 014 e-26 J T^-1
shielded helion mag. mom. to Bohr magneton ratio -1.158 671 471 e-3 0.000 000 014 e-3
shielded helion mag. mom. to nuclear magneton ratio -2.127 497 720 0.000 000 025
shielded helion to proton mag. mom. ratio -0.761 766 5603 0.000 000 0092
shielded helion to shielded proton mag. mom. ratio -0.761 786 1313 0.000 000 0033
shielded proton gyromag. ratio 2.675 153 171 e8 0.000 000 033 e8 s^-1 T^-1
shielded proton gyromag. ratio over 2 pi 42.576 385 07 0.000 000 53 MHz T^-1
shielded proton mag. mom. 1.410 570 547 e-26 0.000 000 018 e-26 J T^-1
shielded proton mag. mom. to Bohr magneton ratio 1.520 993 128 e-3 0.000 000 017 e-3
shielded proton mag. mom. to nuclear magneton ratio 2.792 775 600 0.000 000 030
speed of light in vacuum 299 792 458 (exact) m s^-1
standard acceleration of gravity 9.806 65 (exact) m s^-2
standard atmosphere 101 325 (exact) Pa
standard-state pressure 100 000 (exact) Pa
Stefan-Boltzmann constant 5.670 367 e-8 0.000 013 e-8 W m^-2 K^-4
tau Compton wavelength 0.697 787 e-15 0.000 063 e-15 m
tau Compton wavelength over 2 pi 0.111 056 e-15 0.000 010 e-15 m
tau-electron mass ratio 3477.15 0.31
tau mass 3.167 47 e-27 0.000 29 e-27 kg
tau mass energy equivalent 2.846 78 e-10 0.000 26 e-10 J
tau mass energy equivalent in MeV 1776.82 0.16 MeV
tau mass in u 1.907 49 0.000 17 u
tau molar mass 1.907 49 e-3 0.000 17 e-3 kg mol^-1
tau-muon mass ratio 16.8167 0.0015
tau-neutron mass ratio 1.891 11 0.000 17
tau-proton mass ratio 1.893 72 0.000 17
Thomson cross section 0.665 245 871 58 e-28 0.000 000 000 91 e-28 m^2
triton-electron mass ratio 5496.921 535 88 0.000 000 26
triton g factor 5.957 924 920 0.000 000 028
triton mag. mom. 1.504 609 503 e-26 0.000 000 012 e-26 J T^-1
triton mag. mom. to Bohr magneton ratio 1.622 393 6616 e-3 0.000 000 0076 e-3
triton mag. mom. to nuclear magneton ratio 2.978 962 460 0.000 000 014
triton mass 5.007 356 665 e-27 0.000 000 062 e-27 kg
triton mass energy equivalent 4.500 387 735 e-10 0.000 000 055 e-10 J
triton mass energy equivalent in MeV 2808.921 112 0.000 017 MeV
triton mass in u 3.015 500 716 32 0.000 000 000 11 u
triton molar mass 3.015 500 716 32 e-3 0.000 000 000 11 e-3 kg mol^-1
triton-proton mass ratio 2.993 717 033 48 0.000 000 000 22
unified atomic mass unit 1.660 539 040 e-27 0.000 000 020 e-27 kg
von Klitzing constant 25 812.807 4555 0.000 0059 ohm
weak mixing angle 0.2223 0.0021
Wien frequency displacement law constant 5.878 9238 e10 0.000 0034 e10 Hz K^-1
Wien wavelength displacement law constant 2.897 7729 e-3 0.000 0017 e-3 m K"""
# -----------------------------------------------------------------------------
# Master table mapping constant name -> (value, unit, uncertainty).  Filled in
# below from every CODATA release, then extended with computed exact values
# and legacy-name aliases.
physical_constants = {}
def parse_constants(d):
    """Parse a fixed-width CODATA table into a dictionary.

    Each line of `d` holds the constant name in columns 0-54, the value in
    columns 55-76, the uncertainty in columns 77-98 and the unit from column
    99 onward.  Spaces inside the numeric fields are digit-group separators,
    '(exact)' denotes zero uncertainty, and '...' marks truncated digits of
    exact values.

    Parameters
    ----------
    d : str
        Multi-line CODATA table text.

    Returns
    -------
    dict
        Maps constant name -> (value, unit, uncertainty).
    """
    constants = {}
    for line in d.split('\n'):
        # Tolerate blank or whitespace-only lines (e.g. a trailing newline);
        # float('') would otherwise raise ValueError.
        if not line.strip():
            continue
        name = line[:55].rstrip()
        val = line[55:77].replace(' ', '').replace('...', '')
        val = float(val)
        uncert = line[77:99].replace(' ', '').replace('(exact)', '0')
        uncert = float(uncert)
        units = line[99:].rstrip()
        constants[name] = (val, units, uncert)
    return constants
# Parse every CODATA release once; later releases override earlier entries in
# the merged `physical_constants` dict.
_physical_constants_2002 = parse_constants(txt2002)
_physical_constants_2006 = parse_constants(txt2006)
_physical_constants_2010 = parse_constants(txt2010)
_physical_constants_2014 = parse_constants(txt2014)
physical_constants.update(_physical_constants_2002)
physical_constants.update(_physical_constants_2006)
physical_constants.update(_physical_constants_2010)
physical_constants.update(_physical_constants_2014)
# The most recent release is the authoritative data set.
_current_constants = _physical_constants_2014
_current_codata = "CODATA 2014"
# check obsolete values
# Keys that appeared in an older release but were dropped from the current
# one; accessing them raises a ConstantWarning (see _check_obsolete below).
_obsolete_constants = {}
for k in physical_constants:
    if k not in _current_constants:
        _obsolete_constants[k] = True
# generate some additional aliases
# Older releases spelled some names differently ('magn.' instead of 'mag.',
# 'momentum' instead of 'mom.um'); map each old spelling to the current key
# so lookups with either name keep working.
_aliases = {}
for k in _physical_constants_2002:
    if 'magn.' in k:
        _aliases[k] = k.replace('magn.', 'mag.')
for k in _physical_constants_2006:
    if 'momentum' in k:
        _aliases[k] = k.replace('momentum', 'mom.um')
class ConstantWarning(DeprecationWarning):
    """Warning raised when a constant absent from the current CODATA set is accessed."""
def _check_obsolete(key):
    """Emit a ConstantWarning if `key` is obsolete and has no current alias."""
    if key not in _obsolete_constants or key in _aliases:
        return
    warnings.warn("Constant '%s' is not in current %s data set" % (
        key, _current_codata), ConstantWarning)
def value(key):
    """
    Value of the physical constant indexed by `key`.

    Parameters
    ----------
    key : Python string or unicode
        Key into the `physical_constants` dictionary.

    Returns
    -------
    value : float
        Numerical value of the constant named `key`.

    Examples
    --------
    >>> from scipy import constants
    >>> constants.value(u'elementary charge')
    1.6021766208e-19

    """
    # Warn (but still answer) if the key comes from an older CODATA release.
    _check_obsolete(key)
    val, _units, _uncert = physical_constants[key]
    return val
def unit(key):
    """
    Unit string of the physical constant indexed by `key`.

    Parameters
    ----------
    key : Python string or unicode
        Key into the `physical_constants` dictionary.

    Returns
    -------
    unit : Python string
        Unit of the constant named `key`.

    Examples
    --------
    >>> from scipy import constants
    >>> constants.unit(u'proton mass')
    'kg'

    """
    # Warn (but still answer) if the key comes from an older CODATA release.
    _check_obsolete(key)
    _val, units, _uncert = physical_constants[key]
    return units
def precision(key):
    """
    Relative precision of the physical constant indexed by `key`.

    Parameters
    ----------
    key : Python string or unicode
        Key into the `physical_constants` dictionary.

    Returns
    -------
    prec : float
        Relative precision (uncertainty / value) of the constant named `key`.

    Examples
    --------
    >>> from scipy import constants
    >>> constants.precision(u'proton mass')
    1.2555138746605121e-08

    """
    # Warn (but still answer) if the key comes from an older CODATA release.
    _check_obsolete(key)
    val, _units, uncert = physical_constants[key]
    return uncert / val
def find(sub=None, disp=False):
    """
    Return the list of physical_constant keys containing a given string.

    Parameters
    ----------
    sub : str, unicode
        Sub-string to search keys for. By default, return all keys.
    disp : bool
        If True, print the keys that are found, and return None.
        Otherwise, return the list of keys without printing anything.

    Returns
    -------
    keys : list or None
        If `disp` is False, the list of keys is returned.
        Otherwise, None is returned.

    Examples
    --------
    >>> from scipy.constants import find, physical_constants

    Which keys in the ``physical_constants`` dictionary contain 'boltzmann'?

    >>> find('boltzmann')
    ['Boltzmann constant',
     'Boltzmann constant in Hz/K',
     'Boltzmann constant in eV/K',
     'Boltzmann constant in inverse meters per kelvin',
     'Stefan-Boltzmann constant']

    Get the constant called 'Boltzmann constant in Hz/K':

    >>> physical_constants['Boltzmann constant in Hz/K']
    (20836612000.0, 'Hz K^-1', 12000.0)

    Find constants with 'radius' in the key:

    >>> find('radius')
    ['Bohr radius',
     'classical electron radius',
     'deuteron rms charge radius',
     'proton rms charge radius']
    >>> physical_constants['classical electron radius']
    (2.8179403227e-15, 'm', 1.9e-24)

    """
    # Only the current data set is searched; obsolete keys are excluded.
    if sub is None:
        matches = sorted(_current_constants)
    else:
        needle = sub.lower()
        matches = sorted(key for key in _current_constants
                         if needle in key.lower())
    if disp:
        for key in matches:
            print(key)
        return
    return matches
# Table is lacking some digits for exact values: calculate from definition
c = value('speed of light in vacuum')
mu0 = 4e-7 * pi  # vacuum permeability, exact by the (pre-2019) SI definition
epsilon0 = 1 / (mu0 * c * c)  # vacuum permittivity derived from c and mu0
exact_values = {
    'mag. constant': (mu0, 'N A^-2', 0.0),
    'electric constant': (epsilon0, 'F m^-1', 0.0),
    'characteristic impedance of vacuum': (sqrt(mu0 / epsilon0), 'ohm', 0.0),
    'atomic unit of permittivity': (4 * epsilon0 * pi, 'F m^-1', 0.0),
    'joule-kilogram relationship': (1 / (c * c), 'kg', 0.0),
    'kilogram-joule relationship': (c * c, 'J', 0.0),
    'hertz-inverse meter relationship': (1 / c, 'm^-1', 0.0)
}
# sanity check
# Abort loudly if a computed exact value disagrees with the parsed table
# entry by more than one part in 1e9 (would indicate a parsing bug).
for key in exact_values:
    val = _current_constants[key][0]
    if abs(exact_values[key][0] - val) / val > 1e-9:
        raise ValueError("Constants.codata: exact values too far off.")
physical_constants.update(exact_values)
# finally, insert aliases for values
# Keep only aliases whose target still exists in the current data set.
for k, v in list(_aliases.items()):
    if v in _current_constants:
        physical_constants[k] = physical_constants[v]
    else:
        del _aliases[k]
|
gertingold/scipy
|
scipy/constants/codata.py
|
Python
|
bsd-3-clause
| 115,314
|
import re
import socket
import errno
import select
import logging
import queue as q
import event_util as eu
import time
import numerics as nu
class Network(object):
    '''
    Handles messages to the socket.

    Consists of a few basic functions aside from sending/receiving:
    sending NICK, USER, message parsing, and sending PONG responses.
    '''
    # Really long regex to match and split most irc messages correctly
    # (no guarantees though, as it hasn't been fully road-tested)
    ircmsg = re.compile(r"(?P<prefix>:\S+ )?(?P<command>(\w+|\d{3}))(?P<params>( [^:]\S+)*)(?P<postfix> :.*)?")

    def __init__(self, inqueue, outqueue, botname, module_name="network", b_size=1024, log_level=logging.INFO):
        '''
        inqueue/outqueue: priority queues shared with the bot core that carry
        events into and out of the network layer.
        botname: used to namespace the logger.
        b_size: socket read chunk size in bytes.
        '''
        self.socket = None
        self.module_name = module_name
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # non-blocking socket so connect/read/write never stall the loop
        self.socket.setblocking(0)
        # trailing partial IRC line carried over between recv() calls
        self.incomplete_buffer = ""
        self.buffer_size = b_size
        self.log = logging.getLogger(u"{0}.{1}".format(botname, module_name))
        self.log.setLevel(log_level)
        self.is_running = True
        self.connected = False
        # priority queues with data in form of (priority, data)
        self.inq = inqueue
        self.outq = outqueue
        # list of our sockets
        self.inputs = [self.socket]
        self.outputs = [self.socket]
        # Events coming out of the network - unused for now
        self.in_events = []
        # Event coming in from the ircbot core
        self.out_events = [
            eu.event(nu.BOT_MSG, self.msg),
            eu.event(nu.BOT_MSGS_ALL, self.msgs_all),
            eu.event(nu.BOT_MSGS, self.msgs),
            eu.event(nu.BOT_MSG_ALL, self.msg_all),
            eu.event(nu.BOT_NOTICE, self.notice),
            eu.event(nu.BOT_NOTICE_ALL, self.notice_all),
            eu.event(nu.BOT_CONN, self.connect),
            eu.event(nu.BOT_USER, self.user),
            eu.event(nu.BOT_NICK, self.nick),
            eu.event(nu.BOT_JOIN_CHAN, self.join),
            eu.event(nu.BOT_QUIT, self.quit),
            eu.event(nu.BOT_KILL, self.kill),
            eu.event(nu.BOT_PONG, self.pong),
            eu.event(nu.BOT_NAMES, self.names),
            eu.event(nu.BOT_WHO, self.who),
        ]
        self.log.info('network initialised')

    def loop(self):
        '''
        Main loop: while disconnected, wait for a BOT_CONN event on the
        outbound queue; once connected, poll the sockets continuously.
        '''
        self.log.debug('Looping started')
        while self.is_running:
            if not self.connected:
                try:
                    m_event = self.outq.get(False)
                    if m_event.type == nu.BOT_CONN:
                        self.log.debug('Connection event!')
                        self.connect(*m_event.data)
                    else:
                        # drop non-connect events while disconnected
                        pass
                except q.Empty:
                    pass
            else:
                self.poll_sockets()
            time.sleep(.1)
        self.log.info('network ending')

    def poll_sockets(self):
        '''
        Uses select to get readable/writable sockets then calls
        read or write on them.
        '''
        readable, writable, exceptional = select.select(self.inputs, self.outputs, self.inputs)
        if readable:
            for r in readable:
                # read from r, placing the messages in the given queue
                self.handle_input(r, self.inq)
        if writable and not self.outq.empty():
            for w in writable:
                # write to w with items pulled from the given queue
                self.handle_output(w, self.outq)
        if exceptional:
            for e in exceptional:
                # TODO can we get the error to log as well?
                self.log.error(u'Exceptional socket {0}'.format(e.getpeername()))
                self.inputs.remove(e)
                self.outputs.remove(e)
        # if we lost all our sockets
        if not self.inputs or not self.outputs:
            self.log.error(u'No sockets left to read/write')
            self.connected = False
            # highest priority message that will get client to attempt to reconnect
            self.inq.put(eu.error('No sockets left to read/write from', priority=1))

    def handle_output(self, socket, outqueue):
        '''
        Takes an item from the outbound queue and puts it through our internal
        event handlers.

        NOTE(review): the `socket`/`outqueue` parameters are currently unused;
        the method always drains self.outq and sends over self.socket.
        '''
        try:
            # grab an item from the outbound queue
            m_event = self.outq.get(False)
            self.log.debug(u'Outwards event, {0}, data {1}'.format(m_event.type, m_event.data))
            # put them through our outbound event handlers
            triggered = False
            for e in self.out_events:
                if e(m_event):
                    triggered = True
            if not triggered:
                self.log.debug(u'Unhandled outbound event {0} data:{1}'.format(m_event.type, m_event.data))
        except q.Empty:
            # nothing to write
            pass

    def handle_input(self, socket, inqueue):
        '''
        Pull all possible irc lines from the socket,
        parse them and then put them through our internal
        event handling before sending them to the client.
        '''
        # pull all current lines from socket
        result = self.recv()
        clean = []
        for line in result:
            cleaned_message = self.parse_message(line)
            clean.append(cleaned_message)
        # go through the cleaned messages and put them through our internal
        # event handling before they reach client (normally used to tweak priorities)
        for msg in clean:
            for event in self.in_events:
                event(msg)  # TODO do I really need this?
            self.inq.put(msg)

    def send(self, line, encoding='utf-8'):
        '''
        Send a single IRC line, appending the \\r\\n terminator.

        The line is encoded to bytes *once* up front and the running offset
        counts bytes, not characters.  (Slicing the unicode string by the
        byte count returned by socket.send corrupts output on partial sends
        of multi-byte characters.)
        '''
        self.log.info(u'>> {0}'.format(line))
        line = line.replace('\r', ' ').replace('\n', ' ') + '\r\n'
        payload = line.encode(encoding)
        totalsent = 0
        while totalsent < len(payload):
            sent = self.socket.send(payload[totalsent:])
            totalsent = totalsent + sent

    def recv(self):
        '''
        Receive data from the server and split it into complete IRC lines.

        Incoming bytes are decoded and prefixed with any previously stored
        incomplete line.  After splitting on \\r\\n, the final element is
        always the unterminated remainder ('' when the data ended exactly on
        a delimiter) and is stored for the next call.  This replaces the old
        ``data[-2:] is '\\r\\n'`` identity test (string comparison with
        ``is``) and a local ``incomplete_buffer`` assignment that silently
        failed to clear ``self.incomplete_buffer``.
        '''
        d = self.socket.recv(self.buffer_size)
        data = d.decode('utf-8', 'replace')
        if self.incomplete_buffer:
            data = self.incomplete_buffer + data
        split_data = data.split('\r\n')
        # last item is the (possibly empty) trailing incomplete message
        self.incomplete_buffer = split_data.pop(-1)
        return split_data

    def parse_message(self, message):
        '''
        Takes messages from the socket and converts them into internal events.

        Returns None when the line cannot be matched by the IRC regex.
        '''
        m = self.ircmsg.match(message)
        if not m:
            # log.warning: log.warn is deprecated
            self.log.warning(u'Couldn\'t match message {0}'.format(message))
            return None
        postfix = m.group('postfix')
        if postfix:
            postfix = postfix.strip(' ')
            postfix = postfix.lstrip(':')
        command = m.group('command')
        prefix = m.group('prefix')
        if prefix:
            prefix = prefix.strip(' ')
            prefix = prefix.lstrip(':')
        params = m.group('params')
        if params:
            params = params.strip(' ')
            params = params.split(' ')
        self.log.debug(u'Cleaned message, prefix = {0}, command = {1}, params = {2}, postfix = {3}'.format(prefix, command, params, postfix))
        self.log.info(u'<< {0} {1} {2} {3}'.format(prefix, command, params, postfix))
        return eu.irc_msg(command, (command, prefix, params, postfix))

    # Everything below this point are handlers for events from botcore
    def msgs_all(self, msgs, channels):
        '''
        Accepts a list of messages to send to a list of channels.

        msgs: A list of messages to send
        channels: A list of targets to send it to
        '''
        for channel in channels:
            for message in msgs:
                self.msg(message, channel)

    def msg_all(self, message, channels):
        '''
        Accepts a message to send to a list of channels.

        message: the message to send
        channels: A list of targets to send it to
        '''
        for channel in channels:
            self.msg(message, channel)

    def msg(self, message, channel):
        '''
        Send a message to a specific target.

        message: the message to send
        channel: the target to send it to

        This method takes care of enforcing the 512 character limit:
        right now it does it very simply by cutting the message at char 510
        (leaving space for the \\r\\n) and calling msg again with the
        remainder.  Later on it might be improved by finding the nearest
        space to cut on under the limit.
        '''
        msg = u'PRIVMSG {0} :{1}'.format(channel, message)
        if len(msg) > 512:
            sending = msg[:510]
            remainder = msg[510:]
            self.send(sending)
            self.msg(remainder, channel)
        else:
            self.send(msg)

    def msgs(self, msgs, channel):
        '''
        Send a list of msgs to a channel.
        '''
        for msg in msgs:
            self.msg(msg, channel)

    def notice(self, message, channel):
        '''
        Send a notice to a specific target.

        message: the message to send
        channel: the target to send it to

        Enforces the 512 character limit the same way msg() does.  The
        overflow remainder is re-sent through notice() — previously it was
        routed through msg(), which wrongly delivered the tail of a long
        NOTICE as a PRIVMSG.
        '''
        msg = u'NOTICE {0} :{1}'.format(channel, message)
        if len(msg) > 512:
            sending = msg[:510]
            remainder = msg[510:]
            self.send(sending)
            self.notice(remainder, channel)
        else:
            self.send(msg)

    def notice_all(self, message, channels):
        '''
        Accepts a notice to send to a list of channels.

        message: the message to send
        channels: A list of targets to send it to
        '''
        for channel in channels:
            self.notice(message, channel)

    def join(self, channel):
        '''
        Join a channel.

        channel: the channel to join
        '''
        self.send(u'JOIN {0}'.format(channel))

    def quit(self, message):
        '''
        Disconnects from a server with an optional QUIT message.
        '''
        if message:
            self.send(u'QUIT :{0}'.format(message))
        else:
            self.send(u'QUIT')

    def kill(self):
        '''
        Stop the network loop on its next iteration.
        '''
        self.is_running = False

    def leave(self, channel, message):
        '''
        Leaves a channel, optionally sending a message to the channel first.

        channel: Channel to leave
        message: optional message to send first
        '''
        if message:
            self.msg(message, channel)
        self.send(u'PART {0}'.format(channel))

    def connect(self, server, port):
        '''
        Connect to a server.

        The socket is non-blocking, so connect() normally raises
        EINPROGRESS (POSIX) or WSAEWOULDBLOCK (Windows, winerror 10035);
        both simply mean the connection is still being established.
        '''
        try:
            self.socket.connect((server, port))
        except socket.error as e:
            # errno.EINPROGRESS is the portable spelling of the old
            # hard-coded Linux value 115; 10035 is Windows WSAEWOULDBLOCK.
            if e.errno not in (errno.EINPROGRESS, 10035):
                raise
        self.connected = True

    def nick(self, nick):
        '''
        Send the NICK command with the given nick.
        '''
        self.send(u'NICK {0}'.format(nick))

    def user(self, nick, realname):
        '''
        Send the USER command with the given realname and nick.
        HOSTNAME and SERVERNAME are given as pybot.
        '''
        self.send(u'USER {0} pybot pybot :{1}'.format(nick, realname))

    def pong(self, msg):
        '''
        Reply to a server PING with the matching PONG payload.
        '''
        self.send(u'PONG {0}'.format(msg))

    def names(self, channels):
        '''
        Send the NAMES command for the given channels (or bare NAMES).
        '''
        if channels:
            self.send(u'NAMES {0}'.format(','.join(channels)))
        else:
            self.send(u'NAMES')

    def who(self, param):
        '''
        Send the WHO command with the given param (or bare WHO).
        '''
        if param:
            self.send(u'WHO {0}'.format(param))
        else:
            self.send(u'WHO')
|
optimumtact/simplepybot
|
network.py
|
Python
|
mit
| 13,092
|
"""
weasyprint.tests.test_draw.test_tables
--------------------------------------
Test how tables are drawn.
"""
import pytest
from weasyprint.html import HTML_HANDLERS
from ..testing_utils import assert_no_logs
from . import assert_pixels, parse_pixels
# Extra pixel letters used by these tests, mapped to the RGB value expected
# after alpha-compositing the given colour onto the page background.
PIX_BY_CHAR_OVERRIDES = {
    # rgba(255, 0, 0, 0.5) above #fff
    'r': (255, 127, 127),
    # rgba(0, 255, 0, 0.5) above #fff
    'g': (127, 255, 127),
    # r above B above #fff.
    'b': (128, 0, 127),
}
def to_pix(pixels_str):
    """Parse an ASCII pixel grid using this module's extra colour letters."""
    overrides = PIX_BY_CHAR_OVERRIDES
    return parse_pixels(pixels_str, overrides)
# TODO: refactor colspan/rowspan into CSS:
# td, th { column-span: attr(colspan integer) }
# Register the x-td/x-th test elements so they behave like real table cells.
HTML_HANDLERS['x-td'] = HTML_HANDLERS['td']
HTML_HANDLERS['x-th'] = HTML_HANDLERS['th']
tables_source = '''
<style>
@page { size: 28px; background: #fff }
x-table { margin: 1px; padding: 1px; border-spacing: 1px;
border: 1px solid transparent }
x-td { width: 2px; height: 2px; padding: 1px;
border: 1px solid transparent }
%(extra_css)s
</style>
<x-table>
<x-colgroup>
<x-col></x-col>
<x-col></x-col>
</x-colgroup>
<x-col></x-col>
<x-tbody>
<x-tr>
<x-td></x-td>
<x-td rowspan=2></x-td>
<x-td></x-td>
</x-tr>
<x-tr>
<x-td colspan=2></x-td>
<x-td></x-td>
</x-tr>
</x-tbody>
<x-tr>
<x-td></x-td>
<x-td></x-td>
</x-tr>
</x-table>
'''
@assert_no_logs
def test_tables_1():
    """Separate borders, fixed layout, LTR: semi-transparent cell borders."""
    assert_pixels('table_borders', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__r____r_r____r_r____r__B_
        _B__r____r_r____r_r____r__B_
        _B__r____r_r____r_r____r__B_
        _B__r____r_r____r_r____r__B_
        _B__rrrrrr_r____r_rrrrrr__B_
        _B_________r____r_________B_
        _B__rrrrrrrSrrrrS_rrrrrr__B_
        _B__r______r____S_r____r__B_
        _B__r______r____S_r____r__B_
        _B__r______r____S_r____r__B_
        _B__r______r____S_r____r__B_
        _B__rrrrrrrSSSSSS_rrrrrr__B_
        _B________________________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__r____r_r____r_________B_
        _B__r____r_r____r_________B_
        _B__r____r_r____r_________B_
        _B__r____r_r____r_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
      x-table { border-color: #00f; table-layout: fixed }
      x-td { border-color: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_1_rtl():
    """Separate borders, fixed layout, RTL: mirror image of test_tables_1."""
    assert_pixels('table_borders_rtl', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__r____r_r____r_r____r__B_
        _B__r____r_r____r_r____r__B_
        _B__r____r_r____r_r____r__B_
        _B__r____r_r____r_r____r__B_
        _B__rrrrrr_r____r_rrrrrr__B_
        _B_________r____r_________B_
        _B__rrrrrr_SrrrrSrrrrrrr__B_
        _B__r____r_S____r______r__B_
        _B__r____r_S____r______r__B_
        _B__r____r_S____r______r__B_
        _B__r____r_S____r______r__B_
        _B__rrrrrr_SSSSSSrrrrrrr__B_
        _B________________________B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________r____r_r____r__B_
        _B_________r____r_r____r__B_
        _B_________r____r_r____r__B_
        _B_________r____r_r____r__B_
        _B_________rrrrrr_rrrrrr__B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
      x-table { border-color: #00f; table-layout: fixed;
                direction: rtl; }
      x-td { border-color: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_2():
    """Collapsed borders, fixed layout, LTR."""
    assert_pixels('table_collapsed_borders', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBB_________
        _BBBBBBBBBBBBBBBBBB_________
        _BB____r____r____BB_________
        _BB____r____r____BB_________
        _BB____r____r____BB_________
        _BB____r____r____BB_________
        _BBrrrrr____rrrrrBB_________
        _BB_________r____BB_________
        _BB_________r____BB_________
        _BB_________r____BB_________
        _BB_________r____BB_________
        _BBrrrrrrrrrrrrrrBB_________
        _BB____r____r____BB_________
        _BB____r____r____BB_________
        _BB____r____r____BB_________
        _BB____r____r____BB_________
        _BBBBBBBBBBBBBBBBBB_________
        _BBBBBBBBBBBBBBBBBB_________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
    '''), tables_source % {'extra_css': '''
      x-table { border: 2px solid #00f; table-layout: fixed;
                border-collapse: collapse }
      x-td { border-color: #ff7f7f }
    '''})
@assert_no_logs
def test_tables_2_rtl():
    """Collapsed borders, fixed layout, RTL: mirror image of test_tables_2."""
    assert_pixels('table_collapsed_borders_rtl', 28, 28, to_pix('''
        ____________________________
        _________BBBBBBBBBBBBBBBBBB_
        _________BBBBBBBBBBBBBBBBBB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BBrrrrr____rrrrrBB_
        _________BB____r_________BB_
        _________BB____r_________BB_
        _________BB____r_________BB_
        _________BB____r_________BB_
        _________BBrrrrrrrrrrrrrrBB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BBBBBBBBBBBBBBBBBB_
        _________BBBBBBBBBBBBBBBBBB_
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
    '''), tables_source % {'extra_css': '''
      body { direction: rtl; }
      x-table { border: 2px solid #00f; table-layout: fixed;
                border-collapse: collapse; }
      x-td { border-color: #ff7f7f }
    '''})
@assert_no_logs
def test_tables_3():
    """Collapsed borders split across two pages, LTR."""
    assert_pixels('table_collapsed_borders_paged', 28, 52, to_pix('''
        ____________________________
        _gggggggggggggggggggggggggg_
        _g________________________g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BB____r____r____BB_____g_
        _g_BB____r____r____BB_____g_
        _g_BB____r____r____BB_____g_
        _g_BB____r____r____BB_____g_
        _g_BBrrrrr____rrrrrBB_____g_
        _g_BB_________r____BB_____g_
        _g_BB_________r____BB_____g_
        _g_BB_________r____BB_____g_
        _g_BB_________r____BB_____g_
        _g_BBrrrrrrrrrrrrrrBB_____g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _gggggggggggggggggggggggggg_
        ____________________________
        ____________________________
        _gggggggggggggggggggggggggg_
        _g_BBrrrrrrrrrrrrrrBB_____g_
        _g_BB____r____r____BB_____g_
        _g_BB____r____r____BB_____g_
        _g_BB____r____r____BB_____g_
        _g_BB____r____r____BB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g_BBBBBBBBBBBBBBBBBB_____g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _gggggggggggggggggggggggggg_
        ____________________________
    '''), tables_source % {'extra_css': '''
      x-table { border: solid #00f; border-width: 8px 2px;
                table-layout: fixed; border-collapse: collapse }
      x-td { border-color: #ff7f7f }
      @page { size: 28px 26px; margin: 1px;
              border: 1px solid rgba(0, 255, 0, 0.5); }
    '''})
@assert_no_logs
def test_tables_3_rtl():
    """Collapsed borders split across two pages, RTL: mirror of test_tables_3."""
    assert_pixels('table_collapsed_borders_paged_rtl', 28, 52, to_pix('''
        ____________________________
        _gggggggggggggggggggggggggg_
        _g________________________g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BBrrrrr____rrrrrBB_g_
        _g_____BB____r_________BB_g_
        _g_____BB____r_________BB_g_
        _g_____BB____r_________BB_g_
        _g_____BB____r_________BB_g_
        _g_____BBrrrrrrrrrrrrrrBB_g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _gggggggggggggggggggggggggg_
        ____________________________
        ____________________________
        _gggggggggggggggggggggggggg_
        _g_____BBrrrrrrrrrrrrrrBB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _gggggggggggggggggggggggggg_
        ____________________________
    '''), tables_source % {'extra_css': '''
      body { direction: rtl; }
      x-table { border: solid #00f; border-width: 8px 2px;
                table-layout: fixed; border-collapse: collapse; }
      x-td { border-color: #ff7f7f }
      @page { size: 28px 26px; margin: 1px;
              border: 1px solid rgba(0, 255, 0, 0.5); }
    '''})
@assert_no_logs
def test_tables_4():
    """Semi-transparent cell backgrounds, fixed layout, LTR."""
    assert_pixels('table_td_backgrounds', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B_________rrrrrr_________B_
        _B__rrrrrrrSSSSSS_rrrrrr__B_
        _B__rrrrrrrSSSSSS_rrrrrr__B_
        _B__rrrrrrrSSSSSS_rrrrrr__B_
        _B__rrrrrrrSSSSSS_rrrrrr__B_
        _B__rrrrrrrSSSSSS_rrrrrr__B_
        _B__rrrrrrrSSSSSS_rrrrrr__B_
        _B________________________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
      x-table { border-color: #00f; table-layout: fixed }
      x-td { background: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_4_rtl():
    """Semi-transparent cell backgrounds, fixed layout, RTL: mirror of test_tables_4."""
    assert_pixels('table_td_backgrounds_rtl', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B__rrrrrr_rrrrrr_rrrrrr__B_
        _B_________rrrrrr_________B_
        _B__rrrrrr_SSSSSSrrrrrrr__B_
        _B__rrrrrr_SSSSSSrrrrrrr__B_
        _B__rrrrrr_SSSSSSrrrrrrr__B_
        _B__rrrrrr_SSSSSSrrrrrrr__B_
        _B__rrrrrr_SSSSSSrrrrrrr__B_
        _B__rrrrrr_SSSSSSrrrrrrr__B_
        _B________________________B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
      x-table { border-color: #00f; table-layout: fixed;
                direction: rtl; }
      x-td { background: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_5():
    """Row-group (x-tbody) and row (x-tr) backgrounds compose over cells."""
    assert_pixels('table_row_backgrounds', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B_________bbbbbb_________B_
        _B__bbbbbbbpppppp_bbbbbb__B_
        _B__bbbbbbbpppppp_bbbbbb__B_
        _B__bbbbbbbpppppp_bbbbbb__B_
        _B__bbbbbbbpppppp_bbbbbb__B_
        _B__bbbbbbbpppppp_bbbbbb__B_
        _B__bbbbbbbpppppp_bbbbbb__B_
        _B________________________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
        x-table { border-color: #00f; table-layout: fixed }
        x-tbody { background: rgba(0, 0, 255, 1) }
        x-tr { background: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_5_rtl():
    """Row-group and row backgrounds, RTL table direction."""
    assert_pixels('table_row_backgrounds_rtl', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B_________bbbbbb_________B_
        _B__bbbbbb_ppppppbbbbbbb__B_
        _B__bbbbbb_ppppppbbbbbbb__B_
        _B__bbbbbb_ppppppbbbbbbb__B_
        _B__bbbbbb_ppppppbbbbbbb__B_
        _B__bbbbbb_ppppppbbbbbbb__B_
        _B__bbbbbb_ppppppbbbbbbb__B_
        _B________________________B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________rrrrrr_rrrrrr__B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
        x-table { border-color: #00f; table-layout: fixed;
                  direction: rtl; }
        x-tbody { background: rgba(0, 0, 255, 1) }
        x-tr { background: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_6():
    """Column-group (x-colgroup) and column (x-col) backgrounds."""
    assert_pixels('table_column_backgrounds', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__bbbbbb_bbbbbb_rrrrrr__B_
        _B__bbbbbb_bbbbbb_rrrrrr__B_
        _B__bbbbbb_bbbbbb_rrrrrr__B_
        _B__bbbbbb_bbbbbb_rrrrrr__B_
        _B__bbbbbb_bbbbbb_rrrrrr__B_
        _B__bbbbbb_bbbbbb_rrrrrr__B_
        _B_________bbbbbb_________B_
        _B__bbbbbbbpppppp_rrrrrr__B_
        _B__bbbbbbbpppppp_rrrrrr__B_
        _B__bbbbbbbpppppp_rrrrrr__B_
        _B__bbbbbbbpppppp_rrrrrr__B_
        _B__bbbbbbbpppppp_rrrrrr__B_
        _B__bbbbbbbpppppp_rrrrrr__B_
        _B________________________B_
        _B__bbbbbb_bbbbbb_________B_
        _B__bbbbbb_bbbbbb_________B_
        _B__bbbbbb_bbbbbb_________B_
        _B__bbbbbb_bbbbbb_________B_
        _B__bbbbbb_bbbbbb_________B_
        _B__bbbbbb_bbbbbb_________B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
        x-table { border-color: #00f; table-layout: fixed;}
        x-colgroup { background: rgba(0, 0, 255, 1) }
        x-col { background: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_6_rtl():
    """Column-group and column backgrounds, RTL table direction."""
    assert_pixels('table_column_backgrounds_rtl', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__rrrrrr_bbbbbb_bbbbbb__B_
        _B__rrrrrr_bbbbbb_bbbbbb__B_
        _B__rrrrrr_bbbbbb_bbbbbb__B_
        _B__rrrrrr_bbbbbb_bbbbbb__B_
        _B__rrrrrr_bbbbbb_bbbbbb__B_
        _B__rrrrrr_bbbbbb_bbbbbb__B_
        _B_________bbbbbb_________B_
        _B__rrrrrr_ppppppbbbbbbb__B_
        _B__rrrrrr_ppppppbbbbbbb__B_
        _B__rrrrrr_ppppppbbbbbbb__B_
        _B__rrrrrr_ppppppbbbbbbb__B_
        _B__rrrrrr_ppppppbbbbbbb__B_
        _B__rrrrrr_ppppppbbbbbbb__B_
        _B________________________B_
        _B_________bbbbbb_bbbbbb__B_
        _B_________bbbbbb_bbbbbb__B_
        _B_________bbbbbb_bbbbbb__B_
        _B_________bbbbbb_bbbbbb__B_
        _B_________bbbbbb_bbbbbb__B_
        _B_________bbbbbb_bbbbbb__B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
        x-table { border-color: #00f; table-layout: fixed;
                  direction: rtl; }
        x-colgroup { background: rgba(0, 0, 255, 1) }
        x-col { background: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_7():
    """Cell borders drawn over a first-row background."""
    assert_pixels('table_borders_and_row_backgrounds', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bBBBBb_bBBBBb_bBBBBb__B_
        _B__bBBBBb_bBBBBb_bBBBBb__B_
        _B__bBBBBb_bBBBBb_bBBBBb__B_
        _B__bBBBBb_bBBBBb_bBBBBb__B_
        _B__bbbbbb_bBBBBb_bbbbbb__B_
        _B_________bBBBBb_________B_
        _B__rrrrrrrpbbbbp_rrrrrr__B_
        _B__r______bBBBBp_r____r__B_
        _B__r______bBBBBp_r____r__B_
        _B__r______bBBBBp_r____r__B_
        _B__r______bBBBBp_r____r__B_
        _B__rrrrrrrpppppp_rrrrrr__B_
        _B________________________B_
        _B__rrrrrr_rrrrrr_________B_
        _B__r____r_r____r_________B_
        _B__r____r_r____r_________B_
        _B__r____r_r____r_________B_
        _B__r____r_r____r_________B_
        _B__rrrrrr_rrrrrr_________B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
        x-table { border-color: #00f; table-layout: fixed }
        x-tr:first-child { background: blue }
        x-td { border-color: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_7_rtl():
    """Cell borders over a first-row background, RTL table direction."""
    assert_pixels('table_borders_and_row_backgrounds_rtl', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__bbbbbb_bbbbbb_bbbbbb__B_
        _B__bBBBBb_bBBBBb_bBBBBb__B_
        _B__bBBBBb_bBBBBb_bBBBBb__B_
        _B__bBBBBb_bBBBBb_bBBBBb__B_
        _B__bBBBBb_bBBBBb_bBBBBb__B_
        _B__bbbbbb_bBBBBb_bbbbbb__B_
        _B_________bBBBBb_________B_
        _B__rrrrrr_pbbbbprrrrrrr__B_
        _B__r____r_pBBBBb______r__B_
        _B__r____r_pBBBBb______r__B_
        _B__r____r_pBBBBb______r__B_
        _B__r____r_pBBBBb______r__B_
        _B__rrrrrr_pppppprrrrrrr__B_
        _B________________________B_
        _B_________rrrrrr_rrrrrr__B_
        _B_________r____r_r____r__B_
        _B_________r____r_r____r__B_
        _B_________r____r_r____r__B_
        _B_________r____r_r____r__B_
        _B_________rrrrrr_rrrrrr__B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
        x-table { border-color: #00f; table-layout: fixed;
                  direction: rtl; }
        x-tr:first-child { background: blue }
        x-td { border-color: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_8():
    """Cell borders drawn over a first-column background."""
    assert_pixels('table_borders_and_column_backgrounds', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__bbbbbb_rrrrrr_rrrrrr__B_
        _B__bBBBBb_r____r_r____r__B_
        _B__bBBBBb_r____r_r____r__B_
        _B__bBBBBb_r____r_r____r__B_
        _B__bBBBBb_r____r_r____r__B_
        _B__bbbbbb_r____r_rrrrrr__B_
        _B_________r____r_________B_
        _B__bbbbbbbpbbbbp_rrrrrr__B_
        _B__bBBBBBBbBBBBp_r____r__B_
        _B__bBBBBBBbBBBBp_r____r__B_
        _B__bBBBBBBbBBBBp_r____r__B_
        _B__bBBBBBBbBBBBp_r____r__B_
        _B__bbbbbbbpppppp_rrrrrr__B_
        _B________________________B_
        _B__bbbbbb_rrrrrr_________B_
        _B__bBBBBb_r____r_________B_
        _B__bBBBBb_r____r_________B_
        _B__bBBBBb_r____r_________B_
        _B__bBBBBb_r____r_________B_
        _B__bbbbbb_rrrrrr_________B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
        x-table { border-color: #00f; table-layout: fixed }
        x-col:first-child { background: blue }
        x-td { border-color: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_8_rtl():
    """Cell borders over a first-column background, RTL table direction."""
    assert_pixels('table_borders_and_column_backgrounds_rtl', 28, 28, to_pix('''
        ____________________________
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        _B________________________B_
        _B________________________B_
        _B__rrrrrr_rrrrrr_bbbbbb__B_
        _B__r____r_r____r_bBBBBb__B_
        _B__r____r_r____r_bBBBBb__B_
        _B__r____r_r____r_bBBBBb__B_
        _B__r____r_r____r_bBBBBb__B_
        _B__rrrrrr_r____r_bbbbbb__B_
        _B_________r____r_________B_
        _B__rrrrrr_pbbbbpbbbbbbb__B_
        _B__r____r_pBBBBbBBBBBBb__B_
        _B__r____r_pBBBBbBBBBBBb__B_
        _B__r____r_pBBBBbBBBBBBb__B_
        _B__r____r_pBBBBbBBBBBBb__B_
        _B__rrrrrr_ppppppbbbbbbb__B_
        _B________________________B_
        _B_________rrrrrr_bbbbbb__B_
        _B_________r____r_bBBBBb__B_
        _B_________r____r_bBBBBb__B_
        _B_________r____r_bBBBBb__B_
        _B_________r____r_bBBBBb__B_
        _B_________rrrrrr_bbbbbb__B_
        _B________________________B_
        _B________________________B_
        _BBBBBBBBBBBBBBBBBBBBBBBBBB_
        ____________________________
    '''), tables_source % {'extra_css': '''
        x-table { border-color: #00f; table-layout: fixed;
                  direction: rtl; }
        x-col:first-child { background: blue }
        x-td { border-color: rgba(255, 0, 0, 0.5) }
    '''})
@assert_no_logs
def test_tables_9():
    """Collapsed borders with a <thead> repeated on the second page."""
    assert_pixels('collapsed_border_thead', 22, 36, '''
        ______________________
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBB____R____R____BBB_
        _BBB____R____R____BBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        __R_____R____R_____R__
        __R_____R____R_____R__
        __RRRRRRRRRRRRRRRRRR__
        __R_____R____R_____R__
        __R_____R____R_____R__
        __RRRRRRRRRRRRRRRRRR__
        ______________________
        ______________________
        ______________________
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBB____R____R____BBB_
        _BBB____R____R____BBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        __R_____R____R_____R__
        __RRRRRRRRRRRRRRRRRR__
        ______________________
        ______________________
        ______________________
        ______________________
        ______________________
        ______________________
        ______________________
        ______________________
    ''', '''
      <style>
        @page { size: 22px 18px; margin: 1px; background: #fff }
        td { border: 1px red solid; width: 4px; height: 2px; }
      </style>
      <table style="table-layout: fixed; border-collapse: collapse">
        <thead style="border: blue solid; border-width: 3px;
          "><td></td><td></td><td></td></thead>
        <tr><td></td><td></td><td></td></tr>
        <tr><td></td><td></td><td></td></tr>
        <tr><td></td><td></td><td></td></tr>''')
@assert_no_logs
def test_tables_10():
    """Collapsed borders with a <tfoot> repeated on the second page."""
    assert_pixels('collapsed_border_tfoot', 22, 34, '''
        ______________________
        __RRRRRRRRRRRRRRRRRR__
        __R_____R____R_____R__
        __R_____R____R_____R__
        __RRRRRRRRRRRRRRRRRR__
        __R_____R____R_____R__
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBB____R____R____BBB_
        _BBB____R____R____BBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        ______________________
        ______________________
        ______________________
        ______________________
        __RRRRRRRRRRRRRRRRRR__
        __R_____R____R_____R__
        __R_____R____R_____R__
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBB____R____R____BBB_
        _BBB____R____R____BBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        ______________________
        ______________________
        ______________________
        ______________________
        ______________________
    ''', '''
      <style>
        @page { size: 22px 17px; margin: 1px; background: #fff }
        td { border: 1px red solid; width: 4px; height: 2px; }
      </style>
      <table style="table-layout: fixed; margin-left: 1px;
                    border-collapse: collapse">
        <tr><td></td><td></td><td></td></tr>
        <tr><td></td><td></td><td></td></tr>
        <tr><td></td><td></td><td></td></tr>
        <tfoot style="border: blue solid; border-width: 3px;
          "><td></td><td></td><td></td></tfoot>''')
@assert_no_logs
def test_tables_11():
    # Regression test for inline table with collapsed border and alignment
    # rendering borders incorrectly
    # https://github.com/Kozea/WeasyPrint/issues/82
    """Inline table with collapsed borders and text-align: right."""
    assert_pixels('inline_text_align', 20, 10, '''
        ____________________
        ________RRRRRRRRRRR_
        ________R____R____R_
        ________R____R____R_
        ________R____R____R_
        ________RRRRRRRRRRR_
        ____________________
        ____________________
        ____________________
        ____________________
    ''', '''
      <style>
        @page { size: 20px 10px; margin: 1px; background: #fff }
        body { text-align: right; font-size: 0 }
        table { display: inline-table; width: 11px }
        td { border: 1px red solid; width: 4px; height: 3px }
      </style>
      <table style="table-layout: fixed; border-collapse: collapse">
        <tr><td></td><td></td></tr>''')
@assert_no_logs
def test_tables_12():
    """Collapsed borders on an RTL body: table anchored to the right."""
    assert_pixels('table_collapsed_borders', 28, 28, to_pix('''
        ____________________________
        _________BBBBBBBBBBBBBBBBBB_
        _________BBBBBBBBBBBBBBBBBB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BBrrrrr____rrrrrBB_
        _________BB____r_________BB_
        _________BB____r_________BB_
        _________BB____r_________BB_
        _________BB____r_________BB_
        _________BBrrrrrrrrrrrrrrBB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BB____r____r____BB_
        _________BBBBBBBBBBBBBBBBBB_
        _________BBBBBBBBBBBBBBBBBB_
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
    '''), tables_source % {'extra_css': '''
        body { direction: rtl }
        x-table { border: 2px solid #00f; table-layout: fixed;
                  border-collapse: collapse }
        x-td { border-color: #ff7f7f }
    '''})
@assert_no_logs
def test_tables_13():
    """Collapsed borders split across two pages (RTL body, page border)."""
    assert_pixels('table_collapsed_borders_paged', 28, 52, to_pix('''
        ____________________________
        _gggggggggggggggggggggggggg_
        _g________________________g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BBrrrrr____rrrrrBB_g_
        _g_____BB____r_________BB_g_
        _g_____BB____r_________BB_g_
        _g_____BB____r_________BB_g_
        _g_____BB____r_________BB_g_
        _g_____BBrrrrrrrrrrrrrrBB_g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _gggggggggggggggggggggggggg_
        ____________________________
        ____________________________
        _gggggggggggggggggggggggggg_
        _g_____BBrrrrrrrrrrrrrrBB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BB____r____r____BB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g_____BBBBBBBBBBBBBBBBBB_g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _g________________________g_
        _gggggggggggggggggggggggggg_
        ____________________________
    '''), tables_source % {'extra_css': '''
        body { direction: rtl }
        x-table { border: solid #00f; border-width: 8px 2px;
                  table-layout: fixed; border-collapse: collapse }
        x-td { border-color: #ff7f7f }
        @page { size: 28px 26px; margin: 1px;
                border: 1px solid rgba(0, 255, 0, 0.5); }
    '''})
@pytest.mark.xfail
@assert_no_logs
def test_tables_14():
    """Column backgrounds across a page break (known failure, xfail)."""
    assert_pixels('table_background_column_paged', 28, 52, to_pix('''
        ____________________________
        _RRR_RRR_RRR________________
        _RRR_RRR_RRR________________
        _RRR_RRR_RRR________________
        _RRR_RRR_RRR________________
        _RRR_RRR_RRR________________
        _RRR_RRR_RRR________________
        _RRR_RRR_RRR________________
        _RRR_RRR_RRR________________
        _RRR_RRR_RRR________________
        _RRR_RRR_RRR________________
        _____RRR____________________
        _RRRRRRR_RRR________________
        _RRRRRRR_RRR________________
        _RRRRRRR_RRR________________
        _RRRRRRR_RRR________________
        _RRRRRRR_RRR________________
        _RRRRRRR_RRR________________
        _RRRRRRR_RRR________________
        _RRRRRRR_RRR________________
        _RRRRRRR_RRR________________
        _RRRRRRR_RRR________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        _RRR_RRR____________________
        _RRR_RRR____________________
        _RRR_RRR____________________
        _RRR_RRR____________________
        _RRR_RRR____________________
        _RRR_RRR____________________
        _RRR_RRR____________________
        _RRR_RRR____________________
        _RRR_RRR____________________
        _RRR_RRR____________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
        ____________________________
    '''), tables_source % {'extra_css': '''
        @page { size: 28px 26px }
        x-table { margin: 0; padding: 0; border: 0 }
        x-col { background: red }
        x-td { padding: 0; width: 1px; height: 8px }
    '''})
@assert_no_logs
def test_tables_15():
    # Regression test for colspan in last body line with footer
    # https://github.com/Kozea/WeasyPrint/issues/1250
    """Colspan in the last body row combined with a repeated <tfoot>."""
    assert_pixels('colspan_last_row', 22, 36, '''
        ______________________
        __RRRRRRRRRRRRRRRRRR__
        __R_____R____R_____R__
        __R_____R____R_____R__
        __R_____R____R_____R__
        __RRRRRRRRRRRRRRRRRR__
        __R_____R____R_____R__
        __R_____R____R_____R__
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBB____R____R____BBB_
        _BBB____R____R____BBB_
        _BBB____R____R____BBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        ______________________
        ______________________
        __RRRRRRRRRRRRRRRRRR__
        __R________________R__
        __R________________R__
        __R________________R__
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBB____R____R____BBB_
        _BBB____R____R____BBB_
        _BBB____R____R____BBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        _BBBBBBBBBBBBBBBBBBBB_
        ______________________
        ______________________
        ______________________
        ______________________
    ''', '''
      <style>
        @page { size: 22px 18px; margin: 1px; background: #fff }
        td { border: 1px red solid; width: 4px; height: 3px; }
      </style>
      <table style="table-layout: fixed; margin-left: 1px;
                    border-collapse: collapse">
        <tr><td></td><td></td><td></td></tr>
        <tr><td></td><td></td><td></td></tr>
        <tr><td colspan="3"></td></tr>
        <tfoot style="border: blue solid; border-width: 3px;
          "><td></td><td></td><td></td></tfoot>''')
@assert_no_logs
def test_tables_16():
    """Absolutely positioned tables placed by top/left and bottom/right."""
    assert_pixels('table_absolute', 20, 10, '''
        ____________________
        _RRRRRRRRRRR________
        _R____R____R________
        _R____R____R________
        _R____R_RRRRRRRRRRR_
        _RRRRRRRRRRR_R____R_
        ________R____R____R_
        ________R____R____R_
        ________RRRRRRRRRRR_
        ____________________
    ''', '''
      <style>
        @page { size: 20px 10px; margin: 1px; background: #fff }
        body { text-align: right; font-size: 0 }
        table { position: absolute; width: 11px;
                table-layout: fixed; border-collapse: collapse }
        td { border: 1px red solid; width: 4px; height: 3px }
      </style>
      <table style="top: 0; left: 0">
        <tr><td></td><td></td></tr>
      <table style="bottom: 0; right: 0">
        <tr><td></td><td></td></tr>''')
@assert_no_logs
def test_tables_17():
    """Collapsed-border table split across pages keeps its borders."""
    assert_pixels('table_split_collapse', 16, 20, '''
        ________________
        _RRRRRRRRRRRRRR_
        _RRRRRRRRRRRRRR_
        _RR____RR____RR_
        _RR_BB_RR_BB_RR_
        _RR_BB_RR_BB_RR_
        _RR_BB_RR____RR_
        _RR_BB_RR____RR_
        _RR____RR____RR_
        ________________
        ________________
        _RR_BB_RR____RR_
        _RR_BB_RR____RR_
        _RR_BB_RR____RR_
        _RR_BB_RR____RR_
        _RR____RR____RR_
        _RRRRRRRRRRRRRR_
        _RRRRRRRRRRRRRR_
        ________________
        ________________
    ''', '''
      <style>
        @font-face {src: url(weasyprint.otf); font-family: weasyprint}
        @page { size: 16px 10px; margin: 1px; background: #fff }
        table { border-collapse: collapse; font-size: 2px; line-height: 1;
                color: blue; font-family: weasyprint }
        td { border: 2px red solid; padding: 1px; line-height: 1 }
      </style>
      <table><tr><td>a a a a</td><td>a</td></tr>''')
@assert_no_logs
def test_tables_18():
    """Separate-borders table split across pages keeps its borders."""
    assert_pixels('table_split_separate', 12, 22, '''
        ____________
        _RRRRRRRRRR_
        _R________R_
        _R_RRRRRR_R_
        _R_R____R_R_
        _R_R_BB_R_R_
        _R_R_BB_R_R_
        _R_R_BB_R_R_
        _R_R_BB_R_R_
        _R_R____R_R_
        ____________
        ____________
        _R_R_BB_R_R_
        _R_R_BB_R_R_
        _R_R_BB_R_R_
        _R_R_BB_R_R_
        _R_R____R_R_
        _R_RRRRRR_R_
        _R________R_
        _RRRRRRRRRR_
        ____________
        ____________
    ''', '''
      <style>
        @font-face {src: url(weasyprint.otf); font-family: weasyprint}
        @page { size: 12px 11px; margin: 1px; background: #fff }
        table { border: 1px red solid; border-spacing: 1px; font-size: 2px;
                line-height: 1; color: blue; font-family: weasyprint }
        td { border: 1px red solid; padding: 1px; line-height: 1; }
      </style>
      <table><tr><td>a a a a</td></tr>''')
@assert_no_logs
def test_tables_19():
    # Regression test: https://github.com/Kozea/WeasyPrint/issues/1523
    """Collapsed-border table split across pages must not crash."""
    assert_pixels('table_split_crash', 2, 8, '''
        RR
        RR
        RR
        RR
        RR
        RR
        RR
        RR
    ''', '''
      <style>
        @font-face {src: url(weasyprint.otf); font-family: weasyprint}
        @page {size: 2px 4px}
        table {border-collapse: collapse; color: red}
        body {font-size: 2px; font-family: weasyprint; line-height: 1}
      </style>
      <table><tr><td>a a a a</td></tr></table>''')
|
Kozea/WeasyPrint
|
tests/draw/test_table.py
|
Python
|
bsd-3-clause
| 40,615
|
# Copyright 2014 VMware, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import json
from oslo.config import cfg
from neutron.common import constants
from neutron.common import exceptions as exception
from neutron.openstack.common import log
from neutron.plugins.vmware.api_client import exception as api_exc
from neutron.plugins.vmware.common import exceptions as nsx_exc
from neutron.plugins.vmware.common import utils
from neutron.plugins.vmware import nsxlib
# HTTP methods passed to nsxlib.do_request() when talking to the NSX API.
HTTP_GET = "GET"
HTTP_POST = "POST"
HTTP_DELETE = "DELETE"
HTTP_PUT = "PUT"
# NSX API resource names: logical switches and their logical-port
# sub-resource ("lport/lswitch").
LSWITCH_RESOURCE = "lswitch"
LSWITCHPORT_RESOURCE = "lport/%s" % LSWITCH_RESOURCE
# Module-level logger.
LOG = log.getLogger(__name__)
def _configure_extensions(lport_obj, mac_address, fixed_ips,
port_security_enabled, security_profiles,
queue_id, mac_learning_enabled,
allowed_address_pairs):
lport_obj['allowed_address_pairs'] = []
if port_security_enabled:
for fixed_ip in fixed_ips:
ip_address = fixed_ip.get('ip_address')
if ip_address:
lport_obj['allowed_address_pairs'].append(
{'mac_address': mac_address, 'ip_address': ip_address})
# add address pair allowing src_ip 0.0.0.0 to leave
# this is required for outgoing dhcp request
lport_obj["allowed_address_pairs"].append(
{"mac_address": mac_address,
"ip_address": "0.0.0.0"})
lport_obj['security_profiles'] = list(security_profiles or [])
lport_obj['queue_uuid'] = queue_id
if mac_learning_enabled is not None:
lport_obj["mac_learning"] = mac_learning_enabled
lport_obj["type"] = "LogicalSwitchPortConfig"
for address_pair in list(allowed_address_pairs or []):
lport_obj['allowed_address_pairs'].append(
{'mac_address': address_pair['mac_address'],
'ip_address': address_pair['ip_address']})
def get_lswitch_by_id(cluster, lswitch_id):
    """Return the logical switch identified by *lswitch_id*.

    The result includes the LogicalSwitchStatus relation.  Raises
    NetworkNotFound when NSX does not know the switch.
    """
    try:
        uri = nsxlib._build_uri_path(
            LSWITCH_RESOURCE, lswitch_id, relations="LogicalSwitchStatus")
        return nsxlib.do_request(HTTP_GET, uri, cluster=cluster)
    except exception.NotFound:
        # FIXME(salv-orlando): this should not raise a neutron exception
        raise exception.NetworkNotFound(net_id=lswitch_id)
def get_lswitches(cluster, neutron_net_id):
    """Return all logical switches implementing a neutron network.

    A network may span several switches; extra ones are tagged with
    quantum_net_id.  Raises NetworkNotFound when nothing is found.
    """
    def _tagged_switches():
        # Fetch extra logical switches carrying the neutron net id tag.
        query_uri = nsxlib._build_uri_path(
            LSWITCH_RESOURCE,
            fields="uuid,display_name,tags,lport_count",
            relations="LogicalSwitchStatus",
            filters={'tag': neutron_net_id,
                     'tag_scope': 'quantum_net_id'})
        return nsxlib.get_all_query_pages(query_uri, cluster)

    main_uri = nsxlib._build_uri_path(LSWITCH_RESOURCE, neutron_net_id,
                                      relations="LogicalSwitchStatus")
    switches = []
    try:
        main_switch = nsxlib.do_request(HTTP_GET, main_uri, cluster=cluster)
        switches.append(main_switch)
        for tag in main_switch['tags']:
            if tag['scope'] == "multi_lswitch" and tag['tag'] == "True":
                switches.extend(_tagged_switches())
    except exception.NotFound:
        # This is legit if the neutron network was created using
        # a post-Havana version of the plugin
        switches.extend(_tagged_switches())
    if not switches:
        raise exception.NetworkNotFound(net_id=neutron_net_id)
    return switches
def create_lswitch(cluster, neutron_net_id, tenant_id, display_name,
                   transport_zones_config,
                   shared=None,
                   **kwargs):
    """Create a logical switch on the NSX backend and return its body.

    Extra tags may be passed via kwargs['tags'].
    """
    # The tag scope adopts a slightly different naming convention for
    # historical reasons
    tags = utils.get_tags(os_tid=tenant_id,
                          quantum_net_id=neutron_net_id)
    switch_spec = {
        "display_name": utils.check_and_truncate(display_name),
        "transport_zones": transport_zones_config,
        "replication_mode": cfg.CONF.NSX.replication_mode,
        "tags": tags,
    }
    # TODO(salv-orlando): Now that we have async status synchronization
    # this tag is perhaps not needed anymore
    if shared:
        tags.append({"tag": "true", "scope": "shared"})
    tags.extend(kwargs.get("tags", []))
    uri = nsxlib._build_uri_path(LSWITCH_RESOURCE)
    lswitch = nsxlib.do_request(HTTP_POST, uri, json.dumps(switch_spec),
                                cluster=cluster)
    LOG.debug(_("Created logical switch: %s"), lswitch['uuid'])
    return lswitch
def update_lswitch(cluster, lswitch_id, display_name,
                   tenant_id=None, **kwargs):
    """Update display name and tags of an existing logical switch.

    Raises NetworkNotFound when the switch does not exist on the backend.
    """
    switch_spec = {"display_name": utils.check_and_truncate(display_name),
                   "tags": utils.get_tags(os_tid=tenant_id)}
    switch_spec["tags"].extend(kwargs.get("tags", []))
    uri = nsxlib._build_uri_path(LSWITCH_RESOURCE, resource_id=lswitch_id)
    try:
        return nsxlib.do_request(HTTP_PUT, uri, json.dumps(switch_spec),
                                 cluster=cluster)
    except exception.NotFound as err:
        LOG.error(_("Network not found, Error: %s"), str(err))
        raise exception.NetworkNotFound(net_id=lswitch_id)
def delete_network(cluster, net_id, lswitch_id):
    """Delete the single logical switch backing a neutron network."""
    delete_networks(cluster, net_id, [lswitch_id])
#TODO(salvatore-orlando): Simplify and harmonize
def delete_networks(cluster, net_id, lswitch_ids):
    """Delete every logical switch in *lswitch_ids* from the backend.

    Raises NetworkNotFound (carrying the missing switch id) if any switch
    is unknown to NSX.
    """
    for switch_id in lswitch_ids:
        uri = "/ws.v1/lswitch/%s" % switch_id
        try:
            nsxlib.do_request(HTTP_DELETE, uri, cluster=cluster)
        except exception.NotFound as err:
            LOG.error(_("Network not found, Error: %s"), str(err))
            raise exception.NetworkNotFound(net_id=switch_id)
def query_lswitch_lports(cluster, ls_uuid, fields="*",
                         filters=None, relations=None):
    """Return the logical ports of a logical switch.

    The neutron-style "attachment" filter key is translated to the NSX
    "attachment_vif_uuid" key before querying.

    :param filters: optional dict of NSX query filters; NOT mutated.
    :returns: the 'results' list from the NSX response.
    """
    if filters and "attachment" in filters:
        # Translate on a copy: the original code deleted the key from the
        # caller's dict, leaking a side effect to callers reusing filters.
        filters = dict(filters)
        filters['attachment_vif_uuid'] = filters.pop("attachment")
    uri = nsxlib._build_uri_path(LSWITCHPORT_RESOURCE,
                                 parent_resource_id=ls_uuid,
                                 fields=fields,
                                 filters=filters,
                                 relations=relations)
    return nsxlib.do_request(HTTP_GET, uri, cluster=cluster)['results']
def delete_port(cluster, switch, port):
    """Delete a logical port from a logical switch.

    Raises PortNotFoundOnNetwork if either object is missing, and
    NeutronException on any other NSX API failure.
    """
    uri = "/ws.v1/lswitch/%s/lport/%s" % (switch, port)
    try:
        nsxlib.do_request(HTTP_DELETE, uri, cluster=cluster)
    except exception.NotFound:
        LOG.exception(_("Port or Network not found"))
        raise exception.PortNotFoundOnNetwork(
            net_id=switch, port_id=port)
    except api_exc.NsxApiException:
        raise exception.NeutronException()
def get_ports(cluster, networks=None, devices=None, tenants=None):
    """Return NSX logical ports indexed by their neutron port id.

    Ports are matched by the q_port_id tag; results can be narrowed by
    network (only the first entry of *networks* is honored), device ids
    and tenant ids.  Raises NsxPluginException on any backend failure.
    """
    # Each filter string is built with a trailing '&' so it can be spliced
    # directly into the query string below.
    vm_filter_obsolete = ""
    vm_filter = ""
    tenant_filter = ""
    # This is used when calling delete_network. Neutron checks to see if
    # the network has any ports.
    if networks:
        # FIXME (Aaron) If we get more than one network_id this won't work
        lswitch = networks[0]
    else:
        lswitch = "*"
    if devices:
        for device_id in devices:
            # Old deployments obfuscated the vm_id tag; query both forms.
            vm_filter_obsolete = '&'.join(
                ["tag_scope=vm_id",
                 "tag=%s" % utils.device_id_to_vm_id(device_id,
                                                     obfuscate=True),
                 vm_filter_obsolete])
            vm_filter = '&'.join(
                ["tag_scope=vm_id",
                 "tag=%s" % utils.device_id_to_vm_id(device_id),
                 vm_filter])
    if tenants:
        for tenant in tenants:
            tenant_filter = '&'.join(
                ["tag_scope=os_tid",
                 "tag=%s" % tenant,
                 tenant_filter])
    nsx_lports = {}
    lport_fields_str = ("tags,admin_status_enabled,display_name,"
                        "fabric_status_up")
    try:
        lport_query_path_obsolete = (
            "/ws.v1/lswitch/%s/lport?fields=%s&%s%stag_scope=q_port_id"
            "&relations=LogicalPortStatus" %
            (lswitch, lport_fields_str, vm_filter_obsolete, tenant_filter))
        lport_query_path = (
            "/ws.v1/lswitch/%s/lport?fields=%s&%s%stag_scope=q_port_id"
            "&relations=LogicalPortStatus" %
            (lswitch, lport_fields_str, vm_filter, tenant_filter))
        try:
            # NOTE(armando-migliaccio): by querying with obsolete tag first
            # current deployments won't take the performance hit of a double
            # call. In release L-** or M-**, we might want to swap the calls
            # as it's likely that ports with the new tag would outnumber the
            # ones with the old tag
            ports = nsxlib.get_all_query_pages(lport_query_path_obsolete,
                                               cluster)
            if not ports:
                ports = nsxlib.get_all_query_pages(lport_query_path, cluster)
        except exception.NotFound:
            # Missing lswitch is non-fatal here: just return no ports.
            LOG.warn(_("Lswitch %s not found in NSX"), lswitch)
            ports = None
        if ports:
            # Index each port by the neutron port id stored in its tags.
            for port in ports:
                for tag in port["tags"]:
                    if tag["scope"] == "q_port_id":
                        nsx_lports[tag["tag"]] = port
    except Exception:
        err_msg = _("Unable to get ports")
        LOG.exception(err_msg)
        raise nsx_exc.NsxPluginException(err_msg=err_msg)
    return nsx_lports
def get_port_by_neutron_tag(cluster, lswitch_uuid, neutron_port_id):
    """Get port by neutron tag.

    Returns the NSX UUID of the logical port with tag q_port_id equal to
    neutron_port_id or None if the port is not Found.
    """
    uri = nsxlib._build_uri_path(LSWITCHPORT_RESOURCE,
                                 parent_resource_id=lswitch_uuid,
                                 fields='uuid',
                                 filters={'tag': neutron_port_id,
                                          'tag_scope': 'q_port_id'})
    LOG.debug(_("Looking for port with q_port_id tag '%(neutron_port_id)s' "
                "on: '%(lswitch_uuid)s'"),
              {'neutron_port_id': neutron_port_id,
               'lswitch_uuid': lswitch_uuid})
    matches = nsxlib.do_request(HTTP_GET, uri, cluster=cluster)["results"]
    if not matches:
        return None
    if len(matches) > 1:
        # Duplicate tags should never happen; log and fall through to the
        # first match, as before.
        LOG.warn(_("Found '%(num_ports)d' ports with "
                   "q_port_id tag: '%(neutron_port_id)s'. "
                   "Only 1 was expected."),
                 {'num_ports': len(matches),
                  'neutron_port_id': neutron_port_id})
    return matches[0]
def get_port(cluster, network, port, relations=None):
    """Fetch one logical port, optionally including extra relations.

    Raises PortNotFoundOnNetwork when the port or switch is missing.
    """
    LOG.info(_("get_port() %(network)s %(port)s"),
             {'network': network, 'port': port})
    uri = "/ws.v1/lswitch/%s/lport/%s?" % (network, port)
    if relations:
        uri += "relations=%s" % relations
    try:
        return nsxlib.do_request(HTTP_GET, uri, cluster=cluster)
    except exception.NotFound as err:
        LOG.error(_("Port or Network not found, Error: %s"), str(err))
        raise exception.PortNotFoundOnNetwork(
            port_id=port, net_id=network)
def update_port(cluster, lswitch_uuid, lport_uuid, neutron_port_id, tenant_id,
                display_name, device_id, admin_status_enabled,
                mac_address=None, fixed_ips=None, port_security_enabled=None,
                security_profiles=None, queue_id=None,
                mac_learning_enabled=None, allowed_address_pairs=None):
    """Update an existing logical port on the NSX backend.

    Rebuilds the whole port body (base attributes, Neutron bookkeeping
    tags and the optional extension attributes) and PUTs it to the
    port's URI.  Raises PortNotFoundOnNetwork if the switch or port is
    missing on the backend.
    """
    lport_obj = {
        'admin_status_enabled': admin_status_enabled,
        'display_name': utils.check_and_truncate(display_name),
        'tags': utils.get_tags(os_tid=tenant_id,
                               q_port_id=neutron_port_id,
                               vm_id=utils.device_id_to_vm_id(device_id)),
    }
    # Optional attributes (MAC, fixed IPs, security profiles, queue,
    # MAC learning, address pairs) are merged in by the shared helper
    # so create and update stay consistent.
    _configure_extensions(lport_obj, mac_address, fixed_ips,
                          port_security_enabled, security_profiles,
                          queue_id, mac_learning_enabled,
                          allowed_address_pairs)
    lport_path = "/ws.v1/lswitch/%s/lport/%s" % (lswitch_uuid, lport_uuid)
    try:
        result = nsxlib.do_request(HTTP_PUT, lport_path,
                                   json.dumps(lport_obj), cluster=cluster)
    except exception.NotFound as e:
        LOG.error(_("Port or Network not found, Error: %s"), str(e))
        raise exception.PortNotFoundOnNetwork(
            port_id=lport_uuid, net_id=lswitch_uuid)
    LOG.debug(_("Updated logical port %(result)s "
                "on logical switch %(uuid)s"),
              {'result': result['uuid'], 'uuid': lswitch_uuid})
    return result
def create_lport(cluster, lswitch_uuid, tenant_id, neutron_port_id,
                 display_name, device_id, admin_status_enabled,
                 mac_address=None, fixed_ips=None, port_security_enabled=None,
                 security_profiles=None, queue_id=None,
                 mac_learning_enabled=None, allowed_address_pairs=None):
    """Creates a logical port on the assigned logical switch.

    The port body carries the Neutron bookkeeping tags (tenant, port id,
    VM id) plus any optional extension attributes, and is POSTed to the
    switch's lport collection.  Returns the backend's response dict.
    """
    lport_obj = {
        'admin_status_enabled': admin_status_enabled,
        'display_name': utils.check_and_truncate(display_name),
        'tags': utils.get_tags(os_tid=tenant_id,
                               q_port_id=neutron_port_id,
                               vm_id=utils.device_id_to_vm_id(device_id)),
    }
    # Merge in the optional attributes via the shared helper, same as
    # update_port() does.
    _configure_extensions(lport_obj, mac_address, fixed_ips,
                          port_security_enabled, security_profiles,
                          queue_id, mac_learning_enabled,
                          allowed_address_pairs)
    lport_path = nsxlib._build_uri_path(LSWITCHPORT_RESOURCE,
                                        parent_resource_id=lswitch_uuid)
    result = nsxlib.do_request(HTTP_POST, lport_path, json.dumps(lport_obj),
                               cluster=cluster)
    LOG.debug(_("Created logical port %(result)s on logical switch %(uuid)s"),
              {'result': result['uuid'], 'uuid': lswitch_uuid})
    return result
def get_port_status(cluster, lswitch_id, port_id):
    """Retrieve the operational status of the port.

    Maps the backend's 'link_status_up' boolean onto the Neutron port
    status constants; raises PortNotFoundOnNetwork if the switch or
    port does not exist on the backend.
    """
    status_uri = "/ws.v1/lswitch/%s/lport/%s/status" % (lswitch_id, port_id)
    try:
        response = nsxlib.do_request(HTTP_GET, status_uri, cluster=cluster)
    except exception.NotFound as e:
        LOG.error(_("Port not found, Error: %s"), str(e))
        raise exception.PortNotFoundOnNetwork(
            port_id=port_id, net_id=lswitch_id)
    if response['link_status_up'] is True:
        return constants.PORT_STATUS_ACTIVE
    return constants.PORT_STATUS_DOWN
def plug_interface(cluster, lswitch_id, lport_id, att_obj):
    """Set the attachment of a logical port by PUTting att_obj to the
    port's attachment sub-resource; returns the backend response."""
    attach_uri = nsxlib._build_uri_path(LSWITCHPORT_RESOURCE,
                                        lport_id, lswitch_id,
                                        is_attachment=True)
    return nsxlib.do_request(HTTP_PUT, attach_uri, json.dumps(att_obj),
                             cluster=cluster)
def plug_vif_interface(
        cluster, lswitch_id, port_id, port_type, attachment=None):
    """Plug a VIF Attachment object in a logical port."""
    att_obj = {"type": port_type}
    # 'vif_uuid' is only sent when an attachment id was supplied.
    if attachment:
        att_obj["vif_uuid"] = attachment
    return plug_interface(cluster, lswitch_id, port_id, att_obj)
|
subramani95/neutron
|
neutron/plugins/vmware/nsxlib/switch.py
|
Python
|
apache-2.0
| 16,354
|
# -*- coding: utf-8 -*-
# Comments and reviews for records.
# This file is part of Invenio.
# Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2012, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Comments and reviews for records: web interface """
__lastupdated__ = """$Date$"""
__revision__ = """$Id$"""
import cgi
from six import iteritems
from invenio.modules.comments.api import check_recID_is_in_range, \
perform_request_display_comments_or_remarks, \
perform_request_add_comment_or_remark, \
perform_request_vote, \
perform_request_report, \
subscribe_user_to_discussion, \
unsubscribe_user_from_discussion, \
get_user_subscription_to_discussion, \
check_user_can_attach_file_to_comments, \
check_user_can_view_comments, \
check_user_can_send_comments, \
check_user_can_view_comment, \
query_get_comment, \
toggle_comment_visibility, \
check_comment_belongs_to_record, \
is_comment_deleted, \
perform_display_your_comments
from invenio.config import \
CFG_TMPSHAREDDIR, \
CFG_SITE_LANG, \
CFG_SITE_URL, \
CFG_SITE_SECURE_URL, \
CFG_PREFIX, \
CFG_SITE_NAME, \
CFG_SITE_NAME_INTL, \
CFG_WEBCOMMENT_ALLOW_COMMENTS,\
CFG_WEBCOMMENT_ALLOW_REVIEWS, \
CFG_WEBCOMMENT_USE_MATHJAX_IN_COMMENTS, \
CFG_SITE_RECORD, \
CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE, \
CFG_WEBCOMMENT_MAX_ATTACHED_FILES, \
CFG_ACCESS_CONTROL_LEVEL_SITE
from invenio.legacy.webuser import getUid, page_not_authorized, isGuestUser, collect_user_info
from invenio.legacy.webpage import page, pageheaderonly, pagefooteronly
from invenio.legacy.search_engine import create_navtrail_links, \
guess_primary_collection_of_a_record
from invenio.utils.url import redirect_to_url, \
make_canonical_urlargd
from invenio.utils.html import get_mathjax_header
from invenio.ext.logging import register_exception
from invenio.base.i18n import gettext_set_language
from invenio.ext.legacy.handler import wash_urlargd, WebInterfaceDirectory
from invenio.legacy.websearch.adminlib import get_detailed_page_tabs, get_detailed_page_tabs_counts
from invenio.modules.access.local_config import VIEWRESTRCOLL
from invenio.modules.access.mailcookie import \
mail_cookie_create_authorize_action, \
mail_cookie_create_common, \
mail_cookie_check_common, \
InvenioWebAccessMailCookieDeletedError, \
InvenioWebAccessMailCookieError
from invenio.modules.comments.config import \
InvenioWebCommentError, \
InvenioWebCommentWarning
import invenio.legacy.template
webstyle_templates = invenio.legacy.template.load('webstyle')
websearch_templates = invenio.legacy.template.load('websearch')
import os
from invenio.utils import apache
from invenio.legacy.bibdocfile.api import \
stream_file, \
decompose_file, \
propose_next_docname
from invenio.modules.collections.models import Collection
class WebInterfaceCommentsPages(WebInterfaceDirectory):
    """Defines the set of /comments pages."""
    # URL components served under .../comments/ (e.g. .../comments/display,
    # .../comments/add).  'attachments' is delegated to the
    # WebInterfaceCommentsFiles instance created in __init__.
    _exports = ['', 'display', 'add', 'vote', 'report', 'index', 'attachments',
                'subscribe', 'unsubscribe', 'toggle']
    def __init__(self, recid=-1, reviews=0):
        """Bind this handler to one record's discussion.

        :param recid: id of the record the discussion belongs to
        :param reviews: page flavour selector (0: comments, 1: reviews)
        """
        self.recid = recid
        self.discussion = reviews # 0:comments, 1:reviews
        self.attachments = WebInterfaceCommentsFiles(recid, reviews)
def index(self, req, form):
"""
Redirects to display function
"""
return self.display(req, form)
    def display(self, req, form):
        """
        Display comments (reviews if enabled) associated with record having id recid where recid>0.
        This function can also be used to display remarks associated with basket having id recid where recid<-99.
        @param ln: language
        @param recid: record id, integer
        @param do: display order    hh = highest helpful score, review only
                                    lh = lowest helpful score, review only
                                    hs = highest star score, review only
                                    ls = lowest star score, review only
                                    od = oldest date
                                    nd = newest date
        @param ds: display since    all = no filtering by date
                                    nd = n days ago
                                    nw = n weeks ago
                                    nm = n months ago
                                    ny = n years ago
                                    where n is a single digit integer between 0 and 9
        @param nb: number of results per page
        @param p: results page
        @param voted: boolean, active if user voted for a review, see vote function
        @param reported: int, active if user reported a certain comment/review, see report function
        @param reviews: boolean, enabled for reviews, disabled for comments
        @param subscribed: int, 1 if user just subscribed to discussion, -1 if unsubscribed
        @return the full html page.
        """
        argd = wash_urlargd(form, {'do': (str, "od"),
                                   'ds': (str, "all"),
                                   'nb': (int, 100),
                                   'p': (int, 1),
                                   'voted': (int, -1),
                                   'reported': (int, -1),
                                   'subscribed': (int, 0),
                                   'cmtgrp': (list, ["latest"]) # 'latest' is now a reserved group/round name
                                   })
        _ = gettext_set_language(argd['ln'])
        uid = getUid(req)
        user_info = collect_user_info(req)
        # Record-level permission check: unauthorized guests are bounced
        # through the login page (with an authorization cookie so they land
        # back here); other unauthorized users get a plain error page.
        (auth_code, auth_msg) = check_user_can_view_comments(user_info, self.recid)
        if auth_code and user_info['email'] == 'guest':
            cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {'collection' : guess_primary_collection_of_a_record(self.recid)})
            target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
                     make_canonical_urlargd({'action': cookie, 'ln' : argd['ln'], 'referer' : \
                                             CFG_SITE_SECURE_URL + user_info['uri']}, {})
            return redirect_to_url(req, target, norobot=True)
        elif auth_code:
            return page_not_authorized(req, "../", \
                                       text = auth_msg)
        # Capability flags passed through to the template: may the user
        # post comments, and may (non-guest) users attach files?
        can_send_comments = False
        (auth_code, auth_msg) = check_user_can_send_comments(user_info, self.recid)
        if not auth_code:
            can_send_comments = True
        can_attach_files = False
        (auth_code, auth_msg) = check_user_can_attach_file_to_comments(user_info, self.recid)
        if not auth_code and (user_info['email'] != 'guest'):
            can_attach_files = True
        # Subscription state: 1 appears to mean a direct (revocable)
        # subscription, 2 a subscription the user cannot revoke
        # (NOTE(review): exact semantics of 1 vs 2 live in
        # get_user_subscription_to_discussion -- confirm there).
        subscription = get_user_subscription_to_discussion(self.recid, uid)
        if subscription == 1:
            user_is_subscribed_to_discussion = True
            user_can_unsubscribe_from_discussion = True
        elif subscription == 2:
            user_is_subscribed_to_discussion = True
            user_can_unsubscribe_from_discussion = False
        else:
            user_is_subscribed_to_discussion = False
            user_can_unsubscribe_from_discussion = False
        col_id = Collection.query.filter_by(
            name=guess_primary_collection_of_a_record(self.recid)).value('id')
        # Build the detailed-record tab bar, ordered by each tab's 'order'
        # value (Python 2 cmp-based sort).
        unordered_tabs = get_detailed_page_tabs(col_id, self.recid,
                                                ln=argd['ln'])
        ordered_tabs_id = [(tab_id, values['order']) for (tab_id, values) in iteritems(unordered_tabs)]
        ordered_tabs_id.sort(lambda x, y: cmp(x[1], y[1]))
        link_ln = ''
        if argd['ln'] != CFG_SITE_LANG:
            link_ln = '?ln=%s' % argd['ln']
        tabs = [(unordered_tabs[tab_id]['label'], \
                 '%s/record/%s/%s%s' % (CFG_SITE_URL, self.recid, tab_id, link_ln), \
                 tab_id in ['comments', 'reviews'],
                 unordered_tabs[tab_id]['enabled']) \
                for (tab_id, order) in ordered_tabs_id
                if unordered_tabs[tab_id]['visible'] == True]
        tabs_counts = get_detailed_page_tabs_counts(self.recid)
        citedbynum = tabs_counts['Citations']
        references = tabs_counts['References']
        discussions = tabs_counts['Discussions']
        top = webstyle_templates.detailed_record_container_top(self.recid,
                                                               tabs,
                                                               argd['ln'],
                                                               citationnum=citedbynum,
                                                               referencenum=references,
                                                               discussionnum=discussions)
        bottom = webstyle_templates.detailed_record_container_bottom(self.recid,
                                                                     tabs,
                                                                     argd['ln'])
        #display_comment_rounds = [cmtgrp for cmtgrp in argd['cmtgrp'] if cmtgrp.isdigit() or cmtgrp == "all" or cmtgrp == "-1"]
        display_comment_rounds = argd['cmtgrp']
        check_warnings = []
        (ok, problem) = check_recID_is_in_range(self.recid, check_warnings, argd['ln'])
        if ok:
            body = perform_request_display_comments_or_remarks(req=req, recID=self.recid,
                display_order=argd['do'],
                display_since=argd['ds'],
                nb_per_page=argd['nb'],
                page=argd['p'],
                ln=argd['ln'],
                voted=argd['voted'],
                reported=argd['reported'],
                subscribed=argd['subscribed'],
                reviews=self.discussion,
                uid=uid,
                can_send_comments=can_send_comments,
                can_attach_files=can_attach_files,
                user_is_subscribed_to_discussion=user_is_subscribed_to_discussion,
                user_can_unsubscribe_from_discussion=user_can_unsubscribe_from_discussion,
                display_comment_rounds=display_comment_rounds
                )
            title, description, keywords = websearch_templates.tmpl_record_page_header_content(req, self.recid, argd['ln'])
            navtrail = create_navtrail_links(cc=guess_primary_collection_of_a_record(self.recid), ln=argd['ln'])
            if navtrail:
                navtrail += ' &gt; '
            navtrail += '<a class="navtrail" href="%s/%s/%s?ln=%s">'% (CFG_SITE_URL, CFG_SITE_RECORD, self.recid, argd['ln'])
            navtrail += cgi.escape(title)
            navtrail += '</a>'
            navtrail += ' &gt; <a class="navtrail">%s</a>' % (self.discussion==1 and _("Reviews") or _("Comments"))
            mathjaxheader = ''
            if CFG_WEBCOMMENT_USE_MATHJAX_IN_COMMENTS:
                mathjaxheader = get_mathjax_header(req.is_https())
            jqueryheader = '''
            <script src="%(CFG_SITE_URL)s/vendors/jquery-multifile/jquery.MultiFile.pack.js" type="text/javascript"></script>
            ''' % {'CFG_SITE_URL': CFG_SITE_URL}
            # The page is assembled by hand (header + search container +
            # record container + footer) instead of using page(), so the
            # record tab bar can wrap the comments body.
            return pageheaderonly(title=title,
                                  navtrail=navtrail,
                                  uid=uid,
                                  verbose=1,
                                  metaheaderadd = mathjaxheader + jqueryheader,
                                  req=req,
                                  language=argd['ln'],
                                  navmenuid='search',
                                  navtrail_append_title_p=0) + \
                   websearch_templates.tmpl_search_pagestart(argd['ln']) + \
                   top + body + bottom + \
                   websearch_templates.tmpl_search_pageend(argd['ln']) + \
                   pagefooteronly(lastupdated=__lastupdated__, language=argd['ln'], req=req)
        else:
            return page(title=_("Record Not Found"),
                        body=problem,
                        uid=uid,
                        verbose=1,
                        req=req,
                        language=argd['ln'],
                        navmenuid='search')
    # Return the same page whether we ask for /CFG_SITE_RECORD/123 or /CFG_SITE_RECORD/123/
    __call__ = index
def add(self, req, form):
"""
Add a comment (review) to record with id recid where recid>0
Also works for adding a remark to basket with id recid where recid<-99
@param ln: languange
@param recid: record id
@param action: 'DISPLAY' to display add form
'SUBMIT' to submit comment once form is filled
'REPLY' to reply to an already existing comment
@param msg: the body of the comment/review or remark
@param score: star score of the review
@param note: title of the review
@param comid: comment id, needed for replying
@param editor_type: the type of editor used for submitting the
comment: 'textarea', 'ckeditor'.
@param subscribe: if set, subscribe user to receive email
notifications when new comment are added to
this discussion
@return the full html page.
"""
argd = wash_urlargd(form, {'action': (str, "DISPLAY"),
'msg': (str, ""),
'note': (str, ''),
'score': (int, 0),
'comid': (int, 0),
'editor_type': (str, ""),
'subscribe': (str, ""),
'cookie': (str, "")
})
_ = gettext_set_language(argd['ln'])
actions = ['DISPLAY', 'REPLY', 'SUBMIT']
uid = getUid(req)
# Is site ready to accept comments?
if uid == -1 or (not CFG_WEBCOMMENT_ALLOW_COMMENTS and not CFG_WEBCOMMENT_ALLOW_REVIEWS):
return page_not_authorized(req, "../comments/add",
navmenuid='search')
# Is user allowed to post comment?
user_info = collect_user_info(req)
(auth_code_1, auth_msg_1) = check_user_can_view_comments(user_info, self.recid)
(auth_code_2, auth_msg_2) = check_user_can_send_comments(user_info, self.recid)
if isGuestUser(uid):
cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {'collection' : guess_primary_collection_of_a_record(self.recid)})
# Save user's value in cookie, so that these "POST"
# parameters are not lost during login process
msg_cookie = mail_cookie_create_common('comment_msg',
{'msg': argd['msg'],
'note': argd['note'],
'score': argd['score'],
'editor_type': argd['editor_type'],
'subscribe': argd['subscribe']},
onetime=True)
target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
make_canonical_urlargd({'action': cookie, 'ln' : argd['ln'], 'referer' : \
CFG_SITE_SECURE_URL + user_info['uri'] + '&cookie=' + msg_cookie}, {})
return redirect_to_url(req, target, norobot=True)
elif (auth_code_1 or auth_code_2):
return page_not_authorized(req, "../", \
text = auth_msg_1 + auth_msg_2)
if argd['comid']:
# If replying to a comment, are we on a record that
# matches the original comment user is replying to?
if not check_comment_belongs_to_record(argd['comid'], self.recid):
return page_not_authorized(req, "../", \
text = _("Specified comment does not belong to this record"))
# Is user trying to reply to a restricted comment? Make
# sure user has access to it. We will then inherit its
# restriction for the new comment
(auth_code, auth_msg) = check_user_can_view_comment(user_info, argd['comid'])
if auth_code:
return page_not_authorized(req, "../", \
text = _("You do not have access to the specified comment"))
# Is user trying to reply to a deleted comment? If so, we
# let submitted comment go (to not lose possibly submitted
# content, if comment is submitted while original is
# deleted), but we "reset" comid to make sure that for
# action 'REPLY' the original comment is not included in
# the reply
if is_comment_deleted(argd['comid']):
argd['comid'] = 0
user_info = collect_user_info(req)
can_attach_files = False
(auth_code, auth_msg) = check_user_can_attach_file_to_comments(user_info, self.recid)
if not auth_code and (user_info['email'] != 'guest'):
can_attach_files = True
warning_msgs = [] # list of warning tuples (warning_text, warning_color)
added_files = {}
if can_attach_files:
# User is allowed to attach files. Process the files
file_too_big = False
formfields = form.get('commentattachment[]', [])
if not hasattr(formfields, "__getitem__"): # A single file was uploaded
formfields = [formfields]
for formfield in formfields[:CFG_WEBCOMMENT_MAX_ATTACHED_FILES]:
if hasattr(formfield, "filename") and formfield.filename:
filename = formfield.filename
dir_to_open = os.path.join(CFG_TMPSHAREDDIR, 'webcomment', str(uid))
try:
assert(dir_to_open.startswith(CFG_TMPSHAREDDIR))
except AssertionError:
register_exception(req=req,
prefix='User #%s tried to upload file to forbidden location: %s' \
% (uid, dir_to_open))
if not os.path.exists(dir_to_open):
try:
os.makedirs(dir_to_open)
except:
register_exception(req=req, alert_admin=True)
## Before saving the file to disc, wash the filename (in particular
## washing away UNIX and Windows (e.g. DFS) paths):
filename = os.path.basename(filename.split('\\')[-1])
filename = filename.strip()
if filename != "":
# Check that file does not already exist
n = 1
while os.path.exists(os.path.join(dir_to_open, filename)):
basedir, name, extension = decompose_file(filename)
new_name = propose_next_docname(name)
filename = new_name + extension
fp = open(os.path.join(dir_to_open, filename), "w")
# FIXME: temporary, waiting for wsgi handler to be
# fixed. Once done, read chunk by chunk
# while formfield.file:
# fp.write(formfield.file.read(10240))
fp.write(formfield.file.read())
fp.close()
# Isn't this file too big?
file_size = os.path.getsize(os.path.join(dir_to_open, filename))
if CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE > 0 and \
file_size > CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE:
os.remove(os.path.join(dir_to_open, filename))
# One file is too big: record that,
# dismiss all uploaded files and re-ask to
# upload again
file_too_big = True
try:
raise InvenioWebCommentWarning(_('The size of file \\"%(x_file)s\\" (%(x_size)s) is larger than maximum allowed file size (%(x_max)s). Select files again.',
x_file=cgi.escape(filename), x_size=str(file_size/1024) + 'KB', x_max=str(CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE/1024) + 'KB'))
except InvenioWebCommentWarning as exc:
register_exception(stream='warning')
warning_msgs.append((exc.message, ''))
#warning_msgs.append(('WRN_WEBCOMMENT_MAX_FILE_SIZE_REACHED', cgi.escape(filename), str(file_size/1024) + 'KB', str(CFG_WEBCOMMENT_MAX_ATTACHMENT_SIZE/1024) + 'KB'))
else:
added_files[filename] = os.path.join(dir_to_open, filename)
if file_too_big:
# One file was too big. Removed all uploaded filed
for filepath in added_files.items():
try:
os.remove(filepath)
except:
# File was already removed or does not exist?
pass
client_ip_address = req.remote_ip
check_warnings = []
(ok, problem) = check_recID_is_in_range(self.recid, check_warnings, argd['ln'])
if ok:
title, description, keywords = websearch_templates.tmpl_record_page_header_content(req,
self.recid,
argd['ln'])
navtrail = create_navtrail_links(cc=guess_primary_collection_of_a_record(self.recid))
if navtrail:
navtrail += ' > '
navtrail += '<a class="navtrail" href="%s/%s/%s?ln=%s">'% (CFG_SITE_URL, CFG_SITE_RECORD, self.recid, argd['ln'])
navtrail += cgi.escape(title)
navtrail += '</a>'
navtrail += '> <a class="navtrail" href="%s/%s/%s/%s/?ln=%s">%s</a>' % (CFG_SITE_URL,
CFG_SITE_RECORD,
self.recid,
self.discussion==1 and 'reviews' or 'comments',
argd['ln'],
self.discussion==1 and _('Reviews') or _('Comments'))
if argd['action'] not in actions:
argd['action'] = 'DISPLAY'
if not argd['msg']:
# User had to login in-between, so retrieve msg
# from cookie
try:
(kind, cookie_argd) = mail_cookie_check_common(argd['cookie'],
delete=True)
argd.update(cookie_argd)
except InvenioWebAccessMailCookieDeletedError as e:
return redirect_to_url(req, CFG_SITE_SECURE_URL + '/'+ CFG_SITE_RECORD +'/' + \
str(self.recid) + (self.discussion==1 and \
'/reviews' or '/comments'))
except InvenioWebAccessMailCookieError as e:
# Invalid or empty cookie: continue
pass
subscribe = False
if argd['subscribe'] and \
get_user_subscription_to_discussion(self.recid, uid) == 0:
# User is not already subscribed, and asked to subscribe
subscribe = True
body = perform_request_add_comment_or_remark(recID=self.recid,
ln=argd['ln'],
uid=uid,
action=argd['action'],
msg=argd['msg'],
note=argd['note'],
score=argd['score'],
reviews=self.discussion,
comID=argd['comid'],
client_ip_address=client_ip_address,
editor_type=argd['editor_type'],
can_attach_files=can_attach_files,
subscribe=subscribe,
req=req,
attached_files=added_files,
warnings=warning_msgs)
if self.discussion:
title = _("Add Review")
else:
title = _("Add Comment")
jqueryheader = '''
<script src="%(CFG_SITE_URL)s/vendors/jquery-multifile/jquery.MultiFile.pack.js" type="text/javascript"></script>
''' % {'CFG_SITE_URL': CFG_SITE_URL}
return page(title=title,
body=body,
navtrail=navtrail,
uid=uid,
language=CFG_SITE_LANG,
verbose=1,
req=req,
navmenuid='search',
metaheaderadd=jqueryheader)
# id not in range
else:
return page(title=_("Record Not Found"),
body=problem,
uid=uid,
verbose=1,
req=req,
navmenuid='search')
    def vote(self, req, form):
        """
        Vote positively or negatively for a comment/review.
        @param comid: comment/review id
        @param com_value:   +1 to vote positively
                            -1 to vote negatively
        @param recid: the id of the record the comment/review is associated with
        @param ln: language
        @param do: display order    hh = highest helpful score, review only
                                    lh = lowest helpful score, review only
                                    hs = highest star score, review only
                                    ls = lowest star score, review only
                                    od = oldest date
                                    nd = newest date
        @param ds: display since    all = no filtering by date
                                    nd = n days ago
                                    nw = n weeks ago
                                    nm = n months ago
                                    ny = n years ago
                                    where n is a single digit integer between 0 and 9
        @param nb: number of results per page
        @param p: results page
        @param referer: http address of the calling function to redirect to (refresh)
        @param reviews: boolean, enabled for reviews, disabled for comments
        """
        argd = wash_urlargd(form, {'comid': (int, -1),
                                   'com_value': (int, 0),
                                   'recid': (int, -1),
                                   'do': (str, "od"),
                                   'ds': (str, "all"),
                                   'nb': (int, 100),
                                   'p': (int, 1),
                                   'referer': (str, None)
                                   })
        _ = gettext_set_language(argd['ln'])
        client_ip_address = req.remote_ip
        uid = getUid(req)
        user_info = collect_user_info(req)
        # Guests who lack permission are routed through login; other
        # unauthorized users get an error page.
        (auth_code, auth_msg) = check_user_can_view_comments(user_info, self.recid)
        if auth_code and user_info['email'] == 'guest':
            cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {'collection' : guess_primary_collection_of_a_record(self.recid)})
            target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
                     make_canonical_urlargd({'action': cookie, 'ln' : argd['ln'], 'referer' : \
                                             CFG_SITE_SECURE_URL + user_info['uri']}, {})
            return redirect_to_url(req, target, norobot=True)
        elif auth_code:
            return page_not_authorized(req, "../", \
                                       text = auth_msg)
        # Check that comment belongs to this recid
        if not check_comment_belongs_to_record(argd['comid'], self.recid):
            return page_not_authorized(req, "../", \
                                       text = _("Specified comment does not belong to this record"))
        # Check that user can access the record
        (auth_code, auth_msg) = check_user_can_view_comment(user_info, argd['comid'])
        if auth_code:
            return page_not_authorized(req, "../", \
                                       text = _("You do not have access to the specified comment"))
        # Check that comment is not currently deleted
        if is_comment_deleted(argd['comid']):
            return page_not_authorized(req, "../", \
                                       text = _("You cannot vote for a deleted comment"),
                                       ln=argd['ln'])
        success = perform_request_vote(argd['comid'], client_ip_address, argd['com_value'], uid)
        if argd['referer']:
            # NOTE(review): appends '?...' unconditionally -- breaks if the
            # referer already carries a query string; confirm callers only
            # pass bare paths.
            argd['referer'] += "?ln=%s&amp;do=%s&amp;ds=%s&amp;nb=%s&amp;p=%s&amp;voted=%s&amp;" % (
                argd['ln'], argd['do'], argd['ds'], argd['nb'], argd['p'], success)
            redirect_to_url(req, argd['referer'])
        else:
            #Note: sent to comments display
            # NOTE(review): '?&amp;ln=' carries a stray '&amp;', and voted is
            # hardcoded to 1 here while the referer branch passes the real
            # 'success' value -- looks unintentional; confirm.
            referer = "%s/%s/%s/%s?&amp;ln=%s&amp;voted=1"
            referer %= (CFG_SITE_SECURE_URL, CFG_SITE_RECORD, self.recid, self.discussion == 1 and 'reviews' or 'comments', argd['ln'])
            redirect_to_url(req, referer)
def report(self, req, form):
"""
Report a comment/review for inappropriate content
@param comid: comment/review id
@param recid: the id of the record the comment/review is associated with
@param ln: language
@param do: display order hh = highest helpful score, review only
lh = lowest helpful score, review only
hs = highest star score, review only
ls = lowest star score, review only
od = oldest date
nd = newest date
@param ds: display since all= no filtering by date
nd = n days ago
nw = n weeks ago
nm = n months ago
ny = n years ago
where n is a single digit integer between 0 and 9
@param nb: number of results per page
@param p: results page
@param referer: http address of the calling function to redirect to (refresh)
@param reviews: boolean, enabled for reviews, disabled for comments
"""
argd = wash_urlargd(form, {'comid': (int, -1),
'recid': (int, -1),
'do': (str, "od"),
'ds': (str, "all"),
'nb': (int, 100),
'p': (int, 1),
'referer': (str, None)
})
_ = gettext_set_language(argd['ln'])
client_ip_address = req.remote_ip
uid = getUid(req)
user_info = collect_user_info(req)
(auth_code, auth_msg) = check_user_can_view_comments(user_info, self.recid)
if isGuestUser(uid):
cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {'collection' : guess_primary_collection_of_a_record(self.recid)})
target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
make_canonical_urlargd({'action': cookie, 'ln' : argd['ln'], 'referer' : \
CFG_SITE_SECURE_URL + user_info['uri']}, {})
return redirect_to_url(req, target, norobot=True)
elif auth_code:
return page_not_authorized(req, "../", \
text = auth_msg)
# Check that comment belongs to this recid
if not check_comment_belongs_to_record(argd['comid'], self.recid):
return page_not_authorized(req, "../", \
text = _("Specified comment does not belong to this record"))
# Check that user can access the record
(auth_code, auth_msg) = check_user_can_view_comment(user_info, argd['comid'])
if auth_code:
return page_not_authorized(req, "../", \
text = _("You do not have access to the specified comment"))
# Check that comment is not currently deleted
if is_comment_deleted(argd['comid']):
return page_not_authorized(req, "../", \
text = _("You cannot report a deleted comment"),
ln=argd['ln'])
success = perform_request_report(argd['comid'], client_ip_address, uid)
if argd['referer']:
argd['referer'] += "?ln=%s&do=%s&ds=%s&nb=%s&p=%s&reported=%s&" % (argd['ln'], argd['do'], argd['ds'], argd['nb'], argd['p'], str(success))
redirect_to_url(req, argd['referer'])
else:
#Note: sent to comments display
referer = "%s/%s/%s/%s/display?ln=%s&voted=1"
referer %= (CFG_SITE_SECURE_URL, CFG_SITE_RECORD, self.recid, self.discussion==1 and 'reviews' or 'comments', argd['ln'])
redirect_to_url(req, referer)
def subscribe(self, req, form):
"""
Subscribe current user to receive email notification when new
comments are added to current discussion.
"""
argd = wash_urlargd(form, {'referer': (str, None)})
uid = getUid(req)
user_info = collect_user_info(req)
(auth_code, auth_msg) = check_user_can_view_comments(user_info, self.recid)
if isGuestUser(uid):
cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {'collection' : guess_primary_collection_of_a_record(self.recid)})
target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
make_canonical_urlargd({'action': cookie, 'ln' : argd['ln'], 'referer' : \
CFG_SITE_SECURE_URL + user_info['uri']}, {})
return redirect_to_url(req, target, norobot=True)
elif auth_code:
return page_not_authorized(req, "../", \
text = auth_msg)
success = subscribe_user_to_discussion(self.recid, uid)
display_url = "%s/%s/%s/comments/display?subscribed=%s&ln=%s" % \
(CFG_SITE_SECURE_URL, CFG_SITE_RECORD, self.recid, str(success), argd['ln'])
redirect_to_url(req, display_url)
def unsubscribe(self, req, form):
"""
Unsubscribe current user from current discussion.
"""
argd = wash_urlargd(form, {'referer': (str, None)})
user_info = collect_user_info(req)
uid = getUid(req)
if isGuestUser(uid):
cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {'collection' : guess_primary_collection_of_a_record(self.recid)})
target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
make_canonical_urlargd({'action': cookie, 'ln' : argd['ln'], 'referer' : \
CFG_SITE_SECURE_URL + user_info['uri']}, {})
return redirect_to_url(req, target, norobot=True)
success = unsubscribe_user_from_discussion(self.recid, uid)
display_url = "%s/%s/%s/comments/display?subscribed=%s&ln=%s" % \
(CFG_SITE_SECURE_URL, CFG_SITE_RECORD, self.recid, str(-success), argd['ln'])
redirect_to_url(req, display_url)
def toggle(self, req, form):
"""
Store the visibility of a comment for current user
"""
argd = wash_urlargd(form, {'comid': (int, -1),
'referer': (str, None),
'collapse': (int, 1)})
uid = getUid(req)
if isGuestUser(uid):
# We do not store information for guests
return ''
toggle_comment_visibility(uid, argd['comid'], argd['collapse'], self.recid)
if argd['referer']:
return redirect_to_url(req, CFG_SITE_SECURE_URL + \
(not argd['referer'].startswith('/') and '/' or '') + \
argd['referer'] + '#' + str(argd['comid']))
class WebInterfaceCommentsFiles(WebInterfaceDirectory):
    """Handle <strike>upload and </strike> access to files for comments.
    <strike>The upload is currently only available through the Ckeditor.</strike>
    """
    #_exports = ['put'] # 'get' is handled by _lookup(..)
    def __init__(self, recid=-1, reviews=0):
        """Bind this handler to one record's attachment namespace.

        :param recid: id of the record the discussion belongs to
        :param reviews: page flavour selector (0: comments, 1: reviews)
        """
        self.recid = recid
        self.discussion = reviews # 0:comments, 1:reviews
def _lookup(self, component, path):
""" This handler is invoked for the dynamic URLs (for getting
<strike>and putting attachments</strike>) Eg:
CFG_SITE_URL/CFG_SITE_RECORD/5953/comments/attachments/get/652/myfile.pdf
"""
if component == 'get' and len(path) > 1:
comid = path[0] # comment ID
file_name = '/'.join(path[1:]) # the filename
def answer_get(req, form):
"""Accessing files attached to comments."""
form['file'] = file_name
form['comid'] = comid
return self._get(req, form)
return answer_get, []
# All other cases: file not found
return None, []
def _get(self, req, form):
"""
Returns a file attached to a comment.
Example:
CFG_SITE_URL/CFG_SITE_RECORD/5953/comments/attachments/get/652/myfile.pdf
where 652 is the comment ID
"""
argd = wash_urlargd(form, {'file': (str, None),
'comid': (int, 0)})
_ = gettext_set_language(argd['ln'])
# Can user view this record, i.e. can user access its
# attachments?
uid = getUid(req)
user_info = collect_user_info(req)
# Check that user can view record, and its comments (protected
# with action "viewcomment")
(auth_code, auth_msg) = check_user_can_view_comments(user_info, self.recid)
if auth_code and user_info['email'] == 'guest':
cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {'collection' : guess_primary_collection_of_a_record(self.recid)})
target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
make_canonical_urlargd({'action': cookie, 'ln' : argd['ln'], 'referer' : \
CFG_SITE_SECURE_URL + user_info['uri']}, {})
return redirect_to_url(req, target, norobot=True)
elif auth_code:
return page_not_authorized(req, "../", \
text = auth_msg)
# Does comment exist?
if not query_get_comment(argd['comid']):
req.status = apache.HTTP_NOT_FOUND
return page(title=_("Page Not Found"),
body=_('The requested comment could not be found'),
req=req)
# Check that user can view this particular comment, protected
# using its own restriction
(auth_code, auth_msg) = check_user_can_view_comment(user_info, argd['comid'])
if auth_code and user_info['email'] == 'guest':
cookie = mail_cookie_create_authorize_action(VIEWRESTRCOLL, {'collection' : guess_primary_collection_of_a_record(self.recid)})
target = CFG_SITE_SECURE_URL + '/youraccount/login' + \
make_canonical_urlargd({'action': cookie, 'ln' : argd['ln'], 'referer' : \
CFG_SITE_SECURE_URL + user_info['uri']}, {})
return redirect_to_url(req, target)
elif auth_code:
return page_not_authorized(req, "../", \
text = auth_msg,
ln=argd['ln'])
# Check that comment is not currently deleted
if is_comment_deleted(argd['comid']):
return page_not_authorized(req, "../", \
text = _("You cannot access files of a deleted comment"),
ln=argd['ln'])
if not argd['file'] is None:
# Prepare path to file on disk. Normalize the path so that
# ../ and other dangerous components are removed.
path = os.path.abspath(CFG_PREFIX + '/var/data/comments/' + \
str(self.recid) + '/' + str(argd['comid']) + \
'/' + argd['file'])
# Check that we are really accessing attachements
# directory, for the declared record.
if path.startswith(CFG_PREFIX + '/var/data/comments/' + \
str(self.recid)) and \
os.path.exists(path):
return stream_file(req, path)
# Send error 404 in all other cases
req.status = apache.HTTP_NOT_FOUND
return page(title=_("Page Not Found"),
body=_('The requested file could not be found'),
req=req,
language=argd['ln'])
class WebInterfaceYourCommentsPages(WebInterfaceDirectory):
    """Defines the set of /yourcomments pages."""

    _exports = ['', ]

    def index(self, req, form):
        """Index page."""
        argd = wash_urlargd(form, {'page': (int, 1),
                                   'format': (str, "rc"),
                                   'order_by': (str, "lcf"),
                                   'per_page': (str, "all"),
                                   })
        # TODO: support also "reviews", by adding new option to show/hide them if needed
        uid = getUid(req)
        # load the right language
        _ = gettext_set_language(argd['ln'])
        # Is site ready to accept comments?
        if not CFG_WEBCOMMENT_ALLOW_COMMENTS or CFG_ACCESS_CONTROL_LEVEL_SITE >= 1:
            return page_not_authorized(req, "%s/yourcomments" % \
                                       (CFG_SITE_SECURE_URL,),
                                       text="Comments are currently disabled on this site",
                                       navmenuid="yourcomments")
        elif uid == -1 or isGuestUser(uid):
            # Guests must log in first; the login URL carries a referer back
            # to this page (with the current query arguments preserved).
            return redirect_to_url(req, "%s/youraccount/login%s" % (
                CFG_SITE_SECURE_URL,
                make_canonical_urlargd({
                    'referer' : "%s/yourcomments%s" % (
                        CFG_SITE_SECURE_URL,
                        make_canonical_urlargd(argd, {})),
                    "ln" : argd['ln']}, {})))
        user_info = collect_user_info(req)
        if not user_info['precached_sendcomments']:
            # Maybe we should still authorize if user submitted
            # comments in the past?
            return page_not_authorized(req, "../", \
                text = _("You are not authorized to use comments."))
        return page(title=_("Your Comments"),
                    body=perform_display_your_comments(user_info,
                        page_number=argd['page'],
                        selected_order_by_option=argd['order_by'],
                        selected_display_number_option=argd['per_page'],
                        selected_display_format_option=argd['format'],
                        ln=argd['ln']),
                    navtrail= """<a class="navtrail" href="%(sitesecureurl)s/youraccount/display?ln=%(ln)s">%(account)s</a>""" % {
                        'sitesecureurl' : CFG_SITE_SECURE_URL,
                        'ln': argd['ln'],
                        'account' : _("Your Account"),
                    },
                    description=_("%(x_name)s View your previously submitted comments", x_name=CFG_SITE_NAME_INTL.get(argd['ln'], CFG_SITE_NAME)),
                    keywords=_("%(x_name)s, personalize", x_name=CFG_SITE_NAME_INTL.get(argd['ln'], CFG_SITE_NAME)),
                    uid=uid,
                    language=argd['ln'],
                    req=req,
                    lastupdated=__lastupdated__,
                    navmenuid='youralerts',
                    secure_page_p=1)

    # Return the same page wether we ask for /CFG_SITE_RECORD/123 or /CFG_SITE_RECORD/123/
    __call__ = index
|
jirikuncar/invenio
|
invenio/legacy/webcomment/webinterface.py
|
Python
|
gpl-2.0
| 47,121
|
"""PostProcessor for serving reveal.js HTML slideshows."""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import os
import webbrowser
from tornado import web, ioloop, httpserver
from tornado.httpclient import AsyncHTTPClient
from traitlets import Bool, Unicode, Int
from .base import PostProcessorBase
class ProxyHandler(web.RequestHandler):
    """Handler that proxies requests from a local prefix to a CDN."""

    @web.asynchronous
    def get(self, prefix, url):
        """proxy a request to a CDN"""
        # Map /<prefix>/<url> onto <cdn>/<url>. The CDN base URL and the
        # shared AsyncHTTPClient are injected via Application settings.
        proxy_url = "/".join([self.settings['cdn'], url])
        client = self.settings['client']
        client.fetch(proxy_url, callback=self.finish_get)

    def finish_get(self, response):
        """finish the request"""
        # rethrow errors
        response.rethrow()
        # Copy through only content/caching headers from the CDN response.
        for header in ["Content-Type", "Cache-Control", "Date", "Last-Modified", "Expires"]:
            if header in response.headers:
                self.set_header(header, response.headers[header])
        # finish() sends the body and closes the @web.asynchronous request.
        self.finish(response.body)
class ServePostProcessor(PostProcessorBase):
    """Post processor designed to serve files

    Proxies reveal.js requests to a CDN if no local reveal.js is present
    """

    open_in_browser = Bool(True, config=True,
        help="""Should the browser be opened automatically?"""
    )
    reveal_cdn = Unicode("https://cdn.jsdelivr.net/reveal.js/2.6.2", config=True,
        help="""URL for reveal.js CDN."""
    )
    reveal_prefix = Unicode("reveal.js", config=True, help="URL prefix for reveal.js")
    ip = Unicode("127.0.0.1", config=True, help="The IP address to listen on.")
    port = Int(8000, config=True, help="port for the server to listen on.")

    def postprocess(self, input):
        """Serve the build directory with a webserver."""
        dirname, filename = os.path.split(input)
        # Static handler serves the build directory; "/" redirects to the slides.
        handlers = [
            (r"/(.+)", web.StaticFileHandler, {'path' : dirname}),
            (r"/", web.RedirectHandler, {"url": "/%s" % filename})
        ]
        if ('://' in self.reveal_prefix or self.reveal_prefix.startswith("//")):
            # reveal specifically from CDN, nothing to do
            pass
        elif os.path.isdir(os.path.join(dirname, self.reveal_prefix)):
            # reveal prefix exists
            self.log.info("Serving local %s", self.reveal_prefix)
        else:
            # No local copy: proxy /<reveal_prefix>/* requests to the CDN.
            self.log.info("Redirecting %s requests to %s", self.reveal_prefix, self.reveal_cdn)
            handlers.insert(0, (r"/(%s)/(.*)" % self.reveal_prefix, ProxyHandler))
        app = web.Application(handlers,
                              cdn=self.reveal_cdn,
                              client=AsyncHTTPClient(),
                              )
        # hook up tornado logging to our logger
        try:
            from tornado import log
            log.app_log = self.log
        except ImportError:
            # old tornado (<= 3), ignore
            pass
        http_server = httpserver.HTTPServer(app)
        http_server.listen(self.port, address=self.ip)
        url = "http://%s:%i/%s" % (self.ip, self.port, filename)
        print("Serving your slides at %s" % url)
        print("Use Control-C to stop this server")
        if self.open_in_browser:
            webbrowser.open(url, new=2)
        try:
            # Blocks here until interrupted; all serving happens in the IOLoop.
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            print("\nInterrupted")
def main(path):
    """Entry point: serve the slides found at *path* from the command line."""
    # Construct the post processor and immediately invoke it on the path.
    ServePostProcessor()(path)
if __name__ == '__main__':
    import sys
    # Usage: python serve.py path/to/slides.html
    main(sys.argv[1])
|
bdh1011/wau
|
venv/lib/python2.7/site-packages/nbconvert/postprocessors/serve.py
|
Python
|
mit
| 3,653
|
import json
from collections import namedtuple
import fauxfactory
import pytest
from riggerlib import recursive_update
from widgetastic.utils import partial_match
from cfme.cloud.provider.openstack import OpenStackProvider
from cfme.fixtures.provider import setup_or_skip
from cfme.infrastructure.provider.rhevm import RHEVMProvider
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.utils.generators import random_vm_name
from cfme.utils.log import logger
from cfme.utils.version import Version
from cfme.utils.version import VersionPicker
from cfme.v2v.infrastructure_mapping import InfrastructureMapping as InfraMapping
# Bundle of an infrastructure-mapping form payload plus the VMs created for it.
FormDataVmObj = namedtuple("FormDataVmObj", ["infra_mapping_data", "vm_list"])
# The three provider roles used by v2v tests; roles not in play stay None.
V2vProviders = namedtuple("V2vProviders", ["vmware_provider", "rhv_provider", "osp_provider"])
@pytest.fixture(scope="module")
def v2v_provider_setup(request, appliance, source_provider, provider):
    """ Fixture to setup providers """
    vmware_provider, rhv_provider, osp_provider = None, None, None
    # Sort the two parametrized providers into their v2v roles.
    for v2v_provider in [source_provider, provider]:
        if v2v_provider.one_of(VMwareProvider):
            vmware_provider = v2v_provider
            setup_or_skip(request, vmware_provider)
        elif v2v_provider.one_of(RHEVMProvider):
            rhv_provider = v2v_provider
            setup_or_skip(request, rhv_provider)
        elif v2v_provider.one_of(OpenStackProvider):
            osp_provider = v2v_provider
            setup_or_skip(request, osp_provider)
        else:
            pytest.skip("Provider {} is not a valid provider for v2v tests".format(provider.name))
    v2v_providers = V2vProviders(vmware_provider=vmware_provider,
                                 rhv_provider=rhv_provider,
                                 osp_provider=osp_provider)
    # Transformation method can be vddk or ssh
    if hasattr(request, "param") and request.param == "SSH":
        transformation_method = "SSH"
    else:
        transformation_method = "VDDK"
    # set host credentials for Vmware and RHEV hosts
    host_credentials(appliance, transformation_method, v2v_providers)
    yield v2v_providers
    # Teardown (after all module tests): delete whichever providers were set up.
    for v2v_provider in v2v_providers:
        if v2v_provider is not None:
            v2v_provider.delete_if_exists(cancel=False)
def host_credentials(appliance, transformation_method, v2v_providers):
    """ Sets up host credentials for vmware and rhv providers
    for RHEV migration.
    For migration with OSP only vmware(source) provider
    host credentials need to be added.
    These credentials are automatically removed once the
    provider is deleted in clean up.

    Args:
        appliance
        transformation_method : vddk or ssh to be used in configuring conversion host
        v2v_providers: vmware (and rhev in case of RHV migration ) , osp not needed.
    """
    provider_list = [v2v_providers.vmware_provider]
    rhv_hosts = None
    if v2v_providers.rhv_provider is not None:
        rhv_hosts = v2v_providers.rhv_provider.hosts.all()
        provider_list.append(v2v_providers.rhv_provider)
    try:
        for v2v_provider in provider_list:
            hosts = v2v_provider.hosts.all()
            for host in hosts:
                # Match the live host with the credentials recorded in the
                # provider's yaml data, keyed by host name.
                host_data = [data for data in v2v_provider.data['hosts']
                             if data['name'] == host.name]
                if not host_data:
                    pytest.skip("No host data")
                host.update_credentials_rest(credentials=host_data[0]['credentials'])
    except Exception:
        logger.exception("Exception when trying to add the host credentials.")
        pytest.skip("No data for hosts in providers, failed to retrieve hosts and add creds.")
    # Configure conversion host for RHEV migration
    if rhv_hosts is not None:
        set_conversion_instance_for_rhev(appliance, transformation_method, rhv_hosts)
    # For OSP targets the conversion host is an OSP instance instead.
    if v2v_providers.osp_provider is not None:
        set_conversion_instance_for_osp(appliance, v2v_providers.osp_provider,
                                        transformation_method)
def _tag_cleanup(host_obj, tag1, tag2):
"""
Clean Up Tags
Returns: Boolean True if all Tags were removed/cleaned
or False means all required Tags are present on host.
"""
def extract_tag(tag):
# Following strip will remove extra asterisk from tag assignment
return tag.category.display_name.strip(" *"), tag.display_name
valid_tags = {extract_tag(tag1), extract_tag(tag2)}
tags = host_obj.get_tags()
tags_set = set(map(extract_tag, tags))
# we always neeed 2 tags for migration, if total is less than 2
# don't bother checking what tag was it, just remove it and
# then add all required tags via add_tags() call. or if tags on host
# are not subset of valid tags, we still remove them.
if len(tags_set) < 2 or not tags_set.issubset(valid_tags):
host_obj.remove_tags(tags=tags)
return True
return False
def create_tags(appliance, transformation_method):
    """
    Create tags V2V - Transformation Host * and V2V - Transformation Method

    Args:
        appliance:
        transformation_method: VDDK/SSH

    Returns:
        tuple (tag1, tag2) of the two instantiated tag objects.
    """
    # t is for True in V2V - Transformation Host * tag
    tag1 = appliance.collections.categories.instantiate(
        display_name="V2V - Transformation Host *"
    ).collections.tags.instantiate(display_name="t")
    # The method tag's value is the transformation method itself (VDDK/SSH).
    tag2 = appliance.collections.categories.instantiate(
        display_name="V2V - Transformation Method"
    ).collections.tags.instantiate(display_name=transformation_method)
    return tag1, tag2
def set_conversion_instance_for_rhev(appliance, transformation_method, rhev_hosts):
    """Configure RHV hosts as conversion hosts via the rails console.

    In 5.10 rails console commands are run to configure all the rhev hosts.

    Args:
        appliance: appliance whose rails console runs the commands
        transformation_method: vddk or ssh as per test requirement
        rhev_hosts: hosts in rhev to configure for conversion
    """
    # Delete all prior conversion hosts ONCE, up front, otherwise duplicate
    # entries are created. (Previously this ran inside the loop below, which
    # wiped the conversion hosts configured by earlier iterations, leaving
    # only the last host configured.)
    delete_hosts = appliance.ssh_client.run_rails_command("'ConversionHost.delete_all'")
    if not delete_hosts.success:
        # Bug fix: the skip message previously had no {} placeholder, so the
        # command output was silently dropped.
        pytest.skip("Failed to delete all conversion hosts: {}".format(delete_hosts.output))
    method = transformation_method.lower()  # rails attribute names are lowercase
    for host in rhev_hosts:
        # Register the host as a ConversionHost supporting the chosen transport.
        set_conv_host = appliance.ssh_client.run_rails_command(
            "'r = Host.find_by(name:{host});\
            c_host = ConversionHost.create(name:{host},resource:r);\
            c_host.{method}_transport_supported = true;\
            c_host.save'".format(host=json.dumps(host.name), method=method)
        )
        if not set_conv_host.success:
            pytest.skip("Failed to set conversion hosts: {}".format(set_conv_host.output))
def set_conversion_instance_for_osp(appliance, osp_provider, transformation_method='vddk'):
    """Configure OSP instances as conversion hosts via the rails console.

    Rails console command
    ====================
    res = Vm.find_by(name: 'my_osp_instance')
    conversion_host = ConversionHost.create(name: res.name, resource: res)
    conversion_host.vddk_transport_supported = true
    conversion_host.save

    Args:
        appliance
        transformation_method: vddk or ssh
        osp_provider: OSP
    """
    # Delete all prior conversion hosts otherwise it creates duplicate entries
    delete_hosts = appliance.ssh_client.run_rails_command("'ConversionHost.delete_all'")
    if not delete_hosts.success:
        # Bug fix: the skip message previously had no {} placeholder, so the
        # command output was silently dropped.
        pytest.skip("Failed to delete all conversion hosts: {}".format(delete_hosts.output))
    # transformation method needs to be lower case always
    trans_method = transformation_method.lower()
    try:
        conversion_instances = osp_provider.data['conversion_instances'][trans_method]
    except KeyError:
        pytest.skip("No conversion instance on provider.")
    for instance in conversion_instances:
        set_conv_host = appliance.ssh_client.run_rails_command(
            "'r = Vm.find_by(name:{vm});\
            c_host = ConversionHost.create(name:r.name, resource: r);\
            c_host.{method}_transport_supported = true;\
            c_host.save'".format(
                vm=json.dumps(instance),
                # Reuse the already-lowercased value instead of recomputing it.
                method=trans_method,
            )
        )
        if not set_conv_host.success:
            pytest.skip("Failed to set conversion hosts: {}".format(set_conv_host.output))
def get_vm(request, appliance, source_provider, template, datastore='nfs'):
    """ Helper method that takes template , source provider and datastore
    and creates VM on source provider to migrate .

    Args:
        request
        appliance:
        source_provider: Provider on which vm is created
        template: Template used for creating VM (a callable fixture-style
                  factory; also inspected via template.__name__ below)
        datastore: datastore in which VM is created. If no datastore
                   is provided then by default VM is created on nfs datastore

    returns: Vm object
    """
    source_datastores_list = source_provider.data.get("datastores", [])
    # First datastore of the requested type; raises IndexError if none exists.
    source_datastore = [d.name for d in source_datastores_list if d.type == datastore][0]
    collection = source_provider.appliance.provider_based_collection(source_provider)
    vm_name = random_vm_name("v2v-auto")
    vm_obj = collection.instantiate(
        vm_name, source_provider, template_name=template(source_provider)["name"]
    )
    power_on_vm = True
    if template.__name__ == "win10_template":
        # Need to leave this off, otherwise migration fails
        # because when migration process tries to power off the VM if it is powered off
        # and for win10, it hibernates and that state of filesystem is unsupported
        power_on_vm = False
    vm_obj.create_on_provider(
        timeout=2400,
        find_in_cfme=True,
        allow_skip="default",
        datastore=source_datastore,
        power_on=power_on_vm,
    )
    # Ensure the VM is cleaned up on the provider at test teardown.
    request.addfinalizer(lambda: vm_obj.cleanup_on_provider())
    return vm_obj
def get_data(provider, component, default_value):
    """Return the first *component* entry from the provider's yaml data,
    or *default_value* when the entry list is missing or empty."""
    entries = provider.data.get(component, [])
    try:
        return entries[0]
    except IndexError:
        return default_value
def infra_mapping_default_data(source_provider, provider):
    """
    Default data for infrastructure mapping form.
    It is used in other methods to recursive update the data according
    to parameters in tests.

    Args:
        source_provider: Vmware provider
        provider: Target rhev/OSP provider
    """
    # plan_type only exists from 5.10 on; older versions get None.
    plan_type = VersionPicker({Version.lowest(): None,
                               "5.10": "rhv" if provider.one_of(RHEVMProvider) else "osp"}).pick()
    infra_mapping_data = {
        "name": "infra_map_{}".format(fauxfactory.gen_alphanumeric()),
        "description": "Single Datastore migration of VM from {ds_type1} to {ds_type2}".format(
            ds_type1="nfs", ds_type2="nfs"
        ),
        "plan_type": plan_type,
        "clusters": [component_generator("clusters", source_provider, provider)],
        # Datastore/network pairs default to the first entry of each provider's
        # data, falling back to "nfs" / well-known network names.
        "datastores": [component_generator(
            "datastores", source_provider, provider,
            get_data(source_provider, "datastores", "nfs").type,
            get_data(provider, "datastores", "nfs").type)],
        "networks": [
            component_generator("vlans", source_provider, provider,
                                get_data(source_provider, "vlans", "VM Network"),
                                get_data(provider, "vlans", "ovirtmgmt"))
        ],
    }
    return infra_mapping_data
@pytest.fixture(scope="function")
def mapping_data_multiple_vm_obj_single_datastore(request, appliance, source_provider, provider):
    """Mapping data plus several VMs created from templates on one datastore.

    request.param is indexed as (source_ds_type, target_ds_type, [templates]).
    """
    # this fixture will take list of N VM templates via request and call get_vm for each
    cluster = provider.data.get("clusters", [False])[0]
    if not cluster:
        pytest.skip("No data for cluster available on provider.")
    infra_mapping_data = infra_mapping_default_data(source_provider, provider)
    recursive_update(
        infra_mapping_data,
        {
            "description": "Single Datastore migration of VM from {ds_type1} to {ds_type2},".format(
                ds_type1=request.param[0], ds_type2=request.param[1]
            ),
            "networks": [
                component_generator("vlans", source_provider, provider, "VM Network", "ovirtmgmt")
            ],
        },
    )
    vm_list = []
    for template_name in request.param[2]:
        vm_list.append(get_vm(request, appliance, source_provider, template_name))
    return FormDataVmObj(infra_mapping_data=infra_mapping_data, vm_list=vm_list)
@pytest.fixture(scope="function")
def mapping_data_single_datastore(request, source_provider, provider):
    """Mapping data with one datastore pair.

    request.param is indexed as (source_ds_type, target_ds_type).
    """
    infra_mapping_data = infra_mapping_default_data(source_provider, provider)
    recursive_update(
        infra_mapping_data,
        {
            "description": "Single Datastore migration of VM from {ds_type1} to {ds_type2},".format(
                ds_type1=request.param[0], ds_type2=request.param[1]
            ),
            "datastores": [
                component_generator(
                    "datastores", source_provider, provider, request.param[0], request.param[1]
                )
            ],
        },
    )
    return infra_mapping_data
@pytest.fixture(scope="function")
def mapping_data_single_network(request, source_provider, provider):
    """Mapping data with one network (vlan) pair.

    request.param is indexed as (source_vlan, target_vlan).
    """
    infra_mapping_data = infra_mapping_default_data(source_provider, provider)
    recursive_update(
        infra_mapping_data,
        {
            "description": "Single Network migration of VM from {vlan1} to {vlan2},".format(
                vlan1=request.param[0], vlan2=request.param[1]
            ),
            "networks": [
                component_generator(
                    "vlans", source_provider, provider, request.param[0], request.param[1]
                )
            ],
        },
    )
    return infra_mapping_data
@pytest.fixture(scope="function")
def edited_mapping_data(request, source_provider, provider):
    """Return (original mapping data, edited form data) for mapping-edit tests.

    request.param[1] is indexed as (source_vlan, target_vlan) for the edit.
    """
    infra_mapping_data = infra_mapping_default_data(source_provider, provider)
    # Empty dicts mean "leave clusters/datastores unchanged" in the edit form.
    edited_form_data = {
        "description": "my edited description",
        "clusters": {},
        "datastores": {},
        "networks": [
            component_generator(
                "vlans", source_provider, provider, request.param[1][0], request.param[1][1]
            )
        ],
    }
    return infra_mapping_data, edited_form_data
@pytest.fixture(scope="function")
def mapping_data_dual_vm_obj_dual_datastore(request, appliance, source_provider, provider):
    """Mapping data with two datastore pairs, plus one VM created on each.

    request.param is indexed as
    ((src_ds1, tgt_ds1, template1), (src_ds2, tgt_ds2, template2)).
    """
    vmware_nw = source_provider.data.get("vlans", [None])[0]
    rhvm_nw = provider.data.get("vlans", [None])[0]
    cluster = provider.data.get("clusters", [False])[0]
    if not vmware_nw or not rhvm_nw or not cluster:
        pytest.skip("No data for source or target network in providers.")
    infra_mapping_data = infra_mapping_default_data(source_provider, provider)
    recursive_update(
        infra_mapping_data,
        {
            "description": "Dual DS migration of VM from {dss1} to {dst1},& from {dss2} to {dst2}".
            format(dss1=request.param[0][0],
                   dst1=request.param[0][1],
                   dss2=request.param[1][0],
                   dst2=request.param[1][1]),
            "datastores": [
                component_generator(
                    "datastores",
                    source_provider,
                    provider,
                    request.param[0][0],
                    request.param[0][1],
                ),
                component_generator(
                    "datastores",
                    source_provider,
                    provider,
                    request.param[1][0],
                    request.param[1][1],
                ),
            ],
            "networks": [
                component_generator(
                    "vlans",
                    source_provider,
                    provider,
                    source_provider.data.get("vlans")[0],
                    provider.data.get("vlans")[0],
                )
            ],
        },
    )
    # creating 2 VMs on two different datastores and returning its object list
    vm_obj1 = get_vm(request, appliance, source_provider, request.param[0][2], request.param[0][0])
    vm_obj2 = get_vm(request, appliance, source_provider, request.param[1][2], request.param[1][0])
    return FormDataVmObj(infra_mapping_data=infra_mapping_data, vm_list=[vm_obj1, vm_obj2])
@pytest.fixture(scope="function")
def mapping_data_vm_obj_dual_nics(request, appliance, source_provider, provider):
    """Mapping data with two network pairs, plus one VM.

    request.param is indexed as
    ((src_vlan1, tgt_vlan1), (src_vlan2, tgt_vlan2), template).
    """
    vmware_nw = source_provider.data.get("vlans", [None])[0]
    rhvm_nw = provider.data.get("vlans", [None])[0]
    cluster = provider.data.get("clusters", [False])[0]
    if not vmware_nw or not rhvm_nw or not cluster:
        pytest.skip("No data for source or target network in providers.")
    infra_mapping_data = infra_mapping_default_data(source_provider, provider)
    recursive_update(
        infra_mapping_data,
        {
            "description": "Dual DS migration of VM from {dss1} to {dst1},& from {dss2} to {dst2}".
            format(dss1=request.param[0][0],
                   dst1=request.param[0][1],
                   dss2=request.param[1][0],
                   dst2=request.param[1][1]),
            "networks": [
                component_generator(
                    "vlans", source_provider, provider, request.param[0][0], request.param[0][1]
                ),
                component_generator(
                    "vlans", source_provider, provider, request.param[1][0], request.param[1][1]
                ),
            ],
        },
    )
    vm_obj = get_vm(request, appliance, source_provider, request.param[2])
    return FormDataVmObj(infra_mapping_data=infra_mapping_data, vm_list=[vm_obj])
@pytest.fixture(scope="function")
def mapping_data_vm_obj_single_datastore(request, appliance, source_provider, provider):
    """Return Infra Mapping form data and vm object

    request.param is indexed as (source_ds_type, target_ds_type, template).
    """
    infra_mapping_data = infra_mapping_default_data(source_provider, provider)
    recursive_update(
        infra_mapping_data,
        {
            "description": "Single DS migration of VM from {ds_type1} to {ds_type2},".format(
                ds_type1=request.param[0], ds_type2=request.param[1]
            ),
            "datastores": [
                component_generator(
                    "datastores", source_provider, provider, request.param[0], request.param[1]
                )
            ],
        },
    )
    # The VM is created on the same source datastore type being mapped.
    vm_obj = get_vm(request, appliance, source_provider, request.param[2], request.param[0])
    return FormDataVmObj(infra_mapping_data=infra_mapping_data, vm_list=[vm_obj])
@pytest.fixture(scope="function")
def mapping_data_vm_obj_single_network(request, appliance, source_provider, provider):
    """Mapping data with one network pair, plus one VM.

    request.param is indexed as (source_vlan, target_vlan, template).
    """
    infra_mapping_data = infra_mapping_default_data(source_provider, provider)
    recursive_update(
        infra_mapping_data,
        {
            "description": "Single Network migration of VM from {vlan1} to {vlan2},".format(
                vlan1=request.param[0], vlan2=request.param[1]
            ),
            "networks": [
                component_generator(
                    "vlans", source_provider, provider, request.param[0], request.param[1]
                )
            ],
        },
    )
    vm_obj = get_vm(request, appliance, source_provider, request.param[2])
    return FormDataVmObj(infra_mapping_data=infra_mapping_data, vm_list=[vm_obj])
def component_generator(selector, source_provider, provider, source_type=None, target_type=None):
    """
    Component generator method to generate a dict of source and target
    components(clusters/datastores/networks).
    Gets the provider data based on selector from cfme_data.yaml and creates
    InfraMapping.component(source_list, target_list) object

    Test is skipped if no source or target data is found

    Args:
        selector: can be clusters/datastores/vlans
        source_provider: vmware provider to migrate from
        provider: rhev or osp provider or target provider to migrate to
        source_type: string source datastores/networks on vmware provider to migrate from.
            Ex: if source_type is "iscsi". Provider data is checked for datastore with type
            iscsi and that datastores name is used.
        target_type: string target datastores/networks to migrate to

    returns : InfraMapping.component(source_list, target_list) object
    """
    if selector not in ['clusters', 'datastores', 'vlans']:
        raise ValueError("Please specify cluster, datastore or network(vlans) selector!")
    source_data = source_provider.data.get(selector, [])
    target_data = provider.data.get(selector, [])
    if not (source_data and target_data):
        pytest.skip("No source and target data")
    if selector == "clusters":
        sources = source_data or None
        targets = target_data or None
        component_cls = InfraMapping.ClusterComponent
    elif selector == "datastores":
        # Ignoring target_type for osp and setting new value
        if provider.one_of(OpenStackProvider):
            target_type = "volume"
        sources = [d.name for d in source_data if d.type == source_type]
        targets = [d.name for d in target_data if d.type == target_type]
        component_cls = InfraMapping.DatastoreComponent
    else:
        sources = [v for v in source_data if v == source_type]
        targets = [v for v in target_data if v == target_type]
        component_cls = InfraMapping.NetworkComponent
    # Bug fix: skip BEFORE indexing. The original built the component (indexing
    # sources[0]/targets[0]) and only checked emptiness afterwards, so a filter
    # that matched nothing raised IndexError instead of skipping the test.
    if not (sources and targets):
        pytest.skip("No data for source or target {} in providers.".format(selector))
    return component_cls([partial_match(sources[0])], [partial_match(targets[0])])
|
RedHatQE/cfme_tests
|
cfme/fixtures/v2v_fixtures.py
|
Python
|
gpl-2.0
| 22,048
|
# This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Author: Cyril Jaquier
#
# $Revision: 690 $
__author__ = "Cyril Jaquier"
__version__ = "$Revision: 690 $"
__date__ = "$Date: 2008-05-12 10:34:42 +0200 (Mon, 12 May 2008) $"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier"
__license__ = "GPL"
import logging, re, glob
from configreader import ConfigReader
from filterreader import FilterReader
from actionreader import ActionReader
# Gets the instance of the logger.
logSys = logging.getLogger("fail2ban.client.config")
class JailReader(ConfigReader):
    """Reads one jail section of the fail2ban configuration and converts
    it into a stream of fail2ban server commands."""

    # Matches "actionname" or "actionname[param=value,...]".
    actionCRE = re.compile("^((?:\w|-|_|\.)+)(?:\[(.*)\])?$")

    def __init__(self, name):
        ConfigReader.__init__(self)
        self.__name = name
        self.__filter = None        # FilterReader for this jail, set in getOptions()
        self.__actions = list()     # ActionReader objects, filled in getOptions()

    def setName(self, value):
        self.__name = value

    def getName(self):
        return self.__name

    def read(self):
        # Jail definitions live in the "jail" configuration file(s).
        ConfigReader.read(self, "jail")

    def isEnabled(self):
        return self.__opts["enabled"]

    def getOptions(self):
        """Read this jail's options plus its filter and action definitions.

        Returns False when the filter or any action cannot be read,
        True otherwise (including for disabled jails).
        """
        # [type, option name, default value] triples.
        opts = [["bool", "enabled", "false"],
                ["string", "logpath", "/var/log/messages"],
                ["string", "backend", "auto"],
                ["int", "maxretry", 3],
                ["int", "findtime", 600],
                ["int", "bantime", 600],
                ["string", "failregex", None],
                ["string", "ignoreregex", None],
                ["string", "ignoreip", None],
                ["string", "filter", ""],
                ["string", "action", ""]]
        self.__opts = ConfigReader.getOptions(self, self.__name, opts)
        if self.isEnabled():
            # Read filter
            self.__filter = FilterReader(self.__opts["filter"], self.__name)
            ret = self.__filter.read()
            if ret:
                # Let the filter merge its own options into ours.
                self.__filter.getOptions(self.__opts)
            else:
                logSys.error("Unable to read the filter")
                return False
            # Read action; multiple actions are newline-separated.
            for act in self.__opts["action"].split('\n'):
                try:
                    splitAct = JailReader.splitAction(act)
                    action = ActionReader(splitAct, self.__name)
                    ret = action.read()
                    if ret:
                        action.getOptions(self.__opts)
                        self.__actions.append(action)
                    else:
                        raise AttributeError("Unable to read action")
                except Exception, e:
                    logSys.error("Error in action definition " + act)
                    logSys.debug(e)
                    return False
        return True

    def convert(self):
        """Convert the parsed jail options into a list of server commands
        ([["add", name, backend], ["set", name, ...], ...])."""
        stream = []
        for opt in self.__opts:
            if opt == "logpath":
                # The configured logpath may contain wildcards; expand them.
                for path in self.__opts[opt].split("\n"):
                    pathList = glob.glob(path)
                    if len(pathList) == 0:
                        logSys.error("No file found for " + path)
                    for p in pathList:
                        stream.append(["set", self.__name, "addlogpath", p])
            elif opt == "backend":
                # Remembered so the "add" command inserted below can carry it.
                backend = self.__opts[opt]
            elif opt == "maxretry":
                stream.append(["set", self.__name, "maxretry", self.__opts[opt]])
            elif opt == "ignoreip":
                for ip in self.__opts[opt].split():
                    # Do not send a command if the rule is empty.
                    if ip != '':
                        stream.append(["set", self.__name, "addignoreip", ip])
            elif opt == "findtime":
                stream.append(["set", self.__name, "findtime", self.__opts[opt]])
            elif opt == "bantime":
                stream.append(["set", self.__name, "bantime", self.__opts[opt]])
            elif opt == "failregex":
                stream.append(["set", self.__name, "failregex", self.__opts[opt]])
            elif opt == "ignoreregex":
                for regex in self.__opts[opt].split('\n'):
                    # Do not send a command if the rule is empty.
                    if regex != '':
                        stream.append(["set", self.__name, "addignoreregex", regex])
        stream.extend(self.__filter.convert())
        for action in self.__actions:
            stream.extend(action.convert())
        # The jail itself must be created first, with its backend.
        stream.insert(0, ["add", self.__name, backend])
        return stream

    #@staticmethod
    def splitAction(action):
        """Split "name[p1=v1,p2=v2]" into [name, {p1: v1, p2: v2}].

        Commas inside single or double quotes are protected from the split.
        """
        m = JailReader.actionCRE.match(action)
        d = dict()
        if not m.group(2) == None:
            # Huge bad hack :( This method really sucks. TODO Reimplement it.
            # Protect quoted commas by replacing them with a placeholder
            # before splitting. NOTE(review): the quote characters themselves
            # are dropped from the rebuilt string — presumably intentional.
            actions = ""
            escapeChar = None
            allowComma = False
            for c in m.group(2):
                if c in ('"', "'") and not allowComma:
                    # Start
                    escapeChar = c
                    allowComma = True
                elif c == escapeChar:
                    # End
                    escapeChar = None
                    allowComma = False
                else:
                    if c == ',' and allowComma:
                        actions += "<COMMA>"
                    else:
                        actions += c
            # Split using ,
            actionsSplit = actions.split(',')
            # Replace the tag <COMMA> with ,
            actionsSplit = [n.replace("<COMMA>", ',') for n in actionsSplit]
            for param in actionsSplit:
                p = param.split('=')
                try:
                    d[p[0].strip()] = p[1].strip()
                except IndexError:
                    logSys.error("Invalid argument %s in '%s'" % (p, m.group(2)))
        return [m.group(1), d]
    splitAction = staticmethod(splitAction)
|
carlgao/lenga
|
images/lenny64-peon/usr/share/fail2ban/client/jailreader.py
|
Python
|
mit
| 5,180
|
#!python3
"""Run the shared test harness against every public function of the module."""
from help_in_test import tests
import import_from_parent_dir
# The following line needs to be changed when testing another module.
import largest_prime_factor as cur_module

# Collect the module's public (non-underscore) names.
# Idiom fix: use "not x.startswith(...)" rather than "... is False".
list_funcs = [name for name in dir(cur_module)
              if not name.startswith('_')]
for name in list_funcs:
    func = getattr(cur_module, name)
    # The following tests need to be changed for the module under test.
    test1 = True
    list_tests = [test1]
    # The number in the following call must match the function's arity.
    tests(func, list_tests, num_args=1)
|
anshbansal/general
|
Python3/functions/tests/test_largest_prime_factor.py
|
Python
|
mit
| 490
|
# Bug fix: this file contained unresolved git merge-conflict markers
# (<<<<<<< / ======= / >>>>>>>), which made it a syntax error. All three
# conflicting versions were byte-identical, so they collapse to one copy.
from __future__ import unicode_literals

import unittest

if __name__ == "__main__":
    unittest.main()
|
ArcherSys/ArcherSys
|
Lib/test/test_future4.py
|
Python
|
mit
| 455
|
#
# This file contains functions and constants to talk
# to and from a Novation Launchpad via MIDI.
#
# Created by paul for mididings.
from mididings import *
# MEASURES - constants useful for the Pad
# MEASURES - constants useful for the Pad
side = list(range(8))       # square rows/columns 0-7
longside = list(range(9))   # 0-8: includes the extra right column
step = 16                   # vertical gap on pad
FirstCtrl = 104             # ctrl of first toprow key

# COLORS
# Colors on the Launchpad are determined by event velocity/value.
# Each key can be lit with red or green light (or both),
# with brightness 0 (off) - 3 (max).
# For convenience, define also the constants:
black = 4  # better not to use zero
red = 3
orange = 19
green = 48
yellow = 51  # better not to use 127

# If you want a darker variant of the above, use fractions (thirds).
# For example, green*2/3 is darker green. (Not for orange!)


def color(g, r):
    "Launchpad color value for the given amounts of green and red (0-3)."
    return black if g + r == 0 else 16 * g + r
# KEYS
# Each key on the Launchpad is activated by a MIDI event.
# The square keys and the right keys are notes,
# the top keys are control events.

# Rows and columns given the keys (starting from 0)
def row(x):
    "Row index of a square/right key event."
    quotient, _ = divmod(x, step)
    return quotient


def column(x):
    "Column index of a key event (the right column is 8)."
    _, remainder = divmod(x, step)
    return remainder


def topcol(x):
    "Column index of a top-row key, derived from its ctrl number."
    return x - FirstCtrl
# Now the inverses: functions that point exactly to a key on the Launchpad
def right(row):
    "Note number of the right-column key in the given row."
    return row * step + 8


def square(row, col):
    "Note number of the square key at (row, col)."
    return row * step + col


def top(col):
    "Ctrl number of the top-row key at the given column."
    return FirstCtrl + col
# KEY FILTERS
# First filters for notes from square, top, and right keys.
_square_notes = [square(r, c) for r in side for c in side]
OnlySquare = Filter(NOTE) >> KeyFilter(notes=_square_notes)
OnlyRight = KeyFilter(notes=[right(r) for r in side])
OnlyTop = Filter(CTRL) >> CtrlFilter(FirstCtrl + i for i in side)
# Now filters for rows, colums, and single keys.
def RowSqFilter(row):
    "Select only square notes from the given row (right key excluded)."
    return KeyFilter(square(row, 0), right(row))


def RowFilter(row):
    "Select square and right notes from the given row."
    return KeyFilter(square(row, 0), right(row) + 1)


def ColumnFilter(col):
    "Select only square notes from the given column."
    return KeyFilter(notes=[square(r, col) for r in side])


def TopFilter(col):
    "Select only the given key from the top row."
    return CtrlFilter(top(col))


def RightFilter(row):
    "Select only the given key from the right column."
    return KeyFilter(right(row))


def SquareFilter(row, col):
    "Select only the given square key."
    return KeyFilter(square(row, col))
# KEY GENERATORS
def SquareKey(row, col):
    "Generate the note event for the square key at (row, col)."
    return Key(square(row, col))


def RightKey(row):
    "Generate the note event for the right key in the given row."
    return Key(right(row))


def TopKey(col, val):
    "Generate the ctrl event (with value val) for the top key at col."
    return Ctrl(top(col), val)
# NOTES
A = 21
B = 23
C = 24
D = 26
E = 28
F = 29
G = 31

Octave = 12  # semitones


def _scale(*semitones):
    "Map scale degrees 0-7 to intervals in semitones."
    return dict(enumerate(semitones))


minors = _scale(0, 2, 3, 5, 7, 8, 10, 12)
minharms = _scale(0, 2, 3, 5, 7, 8, 10, 11)  # harmonic: raised 7th
majors = _scale(0, 2, 4, 5, 7, 9, 11, 12)
dorics = _scale(0, 2, 3, 5, 7, 9, 10, 12)
phrygians = _scale(0, 1, 3, 5, 7, 8, 10, 12)
# I only use these scales - feel free to add your own!

# Now the same thing, but to feed into Transpose:
def _offsets(scale):
    "Per-degree transpose offsets: interval minus degree."
    return [scale[degree] - degree for degree in side]


Minor = _offsets(minors)
MinHarm = _offsets(minharms)
Major = _offsets(majors)
Doric = _offsets(dorics)
Phrygian = _offsets(phrygians)
# How to use it in practice:
def OctFilter(col, tonic):
    "Filter the notes tonic+col across every octave on the pad."
    notes = [tonic + col + octave * Octave for octave in longside]
    return KeyFilter(notes=notes)


def MakeScale(tonic, scale):
    "Per-column units that transpose chromatic input into the scale."
    return [OctFilter(i, tonic) >> Transpose(scale[i]) for i in side]
|
m4773rcl0ud/launchpaddings
|
launchpad_utils.py
|
Python
|
gpl-3.0
| 4,442
|
# ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from abc import ABCMeta
from abc import abstractmethod
from collections import namedtuple
from eos.const.eos import Restriction
from eos.const.eve import AttrId
from eos.restriction.exception import RestrictionValidationError
from .base import BaseRestriction
# Error payload attached to each item that over-consumes a resource.
ResourceErrorData = namedtuple(
    'ResourceErrorData', 'total_use output item_use')
class ResourceRestriction(BaseRestriction, metaclass=ABCMeta):
    """Base class for all resource restrictions.
    Resources in this context is something produced by ship/character and
    consumed by other items.
    """

    def __init__(self, fit):
        self.__fit = fit

    @property
    @abstractmethod
    def _stat_name(self):
        """This name will be used to get numbers from stats service."""
        ...

    @property
    @abstractmethod
    def _use_attr_id(self):
        """Attribute ID read from each item for its resource use."""
        ...

    def validate(self):
        """Raise RestrictionValidationError when use exceeds output."""
        # Use stats module to get resource use and output.
        stats = getattr(self.__fit.stats, self._stat_name)
        used = stats.used
        # Output may be None, so fall back to 0 in that case.
        produced = stats.output or 0
        if used <= produced:
            # Within budget - nothing to report.
            return
        offenders = {}
        for item in stats._users:
            item_use = item.attrs[self._use_attr_id]
            # Skip items which do not actually consume the resource.
            if item_use <= 0:
                continue
            offenders[item] = ResourceErrorData(
                total_use=used,
                output=produced,
                item_use=item_use)
        raise RestrictionValidationError(offenders)
class CpuRestriction(ResourceRestriction):
    """CPU use by items should not exceed ship CPU output.
    Details:
        For validation, stats module data is used.
    """
    # Restriction identifier / stats attribute / per-item use attribute.
    type = Restriction.cpu
    _stat_name = 'cpu'
    _use_attr_id = AttrId.cpu
class PowergridRestriction(ResourceRestriction):
    """Power grid use by items should not exceed ship power grid output.
    Details:
        For validation, stats module data is used.
    """
    # Restriction identifier / stats attribute / per-item use attribute.
    type = Restriction.powergrid
    _stat_name = 'powergrid'
    _use_attr_id = AttrId.power
class CalibrationRestriction(ResourceRestriction):
    """Calibration use by items should not exceed ship calibration output.
    Details:
        For validation, stats module data is used.
    """
    # Restriction identifier / stats attribute / per-item use attribute.
    type = Restriction.calibration
    _stat_name = 'calibration'
    _use_attr_id = AttrId.upgrade_cost
class DroneBayVolumeRestriction(ResourceRestriction):
    """Drone bay volume use by items should not exceed ship drone bay volume.
    Details:
        For validation, stats module data is used.
    """
    # Restriction identifier / stats attribute / per-item use attribute.
    type = Restriction.dronebay_volume
    _stat_name = 'dronebay'
    _use_attr_id = AttrId.volume
class DroneBandwidthRestriction(ResourceRestriction):
    """Drone bandwidth use by items should not exceed ship drone bandwidth.
    Details:
        For validation, stats module data is used.
    """
    # Restriction identifier / stats attribute / per-item use attribute.
    type = Restriction.drone_bandwidth
    _stat_name = 'drone_bandwidth'
    _use_attr_id = AttrId.drone_bandwidth_used
|
pyfa-org/eos
|
eos/restriction/restriction/resource.py
|
Python
|
lgpl-3.0
| 4,071
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of CKAN DataStore Restful Extension.
# CKAN DataStore Restful Extension is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# CKAN DataStore Restful Extension is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with CKAN DataStore Restful Extension. If not, see <http://www.gnu.org/licenses/>.
import ckan.plugins as plugins
# Route matching conditions, keyed by HTTP verb.
GET, PUT, POST, DELETE = ({'method': [verb]}
                          for verb in ('GET', 'PUT', 'POST', 'DELETE'))
class RestfulDataStorePlugin(plugins.SingletonPlugin):
    # CKAN plugin that exposes the datastore through RESTful routes.
    plugins.implements(plugins.IRoutes, inherit=True)

    def after_map(self, m):
        """Register the RESTful datastore routes on the route map *m*."""
        controller = ('ckanext.datastore_restful.controller:'
                      'RestfulDatastoreController')
        # (path, controller action, HTTP-verb condition), in the same
        # order the routes were originally declared.
        routes = (
            # Create/update the resource
            ('/resource/{resource_id}', 'upsert_resource', PUT),
            # Get the entire resource
            ('/resource/{resource_id}', 'structure', GET),
            # Delete a resource
            ('/resource/{resource_id}', 'delete_resource', DELETE),
            # Get the entire collection of entries
            ('/resource/{resource_id}/entry', 'search_entries', GET),
            # Insert a entry or a set of entries
            ('/resource/{resource_id}/entry', 'create_entries', POST),
            # Create/update an entry
            ('/resource/{resource_id}/entry/{entry_id}', 'upsert_entry', PUT),
            # Get an entry
            ('/resource/{resource_id}/entry/{entry_id}', 'get_entry', GET),
            # Delete an entry
            ('/resource/{resource_id}/entry/{entry_id}', 'delete_entry',
             DELETE),
            # Search SQL
            ('/search_sql', 'sql', GET),
        )
        for path, action, conditions in routes:
            m.connect(path, controller=controller, action=action,
                      conditions=conditions)
        return m
|
conwetlab/ckanext-datastore_restful
|
ckanext/datastore_restful/plugin.py
|
Python
|
agpl-3.0
| 3,308
|
from django.conf.urls import include, url
from django.apps import apps as djangoapps
def load_namespaced_urls(urlpatterns, *app_labels):
    """
    :param patterns urlpatterns: The django url patterns object

    Append each listed app's urls.py to *urlpatterns*, namespaced to
    the app label.
    """
    for label in app_labels:
        app_name = djangoapps.get_app_config(label).name
        pattern = url(label + '/',
                      include(app_name + '.urls', namespace=label))
        urlpatterns.append(pattern)
|
dgreisen-cfpb/pantheon
|
devdash/tiles/urls.py
|
Python
|
cc0-1.0
| 482
|
import pytest
from pytest import approx
import time
from fakeredis import FakeStrictRedis
from celery_once.backends.redis import parse_url, Redis
from celery_once.tasks import AlreadyQueued
from redis.lock import Lock as RedisLock
def test_parse_redis_details_tcp_default_args():
    assert parse_url('redis://localhost:6379/') == {
        'host': 'localhost', 'port': 6379}


def test_parse_url_tcp_with_db():
    assert parse_url('redis://localhost:6379/3') == {
        'host': 'localhost', 'port': 6379, 'db': 3}


def test_parse_url_tcp_no_port():
    assert parse_url('redis://localhost') == {'host': 'localhost'}


def test_parse_url_tcp_with_password():
    assert parse_url('redis://:ohai@localhost:6379') == {
        'host': 'localhost', 'port': 6379, 'password': 'ohai'}


def test_parse_url_unix_sock_no_options():
    assert parse_url('redis+socket:///var/run/redis/redis.sock') == {
        'unix_socket_path': '/var/run/redis/redis.sock'}


def test_parse_url_unix_sock_with_options():
    assert parse_url(
        'redis+socket:///var/run/redis/redis.sock?db=2&socket_timeout=2'
    ) == {
        'unix_socket_path': '/var/run/redis/redis.sock',
        'db': 2,
        'socket_timeout': 2.0,
    }


def test_parse_url_with_ssl():
    assert parse_url('rediss://localhost:6379/3') == {
        'host': 'localhost', 'port': 6379, 'db': 3, 'ssl': True}


def test_parse_unsupported_url():
    # Only redis://, rediss:// and redis+socket:// schemes are accepted.
    with pytest.raises(ValueError):
        parse_url('amqp://guest:guest@localhost:5672/potato')
@pytest.fixture()
def redis(monkeypatch):
    # Replace the backend's redis client with an in-memory fake so no
    # real server is needed; flush to isolate each test.
    fake_redis = FakeStrictRedis()
    fake_redis.flushall()
    monkeypatch.setattr("celery_once.backends.redis.Redis.redis", fake_redis)
    return fake_redis
@pytest.fixture()
def backend():
    # Backend under test; the URL is never contacted (client is patched).
    backend = Redis({'url': "redis://localhost:1337"})
    return backend
def test_redis_raise_or_lock(redis, backend):
    # Acquiring an uncontested lock should create the key.
    assert redis.get("test") is None
    backend.raise_or_lock(key="test", timeout=60)
    assert redis.get("test") is not None
def test_redis_raise_or_lock_locked(redis, backend):
    # Set to expire in 30 seconds!
    lock = RedisLock(redis, "test", timeout=30)
    lock.acquire()
    # A held lock must raise, reporting roughly the remaining TTL.
    with pytest.raises(AlreadyQueued) as e:
        backend.raise_or_lock(key="test", timeout=60)
    assert e.value.countdown == approx(30.0, rel=0.1)
    assert "Expires in" in e.value.message
def test_redis_raise_or_lock_locked_and_expired(redis, backend):
    lock = RedisLock(redis, "test", timeout=1)
    lock.acquire()
    time.sleep(1)  # wait for lock to expire
    # After expiry the lock is re-acquirable without raising.
    backend.raise_or_lock(key="test", timeout=60)
    assert redis.get("test") is not None
def test_redis_clear_lock(redis, backend):
    redis.set("test", 1326499200 + 30)
    backend.clear_lock("test")
    assert redis.get("test") is None
def test_redis_cached_property(mocker, monkeypatch):
    # Remove any side effect previous tests could have had
    monkeypatch.setattr('celery_once.backends.redis.redis', None)
    mock_parse = mocker.patch('celery_once.backends.redis.parse_url')
    mock_parse.return_value = {
        'host': "localhost"
    }
    # Despite the class being inited twice, should only setup once.
    Redis({
        'url': "redis://localhost:1337"
    })
    Redis({})
    assert mock_parse.call_count == 1
|
cameronmaske/celery-once
|
tests/unit/backends/test_redis.py
|
Python
|
bsd-2-clause
| 3,364
|
from i3pystatus import SettingsBase, IntervalModule, formatp
from i3pystatus.core.util import user_open, internet, require
class Backend(SettingsBase):
    # Marker base class for weather backends; concrete backends declare
    # their own settings tuple.
    settings = ()
class Weather(IntervalModule):
    '''
    This is a generic weather-checker which must use a configured weather
    backend. For list of all available backends see :ref:`weatherbackends`.
    Left clicking on the module will launch the forecast page for the location
    being checked.
    .. _weather-formatters:
    .. rubric:: Available formatters
    * `{city}` — Location of weather observation
    * `{condition}` — Current weather condition (Rain, Snow, Overcast, etc.)
    * `{icon}` — Icon representing the current weather condition
    * `{observation_time}` — Time of weather observation (supports strftime format flags)
    * `{current_temp}` — Current temperature, excluding unit
    * `{low_temp}` — Forecasted low temperature, excluding unit
    * `{high_temp}` — Forecasted high temperature, excluding unit (may be
      empty in the late afternoon)
    * `{temp_unit}` — Either ``°C`` or ``°F``, depending on whether metric or
      imperial units are being used
    * `{feelslike}` — "Feels Like" temperature, excluding unit
    * `{dewpoint}` — Dewpoint temperature, excluding unit
    * `{wind_speed}` — Wind speed, excluding unit
    * `{wind_unit}` — Either ``kph`` or ``mph``, depending on whether metric or
      imperial units are being used
    * `{wind_direction}` — Wind direction
    * `{wind_gust}` — Speed of wind gusts in mph/kph, excluding unit
    * `{pressure}` — Barometric pressure, excluding unit
    * `{pressure_unit}` — ``mb`` or ``in``, depending on whether metric or
      imperial units are being used
    * `{pressure_trend}` — ``+`` if rising, ``-`` if falling, or an empty
      string if the pressure is steady (neither rising nor falling)
    * `{visibility}` — Visibility distance, excluding unit
    * `{visibility_unit}` — Either ``km`` or ``mi``, depending on whether
      metric or imperial units are being used
    * `{humidity}` — Current humidity, excluding percentage symbol
    * `{uv_index}` — UV Index
    This module supports the :ref:`formatp <formatp>` extended string format
    syntax. This allows for values to be hidden when they evaluate as False.
    This comes in handy for the :py:mod:`weathercom <.weather.weathercom>`
    backend, which at a certain point in the afternoon will have a blank
    ``{high_temp}`` value. Using the following snippet in your format string
    will only display the high temperature information if it is not blank:
    ::
        {current_temp}{temp_unit}[ Hi: {high_temp}[{temp_unit}]] Lo: {low_temp}{temp_unit}
    Brackets are evaluated from the outside-in, so the fact that the only
    formatter in the outer block (``{high_temp}``) is empty would keep the
    inner block from being evaluated at all, and entire block would not be
    displayed.
    See the following links for usage examples for the available weather
    backends:
    - :ref:`Weather.com <weather-usage-weathercom>`
    - :ref:`Weather Underground <weather-usage-wunderground>`
    '''
    settings = (
        ('colorize', 'Vary the color depending on the current conditions.'),
        ('color_icons', 'Dictionary mapping weather conditions to tuples '
                        'containing a UTF-8 code for the icon, and the color '
                        'to be used.'),
        ('color', 'Display color (or fallback color if ``colorize`` is True). '
                  'If not specified, falls back to default i3bar color.'),
        ('backend', 'Weather backend instance'),
        'interval',
        'format',
    )
    required = ('backend',)
    colorize = False
    # Condition name -> (icon, color). 'default' is the fallback entry.
    color_icons = {
        'Fair': (u'\u263c', '#ffcc00'),
        'Cloudy': (u'\u2601', '#f8f8ff'),
        'Partly Cloudy': (u'\u2601', '#f8f8ff'),  # \u26c5 is not in many fonts
        'Rainy': (u'\u26c8', '#cbd2c0'),
        'Thunderstorm': (u'\u03de', '#cbd2c0'),
        'Sunny': (u'\u2600', '#ffff00'),
        'Snow': (u'\u2603', '#ffffff'),
        'default': ('', None),
    }
    color = None
    backend = None
    interval = 1800
    format = '{current_temp}{temp_unit}'
    on_leftclick = 'open_forecast_url'
    def open_forecast_url(self):
        # Open the backend's forecast page in the user's browser, if any.
        if self.backend.forecast_url and self.backend.forecast_url != 'N/A':
            user_open(self.backend.forecast_url)
    def init(self):
        pass
    def get_color_data(self, condition):
        '''
        Disambiguate similarly-named weather conditions, and return the icon
        and color that match.
        '''
        if condition not in self.color_icons:
            # Check for similarly-named conditions if no exact match found
            condition_lc = condition.lower()
            if 'cloudy' in condition_lc:
                if 'partly' in condition_lc:
                    condition = 'Partly Cloudy'
                else:
                    condition = 'Cloudy'
            elif 'thunder' in condition_lc or 't-storm' in condition_lc:
                condition = 'Thunderstorm'
            elif 'snow' in condition_lc:
                condition = 'Snow'
            elif 'rain' in condition_lc or 'showers' in condition_lc:
                condition = 'Rainy'
            elif 'sunny' in condition_lc:
                condition = 'Sunny'
            elif 'clear' in condition_lc or 'fair' in condition_lc:
                condition = 'Fair'
        # Fall back to the 'default' icon when still unrecognized.
        return self.color_icons['default'] \
            if condition not in self.color_icons \
            else self.color_icons[condition]
    @require(internet)
    def run(self):
        # Fetch fresh data from the backend and render the status output.
        data = self.backend.weather_data()
        data['icon'], condition_color = self.get_color_data(data['condition'])
        color = condition_color if self.colorize else self.color
        self.output = {
            'full_text': formatp(self.format, **data).strip(),
            'color': color,
        }
|
eBrnd/i3pystatus
|
i3pystatus/weather/__init__.py
|
Python
|
mit
| 6,014
|
# Copyright 2019 The Dreamer Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
from dreamer.tools import count_dataset
from dreamer.tools import gif_summary
from dreamer.tools import image_strip_summary
from dreamer.tools import shape as shapelib
def plot_summary(titles, lines, labels, name):
  """Render stacked line plots with matplotlib as an image summary.

  Matplotlib is not thread safe, so callers serialize plot summaries via
  control dependencies; this function itself renders a single figure.
  """
  def body_fn(lines):
    # Runs inside tf.py_func: receives numpy arrays, returns an HWC
    # uint8 image of the rendered figure.
    fig, axes = plt.subplots(
        nrows=len(titles), ncols=1, sharex=True, sharey=False,
        squeeze=False, figsize=(6, 3 * len(lines)))
    axes = axes[:, 0]
    for index, ax in enumerate(axes):
      ax.set_title(titles[index])
      for line, label in zip(lines[index], labels[index]):
        ax.plot(line, label=label)
      if any(labels[index]):
        ax.legend(frameon=False)
    fig.tight_layout()
    fig.canvas.draw()
    # Grab the rendered RGB buffer and reshape it to (height, width, 3).
    image = np.frombuffer(fig.canvas.tostring_rgb(), dtype=np.uint8)
    image = image.reshape(fig.canvas.get_width_height()[::-1] + (3,))
    plt.close(fig)
    return image
  image = tf.py_func(body_fn, (lines,), tf.uint8)
  image = image[None]  # Add a batch dimension for tf.summary.image.
  summary = tf.summary.image(name, image)
  return summary
def data_summaries(
    data, postprocess_fn, histograms=False, max_batch=6, name='data'):
  """Summarize a data batch: optional histograms plus an image strip."""
  summaries = []
  with tf.variable_scope(name):
    if histograms:
      # Images get their own strip summary below, not a histogram.
      summaries += [
          tf.summary.histogram(key, value)
          for key, value in data.items() if key not in ('image',)]
    image = data['image'][:max_batch]
    if postprocess_fn:
      image = postprocess_fn(image)
    summaries.append(image_strip_summary.image_strip_summary('image', image))
  return summaries
def dataset_summaries(directory, name='dataset'):
  """Summarize the number of episodes stored in the dataset directory."""
  with tf.variable_scope(name):
    episode_count = count_dataset.count_dataset(directory)
    return [tf.summary.scalar('episodes', episode_count)]
def state_summaries(
    cell, prior, posterior, histograms=False, name='state'):
  """Summarize latent-state statistics: entropies, stddevs, divergence."""
  summaries = []
  divergence = cell.divergence_from_states(posterior, prior)
  # Convert raw state dicts into distribution objects.
  prior = cell.dist_from_state(prior)
  posterior = cell.dist_from_state(posterior)
  prior_entropy = prior.entropy()
  posterior_entropy = posterior.entropy()
  # Histograms fail on NaNs, so zero them out before summarizing.
  nan_to_num = lambda x: tf.where(tf.is_nan(x), tf.zeros_like(x), x)
  with tf.variable_scope(name):
    if histograms:
      summaries.append(tf.summary.histogram(
          'prior_entropy_hist', nan_to_num(prior_entropy)))
    summaries.append(tf.summary.scalar(
        'prior_entropy', tf.reduce_mean(prior_entropy)))
    summaries.append(tf.summary.scalar(
        'prior_std', tf.reduce_mean(prior.stddev())))
    if histograms:
      summaries.append(tf.summary.histogram(
          'posterior_entropy_hist', nan_to_num(posterior_entropy)))
    summaries.append(tf.summary.scalar(
        'posterior_entropy', tf.reduce_mean(posterior_entropy)))
    summaries.append(tf.summary.scalar(
        'posterior_std', tf.reduce_mean(posterior.stddev())))
    summaries.append(tf.summary.scalar(
        'divergence', tf.reduce_mean(divergence)))
  return summaries
def dist_summaries(dists, obs, name='dist_summaries'):
  """Summarize mode/stddev moments of each distribution; when a matching
  observation exists, also its log-prob and absolute error."""
  summaries = []
  with tf.variable_scope(name):
    # NOTE(review): the loop variable shadows the outer `name` argument;
    # harmless here since `name` is not used again after variable_scope.
    for name, dist in dists.items():
      mode = tf.cast(dist.mode(), tf.float32)
      # Moments over all axes give scalar mean/std of the mode tensor.
      mode_mean, mode_var = tf.nn.moments(mode, list(range(mode.shape.ndims)))
      mode_std = tf.sqrt(mode_var)
      summaries.append(tf.summary.scalar(name + '_mode_mean', mode_mean))
      summaries.append(tf.summary.scalar(name + '_mode_std', mode_std))
      std = dist.stddev()
      std_mean, std_var = tf.nn.moments(std, list(range(std.shape.ndims)))
      std_std = tf.sqrt(std_var)
      summaries.append(tf.summary.scalar(name + '_std_mean', std_mean))
      summaries.append(tf.summary.scalar(name + '_std_std', std_std))
      # For doubly-wrapped distributions also report the innermost stddev.
      if hasattr(dist, 'distribution') and hasattr(
          dist.distribution, 'distribution'):
        inner = dist.distribution.distribution
        inner_std = tf.reduce_mean(inner.stddev())
        summaries.append(tf.summary.scalar(name + '_inner_std', inner_std))
      if name in obs:
        log_prob = tf.reduce_mean(dist.log_prob(obs[name]))
        summaries.append(tf.summary.scalar(name + '_log_prob', log_prob))
        abs_error = tf.reduce_mean(tf.abs(mode - obs[name]))
        summaries.append(tf.summary.scalar(name + '_abs_error', abs_error))
  return summaries
def image_summaries(dist, target, name='image', max_batch=6):
  """Summarize image predictions: a strip and an animated GIF comparing
  target, prediction, and their error."""
  summaries = []
  with tf.variable_scope(name):
    image = dist.mode()[:max_batch]
    target = target[:max_batch]
    # Map the signed difference into [0, 1] for visualization.
    error = ((image - target) + 1) / 2
    # empty_frame = 0 * target[:max_batch, :1]
    # change = tf.concat([empty_frame, image[:, 1:] - image[:, :-1]], 1)
    # change = (change + 1) / 2
    summaries.append(image_strip_summary.image_strip_summary(
        'prediction', image))
    # summaries.append(image_strip_summary.image_strip_summary(
    #     'change', change))
    # summaries.append(image_strip_summary.image_strip_summary(
    #     'error', error))
    # Concat prediction and target vertically.
    frames = tf.concat([target, image, error], 2)
    # Stack batch entries horizontally.
    frames = tf.transpose(frames, [1, 2, 0, 3, 4])
    s = shapelib.shape(frames)
    frames = tf.reshape(frames, [s[0], s[1], s[2] * s[3], s[4]])
    summaries.append(gif_summary.gif_summary(
        'gif', frames[None], max_outputs=1, fps=20))
  return summaries
def objective_summaries(objectives, name='objectives'):
  """Create one scalar summary per training objective."""
  with tf.variable_scope(name):
    return [
        tf.summary.scalar(objective.name, objective.value)
        for objective in objectives]
def prediction_summaries(dists, data, state, name='state'):
  """Plot per-key prediction-vs-truth trajectories and their log-probs."""
  summaries = []
  with tf.variable_scope(name):
    # Predictions.
    log_probs = {}
    for key, dist in dists.items():
      if key in ('image',):
        continue
      if key not in data:
        continue
      # We only look at the first example in the batch.
      log_prob = dist.log_prob(data[key])[0]
      prediction = dist.mode()[0]
      truth = data[key][0]
      plot_name = key
      # Ensure that there is a feature dimension.
      if prediction.shape.ndims == 1:
        prediction = prediction[:, None]
        truth = truth[:, None]
      # One (prediction, truth) line pair per feature dimension.
      prediction = tf.unstack(tf.transpose(prediction, (1, 0)))
      truth = tf.unstack(tf.transpose(truth, (1, 0)))
      lines = list(zip(prediction, truth))
      titles = ['{} {}'.format(key.title(), i) for i in range(len(lines))]
      labels = [['Prediction', 'Truth']] * len(lines)
      plot_name = '{}_trajectory'.format(key)
      # The control dependencies are needed because rendering in matplotlib
      # uses global state, so rendering two plots in parallel interferes.
      with tf.control_dependencies(summaries):
        summaries.append(plot_summary(titles, lines, labels, plot_name))
      log_probs[key] = log_prob
    # Render one combined plot of the per-key log probabilities.
    log_probs = sorted(log_probs.items(), key=lambda x: x[0])
    titles, lines = zip(*log_probs)
    titles = [title.title() for title in titles]
    lines = [[line] for line in lines]
    labels = [[None]] * len(titles)
    plot_name = 'logprobs'
    with tf.control_dependencies(summaries):
      summaries.append(plot_summary(titles, lines, labels, plot_name))
  return summaries
|
google-research/dreamer
|
dreamer/tools/summary.py
|
Python
|
apache-2.0
| 8,108
|
# -*- coding: utf-8 -*-
__license__ = 'GPL 3'
__copyright__ = '2009, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'
from PyQt5.Qt import QListWidgetItem, Qt
from calibre.gui2.convert.txt_input_ui import Ui_Form
from calibre.gui2.convert import Widget
from calibre.ebooks.conversion.plugins.txt_input import MD_EXTENSIONS
class PluginWidget(Widget, Ui_Form):
    # Conversion-options widget for TXT input (Python 2 / Qt 5).
    TITLE = _('TXT input')
    HELP = _('Options specific to')+' TXT '+_('input')
    COMMIT_NAME = 'txt_input'
    ICON = I('mimetypes/txt.png')
    def __init__(self, parent, get_option, get_help, db=None, book_id=None):
        Widget.__init__(self, parent,
                ['paragraph_type', 'formatting_type', 'markdown_extensions',
                'preserve_spaces', 'txt_in_remove_indents'])
        self.db, self.book_id = db, book_id
        # Populate the combo boxes from the option metadata.
        for x in get_option('paragraph_type').option.choices:
            self.opt_paragraph_type.addItem(x)
        for x in get_option('formatting_type').option.choices:
            self.opt_formatting_type.addItem(x)
        # Map markdown extension name -> its checkable list item.
        self.md_map = {}
        for name, text in MD_EXTENSIONS.iteritems():
            i = QListWidgetItem('%s - %s' % (name, text), self.opt_markdown_extensions)
            i.setFlags(Qt.ItemIsUserCheckable | Qt.ItemIsEnabled)
            i.setData(Qt.UserRole, name)
            self.md_map[name] = i
        self.initialize_options(get_option, get_help, db, book_id)
    def set_value_handler(self, g, val):
        # Custom handler: the markdown-extensions widget stores its value
        # as a comma-separated string of extension names.
        if g is self.opt_markdown_extensions:
            for i in self.md_map.itervalues():
                i.setCheckState(Qt.Unchecked)
            for x in val.split(','):
                x = x.strip()
                if x in self.md_map:
                    self.md_map[x].setCheckState(Qt.Checked)
            return True
    def get_value_handler(self, g):
        # Serialize checked extensions back to a comma-separated string;
        # all other widgets use the default handler.
        if g is not self.opt_markdown_extensions:
            return Widget.get_value_handler(self, g)
        return ', '.join(unicode(i.data(Qt.UserRole) or '') for i in self.md_map.itervalues() if i.checkState())
    def connect_gui_obj_handler(self, g, f):
        # Only the markdown-extensions list needs a custom change signal.
        if g is not self.opt_markdown_extensions:
            raise NotImplementedError()
        g.itemChanged.connect(lambda item: f())
|
jelly/calibre
|
src/calibre/gui2/convert/txt_input.py
|
Python
|
gpl-3.0
| 2,232
|
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Main Controller"""
import logging
from tg import expose, request, config
from tg.decorators import with_trailing_slash
from tg.flash import TGFlash
from pylons import tmpl_context as c
from allura.app import SitemapEntry
from allura.lib.base import WsgiDispatchController
from allura.lib import plugin
from allura.controllers.error import ErrorController
from allura import model as M
from allura.lib.widgets import project_list as plw
from .auth import AuthController
from .trovecategories import TroveCategoryController
from .search import SearchController, ProjectBrowseController
from .static import NewForgeController
from .site_admin import SiteAdminController
from .rest import RestController
__all__ = ['RootController']
log = logging.getLogger(__name__)
# Render flash messages through the javascript notifier widget.
TGFlash.static_template = '''$('#messages').notify('%(message)s', {status: '%(status)s'});'''
class W:
    # Reusable widget instances shared by controller methods.
    project_summary = plw.ProjectSummary()
class RootController(WsgiDispatchController):
    """
    The root controller for the allura application.
    All the other controllers and WSGI applications should be mounted on this
    controller. For example::
        panel = ControlPanelController()
        another_app = AnotherWSGIApplication()
    Keep in mind that WSGI applications shouldn't be mounted directly: They
    must be wrapped around with :class:`tg.controllers.WSGIAppController`.
    """
    auth = AuthController()
    error = ErrorController()
    nf = NewForgeController()
    search = SearchController()
    rest = RestController()
    categories = TroveCategoryController()
    def __init__(self):
        # Instantiated per request: mount the neighborhood named by the
        # first path segment, if one exists.
        n_url_prefix = '/%s/' % request.path.split('/')[1]
        n = M.Neighborhood.query.get(url_prefix=n_url_prefix)
        # Prefixes starting with '//' denote subdomain neighborhoods,
        # which are not mounted by path.
        if n and not n.url_prefix.startswith('//'):
            n.bind_controller(self)
        self.browse = ProjectBrowseController()
        self.nf.admin = SiteAdminController()
        super(RootController, self).__init__()
    def _setup_request(self):
        # Reset per-request context and authenticate the current user.
        c.project = c.app = None
        c.memoize_cache = {}
        c.user = plugin.AuthenticationProvider.get(
            request).authenticate_request()
        assert c.user is not None, ('c.user should always be at least User.anonymous(). '
                                    'Did you run `paster setup-app` to create the database?')
    def _cleanup_request(self):
        pass
    @expose('jinja:allura:templates/neighborhood_list.html')
    @with_trailing_slash
    def index(self, **kw):
        """Handle the front-page."""
        neighborhoods = M.Neighborhood.query.find().sort('name')
        # Top-level project categories feed the sidebar browse menu.
        categories = M.ProjectCategory.query.find(
            {'parent_id': None}).sort('name').all()
        c.custom_sidebar_menu = [
            SitemapEntry(cat.label, '/browse/' + cat.name) for cat in categories
        ]
        return dict(neighborhoods=neighborhoods, title="All Neighborhoods")
|
apache/incubator-allura
|
Allura/allura/controllers/root.py
|
Python
|
apache-2.0
| 3,789
|
import ipaddress
class IpValidation():
    """
    collection of static helpers used to validate ips and ip ranges
    """
    class InvalidIpException(Exception):
        """
        raised when a string can not be parsed as an ip
        """
        def __init__(self, ip_string):
            """
            :param ip_string: the ip which caused this exception
            :type ip_string: str
            """
            super().__init__('the ip: "%s" is not valid' % ip_string)

    class InvalidRangeException(Exception):
        """
        raised when from/to ip, net address (and optional gateway) do not form a valid range
        """
        def __init__(self, from_ip, to_ip, net_address, gateway_ip=None):
            """
            :param from_ip: the ranges from address
            :type from_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
            :param to_ip: the ranges to address
            :type to_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
            :param net_address: the ranges net address
            :type net_address: ipaddress.IPv4Network or ipaddress.IPv6Network
            :param gateway_ip: optional gateway address
            :type gateway_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
            """
            gateway_info = 'the gateway ({gateway_ip}), '.format(gateway_ip=str(gateway_ip)) if gateway_ip else ''
            message = (
                'invalid range! '
                'the net address ({net_address}), {gateway_info}the from ip ({from_ip}) '
                'and to ip ({to_ip}) do not match'
            ).format(
                net_address=str(net_address),
                gateway_info=gateway_info,
                from_ip=str(from_ip),
                to_ip=str(to_ip),
            )
            super().__init__(message)

    @staticmethod
    def validate_ip_address(ip_string):
        """
        validates an ip given as a string

        :param ip_string: the ip to validate
        :type ip_string: str
        :return: the validated ip
        :rtype: ipaddress.IPv4Address or ipaddress.IPv6Address
        :raises IpValidation.InvalidIpException: if ip is not valid
        """
        return IpValidation._validate_ip_cast(ip_string, ipaddress.ip_address)

    @staticmethod
    def validate_net_address(ip_string):
        """
        validates a net address given as a string

        :param ip_string: the net address to validate
        :type ip_string: str
        :return: the validated net address
        :rtype: ipaddress.IPv4Network or ipaddress.IPv6Network
        :raises IpValidation.InvalidIpException: if the net address is not valid
        """
        return IpValidation._validate_ip_cast(ip_string, ipaddress.ip_network)

    @staticmethod
    def validate_ip_range(from_ip, to_ip, net_address, gateway_ip=None):
        """
        validates an ip range

        :param from_ip: the ranges from address
        :type from_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
        :param to_ip: the ranges to address
        :type to_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
        :param net_address: the ranges net address
        :type net_address: ipaddress.IPv4Network or ipaddress.IPv6Network
        :param gateway_ip: optional gateway address
        :type gateway_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
        :raises IpValidation.InvalidRangeException: if range is not valid
        """
        # the range is valid when both ends (and the gateway, if given) are
        # inside the net, and the range is not reversed
        range_is_valid = (
            from_ip in net_address
            and to_ip in net_address
            and from_ip <= to_ip
            and not (gateway_ip and gateway_ip not in net_address)
        )
        if not range_is_valid:
            raise IpValidation.InvalidRangeException(from_ip, to_ip, net_address, gateway_ip)

    @staticmethod
    def _validate_ip_cast(ip_string, cast_function):
        """
        casts an ip string with the given cast function and converts cast
        failures into InvalidIpException

        :param ip_string: an ip represented as a string
        :type ip_string: str
        :param cast_function: the function used to cast the string ip
        :type cast_function: callable
        :return: the casted ip
        :raises IpValidation.InvalidIpException: if ip is not valid
        """
        try:
            return cast_function(ip_string)
        except ValueError:
            raise IpValidation.InvalidIpException(ip_string)
class NetworkMapper():
    """
    takes care of assigning network settings for a given blueprint, to given interfaces
    """
    class InvalidNetworkSettingsException(Exception):
        """
        is raised if network settings are not valid
        """
        pass

    class NoMappingFoundException(Exception):
        """
        is raised if there is a ip which no mapping was found for
        """
        pass

    # a mapping whose net contains this address acts as the catch-all
    # ("public") mapping for ips which match no specific net
    PUBLIC_IP_MATCHER = IpValidation.validate_ip_address('0.0.0.0')

    def __init__(self, network_settings):
        """
        is initialized with the network settings it should use

        :param network_settings: the network settings to use
        :type network_settings: dict
        :raises NetworkMapper.InvalidNetworkSettingsException: in case the settings are not valid
        """
        self.networks = self._create_network_structure(network_settings)

    def map_interfaces(self, interfaces, blueprint):
        """
        maps a set of interfaces onto their new interfaces, considering the network settings and the given blueprint

        :param interfaces: dict of interfaces
        :type interfaces: dict
        :param blueprint: blueprint which is used to map the interfaces
        :type blueprint: dict
        :return: a list of dicts representing the new interfaces
        :rtype: list[dict]
        :raises NetworkMapper.InvalidNetworkSettingsException: in case the blueprints network settings are not valid
        """
        try:
            mapped_interfaces = []
            network_mapping = self._create_network_mapping(blueprint)
            for interface_id, interface in interfaces.items():
                # loopback interfaces are never remapped
                if not IpValidation.validate_ip_address(interface['ip']).is_loopback:
                    network = self._map_network(interface['ip'], interface['net_mask'], network_mapping)
                    mapped_interfaces.append(self.assign_network_settings(network, interface_id))
            return mapped_interfaces
        except KeyError:
            # a missing dict key anywhere above means the blueprint's
            # mapping structure is malformed
            raise NetworkMapper.InvalidNetworkSettingsException(
                'following blueprint network mapping is not valid:\n{blueprint}'.format(
                    blueprint=str(blueprint)
                )
            )

    def _map_network(self, ip_string, net_mask_string, network_mapping):
        """
        uses the given network mapping, to decide, which network the given ip is mapped to

        :param ip_string: the ip you want to map
        :type ip_string: str
        :param net_mask_string: the ips net mask
        :type net_mask_string: str
        :param network_mapping: the network mapping used to determine how the ip is mapped
        :type network_mapping: dict
        :return: the network the ip is mapped to
        :rtype: dict
        :raises NetworkMapper.NoMappingFoundException: if no mapping matches and there is no catch-all mapping
        """
        ip = IpValidation.validate_ip_address(ip_string)
        net_mask = IpValidation.validate_ip_address(net_mask_string)
        # first look for a net which contains the ip and has the same net mask
        net_address = next(
            (net_address for net_address in network_mapping if ip in net_address and net_address.netmask == net_mask),
            None
        )
        if net_address is None:
            # fall back to the catch-all mapping (the one containing 0.0.0.0)
            net_address = next(
                (net_address for net_address in network_mapping if self.PUBLIC_IP_MATCHER in net_address),
                None
            )
        if net_address is None:
            raise NetworkMapper.NoMappingFoundException(
                'no matching network mapping was found, for the following ip: {ip}'.format(ip=str(ip))
            )
        return network_mapping[net_address]

    def assign_network_settings(self, network, source_interface=None):
        """
        assigns concrete network settings for a given network

        :param network: the network you want to get the settings for
        :type network: dict
        :param source_interface: the source interface, in case you want it to be part of the setting
        :type source_interface: dict
        :return: dict holding network_id, ip, gateway, net_mask and optionally the source_interface
        :rtype: dict
        """
        ip = self._get_ip(network)
        return {
            'network_id': network['network'],
            'ip': str(ip) if ip else None,
            'gateway': str(self.networks[network['network']]['gateway'])
            if self.networks[network['network']]['gateway'] else None,
            'net_mask': str(self.networks[network['network']]['net_address'].netmask) if ip else None,
            **({'source_interface': source_interface} if source_interface else {}),
        }

    def _get_ip(self, network):
        """
        returns the ip for a given blueprints network setting

        :param network: the network you want to get an ip for
        :type network: dict
        :return: the ip, or None when the network neither defines a static ip nor a range
        :rtype: ipaddress.IPv4Address or ipaddress.IPv6Address or None
        :raises NetworkMapper.InvalidNetworkSettingsException: if a static ip does not fit the network
        """
        if 'static' in network:
            static_ip = IpValidation.validate_ip_address(network['static'])
            if static_ip not in self.networks[network['network']]['net_address']:
                raise NetworkMapper.InvalidNetworkSettingsException(
                    'static ip "{static_ip}" does not fit into the network "{net_address}"'.format(
                        static_ip=str(static_ip),
                        net_address=str(self.networks[network['network']]['net_address']),
                    )
                )
            return static_ip
        if 'range' in network:
            return self._get_ip_distributor(
                network['network'],
                IpValidation.validate_ip_address(network['range']['from']),
                IpValidation.validate_ip_address(network['range']['to']),
            ).get_next_ip()
        return None

    def _get_ip_distributor(self, network_id, from_ip, to_ip):
        """
        returns the IpDistributor which should be used for given ip range

        :param network_id: the id of the network you want to have an IpDistributor for
        :type network_id: str
        :param from_ip: the ip the range starts at
        :type from_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
        :param to_ip: the ip the range stops at
        :type to_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
        :return: the ip distributor which should be used
        :rtype: IpDistributor
        """
        # distributors are cached per (from, to) tuple, so repeated range
        # lookups keep handing out consecutive ips
        if (from_ip, to_ip,) not in self.networks[network_id]['distributors']:
            self.networks[network_id]['distributors'][(from_ip, to_ip,)] = IpDistributor(
                from_ip, to_ip, self.networks[network_id]['net_address']
            )
        return self.networks[network_id]['distributors'][(from_ip, to_ip,)]

    def _create_network_mapping(self, blueprint):
        """
        creates a network mapping from a blueprint

        :param blueprint: the blueprint to create the network mapping with
        :type blueprint: dict
        :return: the network mapping
        :rtype: dict
        """
        network_mapping = {}
        for net_address_string, mapping_config in blueprint['network_mapping'].items():
            net_address = IpValidation.validate_net_address(net_address_string)
            network_mapping[net_address] = {
                'network': mapping_config['network']
            }
            if 'static' in mapping_config:
                network_mapping[net_address]['static'] = mapping_config['static']
            elif 'range' in mapping_config:
                network_mapping[net_address]['range'] = mapping_config['range']
        return network_mapping

    def _create_network_structure(self, network_settings):
        """
        creates an internally used data structure which represents the network settings

        :param network_settings: the network settings
        :type network_settings: dict
        :return: the network setting representation
        :rtype: dict
        :raises NetworkMapper.InvalidNetworkSettingsException: in case the settings are not valid
        """
        try:
            networks = {}
            for network_id in network_settings:
                net_address = IpValidation.validate_net_address(network_settings[network_id]['net'])
                gateway_address = None
                if 'gateway' in network_settings[network_id]:
                    gateway_address = IpValidation.validate_ip_address(network_settings[network_id]['gateway'])
                    if gateway_address not in net_address:
                        # BUGFIX: the format kwarg must be named
                        # ``gateway_ip`` to match the placeholder; it was
                        # ``gateway_address`` before, which made str.format
                        # raise KeyError and surfaced the generic "settings
                        # not valid" message instead of this specific one
                        raise NetworkMapper.InvalidNetworkSettingsException(
                            (
                                'network {network_id} is not valid! '
                                'gateway ip ({gateway_ip}) does not match the net address ({net_address})'
                            ).format(network_id=network_id, gateway_ip=gateway_address, net_address=net_address)
                        )
                networks[network_id] = {
                    'net_address': net_address,
                    'gateway': gateway_address,
                    'distributors': {},
                }
            return networks
        except KeyError:
            raise NetworkMapper.InvalidNetworkSettingsException(
                'the following network settings are not valid:\n{network_settings}'.format(
                    network_settings=str(network_settings)
                )
            )
class IpDistributor():
    """
    takes care of distributing ips of a given net in a given range
    """
    class RangeExhaustedException(Exception):
        """
        is raised when the range is exhausted
        """
        def __init__(self, from_ip, to_ip):
            """
            :param from_ip: the ranges from address
            :type from_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
            :param to_ip: the ranges to address
            :type to_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
            """
            super().__init__('the ip range ({from_ip} - {to_ip}) is exhausted!'.format(
                from_ip=from_ip,
                to_ip=to_ip,
            ))

    def __init__(self, from_ip, to_ip, net_address):
        """
        initialized with a given range, in a given network

        :param from_ip: the ip the range starts at
        :type from_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
        :param to_ip: the ip the range stops at (included)
        :type to_ip: ipaddress.IPv4Address or ipaddress.IPv6Address
        :param net_address: the network address, in which the ips are distributed
        :type net_address: ipaddress.IPv4Network or ipaddress.IPv6Network
        :raises IpValidation.InvalidIpException: if a ip is not valid
        :raises IpValidation.InvalidRangeException: if range is not valid
        """
        self.last_distributed_ip = None
        self.from_ip = from_ip
        self.to_ip = to_ip
        self.net_address = net_address
        IpValidation.validate_ip_range(self.from_ip, self.to_ip, self.net_address)

    def get_next_ip(self):
        """
        gets the next available ip address

        :return: ip address
        :rtype: str
        :raises IpDistributor.RangeExhaustedException: raised if range is exhausted
        """
        # iterate (instead of recursing) until we reach an ip that is
        # neither the network nor the broadcast address of the net
        while True:
            if self.last_distributed_ip is None:
                candidate = self.from_ip
            else:
                candidate = self.last_distributed_ip + 1
            self.last_distributed_ip = candidate
            if candidate > self.to_ip:
                raise IpDistributor.RangeExhaustedException(self.from_ip, self.to_ip)
            reserved = (
                candidate == self.net_address.network_address
                or candidate == self.net_address.broadcast_address
            )
            if not reserved:
                return str(candidate)
|
jdepoix/goto_cloud
|
goto_cloud/migration_plan_parsing/network_mapping.py
|
Python
|
mit
| 16,295
|
# -*- coding: UTF-8 -*-
'''
Debug
==========
O objetivo desta classe é criar uma interface única para mostrar informações de debug.
Caso seja necessário restringir o nível de informações no log, você pode modificar o valor
da variável *loglevel* para qualquer uma das constantes LOG_ALL, LOG_NOTE, LOG_WARN ou
LOG_ERROR.
Níveis de mensagem
------
* **info** Mensagem com informações normalmente irrelevantes para a execução do programa
* **note** Mensagem de debug
* **warn** Mensagem de aviso (eg. função está deprecated)
* **error** Mensagem de erro
'''
class Singleton(type):
    """Metaclass that caches one shared instance per class."""
    _instances = {}

    def __call__(cls, *args, **kwargs):
        # Build the instance only on the first call; every later call
        # returns the cached object.
        instance = Singleton._instances.get(cls)
        if instance is None:
            instance = super(Singleton, cls).__call__(*args, **kwargs)
            Singleton._instances[cls] = instance
        return instance
class Debug():
    # Single shared logger instance for the whole application.
    # NOTE(review): ``__metaclass__`` is the Python 2 syntax; under
    # Python 3 this attribute is ignored -- confirm the project targets
    # Python 2 only.
    __metaclass__ = Singleton
    '''
    Níveis de log
    '''
    # Log level thresholds, in increasing severity.
    LOG_ALL = 0
    LOG_NOTE = 1
    LOG_WARN = 2
    LOG_ERROR = 3
    '''
    Cores utilizadas em cada uma das mensagens
    '''
    # ANSI color escape sequences for each message level.
    COLOR_INFO = '\033[90m'
    COLOR_NOTE = '\033[92m'
    COLOR_WARN = '\033[93m'
    COLOR_ERROR = '\033[91m'
    COLOR_ENDL = '\033[0m'
    # Current threshold: messages below this level are suppressed.
    loglevel = LOG_ALL
    '''
    Message types
    '''
    # Each method prints ``header: message`` in the level's color when
    # the level passes the ``loglevel`` threshold.
    def info(self, message, header):
        if self.loglevel <= self.LOG_ALL: print self.COLOR_INFO + header + ": " + message + self.COLOR_ENDL
    def note(self, message, header):
        if self.loglevel <= self.LOG_NOTE: print self.COLOR_NOTE + header + ": " + message + self.COLOR_ENDL
    def warn(self, message, header):
        if self.loglevel <= self.LOG_WARN: print self.COLOR_WARN + header + ": " + message + self.COLOR_ENDL
    def error(self, message, header):
        if self.loglevel <= self.LOG_ERROR : print self.COLOR_ERROR + header + ": " + message + self.COLOR_ENDL
|
drpexe/sigmawebplus
|
src/service/debug.py
|
Python
|
mit
| 1,902
|
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets a forecast for an existing line item. To determine
which ine items exist, run get_all_line_items.py."""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'

# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))

# Import appropriate classes from the client library.
from adspygoogle import DfpClient

# Initialize client object (reads credentials/config from the given path).
client = DfpClient(path=os.path.join('..', '..', '..', '..'))

# Initialize appropriate service.
forecast_service = client.GetService('ForecastService', version='v201306')

# Set the line item to get a forecast for.
line_item_id = 'INSERT_LINE_ITEM_ID'

# Get forecast for line item.
forecast = forecast_service.GetForecastById(line_item_id)[0]
matched = long(forecast['matchedUnits'])
# NOTE(review): if the API returns matchedUnits == 0 this division raises
# ZeroDivisionError -- confirm whether that can happen in practice.
available_percent = (long(forecast['availableUnits'])/(matched * 1.0)) * 100

# Display results.
print ('%s %s matched.\n%s%% %s available.'
       % (matched, forecast['unitType'].lower(),
          available_percent, forecast['unitType'].lower()))

# 'possibleUnits' is only present for some forecast responses.
if 'possibleUnits' in forecast:
  possible_percent = (long(forecast['possibleUnits'])/(matched * 1.0)) * 100
  print '%s%% %s possible' % (possible_percent, forecast['unitType'])
|
donspaulding/adspygoogle
|
examples/adspygoogle/dfp/v201306/get_forecast_by_id.py
|
Python
|
apache-2.0
| 1,936
|
#!/usr/bin/env python
# ---------------------------------------------------------------------------
# Licensing Information: You are free to use or extend these projects for
# education or reserach purposes provided that (1) you retain this notice
# and (2) you provide clear attribution to UC Berkeley, including a link
# to http://barc-project.com
#
# Attibution Information: The barc project ROS code-base was developed
# at UC Berkeley in the Model Predictive Control (MPC) lab by Jon Gonzales
# (jon.gonzales@berkeley.edu). The cloud services integation with ROS was developed
# by Kiet Lam (kiet.lam@berkeley.edu). The web-server app Dator was
# based on an open source project by Bruce Wootton
# ---------------------------------------------------------------------------
import os, sys
import time

# data_connection / base live next to the ROS scripts; chdir so their
# relative imports and file accesses resolve.
os.chdir( os.path.expanduser("~") + '/barc/workspace/src/data_service/scripts')
from data_connection import *
from base import *

# Bootstrap the Dator django project so its ORM models can be used
# directly from this standalone script.
proj_path = os.path.expanduser("~") + '/barc/Dator'
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'dator.settings')
sys.path.append(proj_path)
os.chdir(proj_path)
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
from data_api.models import *
import boto3

# Local configuration file and upload targets.
CLOUD_CONFIG_LOCATION = os.path.expanduser("~") + '/cloud.cfg'
S3_VIDEOS_BUCKET = 'datorvideos2'
rosbag_dir = os.path.expanduser("~") + '/rosbag'
video_dir = os.path.expanduser("~") + '/video'
if __name__ == '__main__':
    configurator = init_configurator(CLOUD_CONFIG_LOCATION)
    data_connection = DataConnection(configurator)
    s3_client = boto3.client('s3', use_ssl=0)
    s3 = boto3.resource('s3', use_ssl=0)
    # Push every locally recorded signal (and any associated video) to the
    # remote Dator server / S3, then delete the local copy.
    for sig in Signal.objects.all():
        try:
            experiment = data_connection.get_or_create_experiment(sig.experiment.name)
            print 'Looking at experiment : %s ' % experiment['name']
            for setting in sig.experiment.setting_set.all():
                setting_remote = data_connection.get_or_create_setting(setting.key, experiment)
                print 'Looking at signal : %s ' % sig.name
                #TODO: Check if AWS S3 token exists
                if setting.key == 'video':
                    # Only upload while the setting still points at a local file.
                    if setting.value.startswith(video_dir):
                        key_name = '%s_%s.avi' % (os.environ['TEAM_NAME'], sig.experiment.name)
                        video_path = '%s/%s.avi' %(video_dir, sig.experiment.name)
                        if os.path.isfile(video_path):
                            bucket = s3.Bucket(S3_VIDEOS_BUCKET)
                            bucket.Acl().put(ACL='public-read')
                            obj = s3.Object(S3_VIDEOS_BUCKET, key_name)
                            print 'Uploading video : %s ' % sig.experiment.name
                            obj.put(Body=open(video_path, 'rb'))
                            obj.Acl().put(ACL='public-read')
                            print 'Finished uploading video'
                            # Replace the local path with the public S3 url
                            # and remove the local file.
                            url = '{}/{}/{}'.format(s3_client.meta.endpoint_url,
                                                    S3_VIDEOS_BUCKET, key_name)
                            setting.value = url
                            setting.save()
                            os.remove(video_path)
                        else:
                            print 'WARNING: Video no longer available. You will have an unlinked video in your S3 storage at:'
                            print setting.value
                            setting.value = ''
                            setting.save()
                data_connection.write_setting(setting.value, setting_remote['id'])
            signal = data_connection.get_or_create_signal(sig.name, experiment)
            # LocalSignalTag records whether this signal was already uploaded.
            try:
                lst = LocalSignalTag.objects.filter(signal__name=sig.name, signal__experiment__name=sig.experiment.name)[0]
            except (LocalSignalTag.DoesNotExist, IndexError) as e:
                lst = LocalSignalTag()
                lst.signal = sig
                lst.uploaded = False
            if not lst.uploaded:
                print 'Uploading ...'
                try:
                    data_connection.add_signal_points(signal['id'], sig.get_data())
                    lst.uploaded = True
                    lst.save()
                    print 'Finished Uploading'
                    sig.delete()
                    print 'Signal deleted locally'
                except Exception as e:
                    print 'Uploading signal failed...'
                    print e
            else:
                sig.delete()
                print 'Signal already stored on the web... deleting it locally'
            lst.save()
        except Exception as e:
            # Best-effort per-signal upload: log and continue with the next one.
            print e
|
MPC-Berkeley/barc
|
workspace/src/data_service/scripts/upload.py
|
Python
|
mit
| 4,719
|
import os
import typing
from ..constants import DATA_HOME_DEFAULT_DIRNAME, DATA_HOME_ENV_VAR_NAME, DATA_HOME_SUB_DIRNAME
DataDirname = typing.NewType("DataDirname", str)
def data_dirname() -> DataDirname:
    """Return the directory where utt stores its data files.

    Uses the data-home environment variable when set, otherwise the
    expanded default home directory, and appends the application
    sub-directory.
    """
    default_home = os.path.expanduser(DATA_HOME_DEFAULT_DIRNAME)
    base_dir = os.getenv(DATA_HOME_ENV_VAR_NAME, default_home)
    return DataDirname(os.path.join(base_dir, DATA_HOME_SUB_DIRNAME))
|
larose/utt
|
utt/components/data_dirname.py
|
Python
|
gpl-3.0
| 396
|
# Copyright 2012-2013, Damian Johnson and The Tor Project
# See LICENSE for licensing information
import binascii
import stem.response
import stem.socket
import stem.util.str_tools
import stem.util.tor_tools
class AuthChallengeResponse(stem.response.ControlMessage):
    """
    AUTHCHALLENGE query response.

    :var str server_hash: server hash provided by tor
    :var str server_nonce: server nonce provided by tor
    """

    def _parse_message(self):
        """
        Parses the single-line AUTHCHALLENGE reply, populating
        ``server_hash`` and ``server_nonce`` (both hex-decoded to bytes).

        :raises stem.ProtocolError: if the response is malformed
        """
        # Example:
        #   250 AUTHCHALLENGE SERVERHASH=680A73C9836C4F557314EA1C4EDE54C285DB9DC89C83627401AEF9D7D27A95D5 SERVERNONCE=F8EA4B1F2C8B40EF1AF68860171605B910E3BBCABADF6FC3DB1FA064F4690E85
        self.server_hash = None
        self.server_nonce = None
        if not self.is_ok():
            raise stem.ProtocolError("AUTHCHALLENGE response didn't have an OK status:\n%s" % self)
        elif len(self) > 1:
            raise stem.ProtocolError("Received multiline AUTHCHALLENGE response:\n%s" % self)
        line = self[0]
        # sanity check that we're a AUTHCHALLENGE response
        if not line.pop() == "AUTHCHALLENGE":
            raise stem.ProtocolError("Message is not an AUTHCHALLENGE response (%s)" % self)
        # both fields are mandatory 64-character (32-byte) hex strings
        if line.is_next_mapping("SERVERHASH"):
            value = line.pop_mapping()[1]
            if not stem.util.tor_tools.is_hex_digits(value, 64):
                raise stem.ProtocolError("SERVERHASH has an invalid value: %s" % value)
            self.server_hash = binascii.a2b_hex(stem.util.str_tools._to_bytes(value))
        else:
            raise stem.ProtocolError("Missing SERVERHASH mapping: %s" % line)
        if line.is_next_mapping("SERVERNONCE"):
            value = line.pop_mapping()[1]
            if not stem.util.tor_tools.is_hex_digits(value, 64):
                raise stem.ProtocolError("SERVERNONCE has an invalid value: %s" % value)
            self.server_nonce = binascii.a2b_hex(stem.util.str_tools._to_bytes(value))
        else:
            raise stem.ProtocolError("Missing SERVERNONCE mapping: %s" % line)
|
ouzel/stem
|
stem/response/authchallenge.py
|
Python
|
lgpl-3.0
| 1,910
|
import xml.dom.minidom as dom
from kml import kml
import json
import sys
import traceback
def kml_to_geojson(kml_file, output_name):
    """Convert a KML file into a GeoJSON feature-collection file.

    ``output_name`` gets a ``.geojson`` extension appended unless it
    already has one.
    """
    document = dom.parse(kml_file)  # minidom DOM of the input KML
    if not output_name.endswith(".geojson"):
        output_name = output_name + ".geojson"
    with open(output_name, 'w') as outfile:
        json.dump(kml.build_feature_collection(document), outfile, indent=4)
if __name__ == '__main__':
try:
if len(sys.argv) == 1:
print "Input KML not specified"
elif len(sys.argv) == 2:
print "Input KML specified as", sys.argv[1], ". Output GeoJSON file name not specified"
else:
input_kml = str(sys.argv[1])
output_name = str(sys.argv[2])
print "Converting", input_kml, "to", output_name + ".geojson"
kml_to_geojson(input_kml, output_name)
print "...Done!"
except:
error = sys.exc_info()[0]
print "There was an error: ", error, "\n"
print traceback.format_exc()
|
JamesMilnerUK/Loxo
|
conversiontools/kml2geojson.py
|
Python
|
mit
| 1,060
|
#!/usr/bin/env python3
"""
A program to count Sundays in a century
"""
# Weekday codes used throughout this module: Sunday == 0 .. Saturday == 6.
sun=0
mon=1
tue=2
wed=3
thu=4
fri=5
sat=6
def check_leap_year(year):
    """Return True when *year* is a Gregorian leap year.

    Century years are leap only when divisible by 400; other years when
    divisible by 4.
    """
    if year % 100 == 0:
        # Reduce a century year so the % 4 test below implements the
        # "divisible by 400" rule.  Integer division (//=) replaces the
        # original float /=, which only worked incidentally.
        year //= 100
    return year % 4 == 0
class Year(object):
    """A calendar year that knows the weekday of any of its dates.

    ``first_day`` is the weekday code (0=Sunday .. 6=Saturday) of
    January 1st of ``year``.
    """

    # Days in each month of a non-leap year, indexed by month number
    # (index 0 is a placeholder so January is index 1).
    _MONTH_LENGTHS = (0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31)

    def __init__(self, year, first_day):
        self.year = year
        self.first_day = first_day
        self.leap = check_leap_year(year)

    def get_next_year(self):
        # A leap year shifts the following January 1st by two weekdays,
        # a common year by one.
        shift = 2 if self.leap else 1
        return Year(self.year + 1, (self.first_day + shift) % 7)

    def get_prev_year(self):
        shift = 2 if check_leap_year(self.year - 1) else 1
        return Year(self.year - 1, (self.first_day - shift) % 7)

    def get_day(self, month, date):
        """Return the weekday code of ``month``/``date`` in this year."""
        offset = date - 1
        for m in range(1, month):
            offset += self._MONTH_LENGTHS[m]
            if m == 2 and self.leap:
                offset += 1  # February 29th
        return (self.first_day + offset) % 7
def get_year_object(year):
    """Build the Year for *year* by walking from the 1900 anchor.

    January 1st 1900 was a Monday (weekday code 1).
    """
    current = Year(1900, 1)
    while current.year < year:
        current = current.get_next_year()
    while current.year > year:
        current = current.get_prev_year()
    return current
|
Bolt64/my_code
|
euler/counting_sundays.py
|
Python
|
mit
| 2,247
|
# Write a program that receives two integers and generates the integers in
# the interval between them.
# (original Portuguese: "Faça um programa que receba dois números inteiros
# e gere os números inteiros que estão no intervalo compreendido por eles.")
# NOTE(review): the prompt text has a typo ("incial"); left unchanged
# because prompts are runtime strings.
a=int(input('valor incial'))
print (a)
b=int(input('valor final'))
print (b)
# Prints a, a+1, ..., b-1 (the upper bound itself is excluded).
while a<b:
    print(a)
    a=a+1
|
erikaklein/algoritmo---programas-em-Python
|
GerarNumeroNoIntervalo.py
|
Python
|
mit
| 251
|
# Copyright 2012 Dorival de Moraes Pedroso. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from __future__ import print_function # for Python 3
import sys
from numpy import linspace, zeros, sin, pi
from pylab import axis, show
from tlfem.genmesh import Gen2Dregion, JoinAlongEdge
from tlfem.solver import Solver
from tlfem.mesh import Mesh
def runtest(prob):
    """Run finite-element patch-recovery test case ``prob`` (1 or 2)."""
    if prob==1:
        # Problem 1: two stacked rectangular regions joined along an edge,
        # solved as a 2D elasticity problem.
        tri = 0  # 0 => quad elements, nonzero => triangles
        if 1:
            # coarse mesh (alternative finer parameters kept below)
            nxa = 3 #5
            nxb = 5 #9
            nya = 3
            nyb = 3
        else:
            nxa = 11
            nxb = nxa + (nxa-1)
            nya = 7
            nyb = 4
        # lower region: 4 x 2, with mid-side vertices (quadratic elements)
        x = linspace(0.,4.,nxa)
        y = linspace(0.,2.,nya)
        m = Gen2Dregion(x, y, triangle=tri)
        m.gen_mid_verts()
        # upper region: 4 x 0.5, linear elements, joined to the lower one
        x = linspace(0.,4., nxb)
        y = linspace(2.,2.5,nyb)
        b = Gen2Dregion(x, y, ctag=-2, triangle=tri, etags=[-10,-11,-14,-13])
        m.join_along_edge(b, -12, -10)
        m.check_overlap()
        m.draw(pr=1); m.show()
        # elastic material parameters
        E, nu = 10.0, 0.25
        if tri:
            p = {-1:{'type':'Eelastic2D', 'E':E, 'nu':nu, 'geom':'tri6'},
                 -2:{'type':'Eelastic2D', 'E':E, 'nu':nu, 'geom':'tri3'}}
        else:
            p = {-1:{'type':'Eelastic2D', 'E':E, 'nu':nu, 'geom':'qua8'},
                 -2:{'type':'Eelastic2D', 'E':E, 'nu':nu, 'geom':'qua4'}}
        s = Solver(m, p)
        # boundary conditions: rollers on two edges, load on the third
        eb = {-10:{'uy':0.}, -13:{'ux':0.}, -14:{'qnqt':(-1.,0.)}}
        s.set_bcs(eb=eb)
        s.solve_steady(extrap=True)#, vtu_fnkey='patchrecov1')
        #PrintTable(s.Uout)
        #PrintTable(s.Eout)
        if 0:
            # optional dynamics run (disabled)
            U0, V0 = zeros(s.neqs), zeros(s.neqs)
            s.solve_dynamics(0., U0, V0, 1., 0.01)#, vtu_fnkey='patchrecov_dyn1')
    if prob==2:
        # Problem 2: 2D region with an attached frame of beam elements.
        tri = 0
        o2 = 1  # 1 => second-order (quadratic) elements
        x = linspace(0.,4.,5)
        y = linspace(0.,2.,3)
        m = Gen2Dregion(x,y, triangle=tri)
        # tag vertices where the beam frame attaches
        m.tag_vert(-100, 1., 2.)
        m.tag_vert(-101, 3., 2.)
        m.tag_edges_on_line(-14, 0.,1.,0.)
        if o2: m.gen_mid_verts()
        m.edges_to_lincells(-12, -2)
        # add beam vertices/cells forming a portal above the region
        vid0 = m.get_verts(-100)[0]
        vid1 = m.get_verts(-101)[0]
        V = [[m.nv,   -200, 1., 3.],
             [m.nv+1, -201, 2., 3.],
             [m.nv+2, -202, 3., 3.]]
        C = [[m.nc,   -3, [vid0,   m.nv  ]],
             [m.nc+1, -4, [m.nv,   m.nv+1]],
             [m.nc+2, -4, [m.nv+1, m.nv+2]],
             [m.nc+3, -3, [m.nv+2, vid1  ]]]
        m.extend(V, C)
        m.check_overlap()
        m.draw(pr=1)
        #m.show()
        if tri: geom = 'tri6' if o2 else 'tri3'
        else:   geom = 'qua8' if o2 else 'qua4'
        # material parameters: elastic region plus stiffer beams
        E, nu = 10.0, 0.25
        Eb, A, I = 100.0, 1., 1.
        p = {-1:{'type':'Eelastic2D',   'E':E,  'nu':nu, 'geom':geom},
             -2:{'type':'EelasticBeam', 'E':Eb, 'A':A, 'I':I},
             -3:{'type':'EelasticBeam', 'E':Eb, 'A':A, 'I':I},
             -4:{'type':'EelasticBeam', 'E':Eb, 'A':A, 'I':I, 'qnqt':(-1.,-1.,0.)}}
        s = Solver(m, p)
        eb = {-10:{'uy':0.}, -11:{'ux':0.}, -13:{'ux':0.}}
        if 0:
            # optional dynamics run with a sinusoidal point load (disabled)
            s.set_bcs(eb=eb, vb={-101:{'fy':lambda t:sin(pi*t)}})
            o = s.solve_dynamics(0., 1., 0.01, dtout=0.5, extrap=True)#, vtu_fnkey='patchrecov_dyn2')
        else:
            s.set_bcs(eb=eb)
            o = s.solve_steady(extrap=1)#, vtu_fnkey='patchrecov2')
            o.beam_moments()
            o.beam_print()
        axis('equal')
        show()
# run tests: with a CLI argument run that problem only, otherwise run all
prob = int(sys.argv[1]) if len(sys.argv)>1 else -1
if prob < 0:
    for p in range(1,3):
        print()
        # NOTE(review): the banner strings appear to have lost their ESC
        # (\033) characters for the ANSI color codes '[1;33m'/'[0m' --
        # confirm against the upstream file before changing them.
        print('[1;33m####################################### %d #######################################[0m'%p)
        print()
        runtest(p)
else: runtest(prob)
|
cpmech/tlfem
|
tlfem/test/t_patchrecov.py
|
Python
|
bsd-3-clause
| 3,807
|
#!/usr/bin/python
r"""
PYTHONRC
========
Initialization script for the interactive Python interpreter. Its main purpose
is to enhance the overall user experience when working in such an environment
by adding some niceties to the standard console.
It also works with IPython and BPython, although its utility in that kind of
scenarios can be argued.
Tested in GNU/Linux with Python versions 2.7 and 3.4.
Please read the Installation section below.
Features
--------
- User input completion
+ Introduces a completion mechanism for inputted commands in Python 2.
+ In Python 3, where the standard console is a lot nicer, it just
impersonates the default completion machinery to keep the consistency with
the behavior in Python 2 (and so it's still possible to adapt it to the
user's needs).
- Command History
+ Creates a callable, singleton object called `history`, placing it into
the `__builtins__` object to make it easily available, which enables the
handling of the command history (saving some input lines to a file of your
choice, listing the commands introduced so far, etc.). Try simply
`history()` on the Python prompt to see it in action; inspect its members
(with `dir(history)` or `help(history.write)`) for more information.
- Color prompt
+ Puts a colorful prompt in place, if the terminal supports it.
- Implementation of a bash's "operate-and-get-next" clone
+ Enables a quick re-edition of a code block from the history by
successive keypresses of the `Ctrl-o` hotkey.
Installation
------------
- You must define in your environment (in GNU/Linux and MacOS X that usually
means your `~/.bashrc` file) the variable 'PYTHONSTARTUP' containing the path
to `pythonrc.py`.
- It is also highly recommended to define the variable 'PYTHON_HISTORY_FILE'.
Remember that BPython (unlike the standard interpreter or IPython) ignores that
variable, so you'll have to configure it as well by other means to be able to
use the same history file there (for instance, in Linux, the file
`~/.config/bpython/config` is a good place to start, but please read BPython's
documentation).
### Example configurations
- Extract of `~/.bashrc`
```sh
# python
export PYTHONSTARTUP=~/.python/pythonrc.py
export PYTHON_HISTORY_FILE=~/.python/.python_history
## You may want to also uncomment some of these lines if using an old
## version of virtualenvwrapper
# export VIRTUALENVWRAPPER_PYTHON=/usr/bin/python3.4
# export WORKON_HOME=~/.python/virtualenvs
# source $(which virtualenvwrapper.sh)
```
- Extract of `~/.config/bpython/config`
```
[general]
color_scheme = default
hist_file = ~/.python/.python_history
hist_length = 1000
```
Bugs / Caveats / Future enhancements
------------------------------------
- No module/package introspection for the last argument in commands of the form
`from <package> import <not_completing_this>` (this, in fact, could be a not so
bad thing, because it doesn't execute side effects, e.g. modules' init code).
- Depending on the user's system, the compilation of the packages' and modules'
list for completing `import ...` and `from ... import ...` commands can take a
long time, especially the first time it is invoked.
- When completing things like a method's name, the default is to also include
the closing parenthesis along with the opening one, but the cursor is placed
after it no matter what, instead of between them. This is because of the
python module `readline`'s limitations.
You can turn off the inclusion of the closing parenthesis; if you do so, you
might be also interested in modifying the variable called
`dict_keywords_postfix` (especially the strings that act as that dictionary's
indexes).
- IPython has its own `%history` magic. I did my best to not interfere with
it, but I don't know the actual consequences. Also, it's debatable if it
even makes sense to use this file with IPython and/or BPython (though having
a unified history for all the environments is really nice).
You could define some bash aliases like
```sh
alias ipython='PYTHONSTARTUP="" ipython'
alias bpython='PYTHONSTARTUP="" bpython'
```
to be on the safer side.
- Could have used the module `six` for better clarity. Right now it uses my own
made up stubs to work on both Python 2 and 3.
- Needs better comments and documentation, especially the part on history
handling.
- Probably a lot more. Feel free to file bug reports ;-)
"""
def init():
    """Set up the interactive interpreter: color prompt, readline
    tab-completion (with file/module completion), a persistent `history`
    builtin, and a bash-like Ctrl-o "operate-and-get-next" binding.

    Runs once at startup (via PYTHONSTARTUP) and is deleted afterwards so
    the interactive namespace stays clean.
    """
    # color prompt
    import sys
    import os
    term_with_colors = ['xterm', 'xterm-color', 'xterm-256color', 'linux',
                        'screen', 'screen-256color', 'screen-bce']
    red = ''
    green = ''
    reset = ''
    if os.environ.get('TERM') in term_with_colors:
        escapes_pattern = '\001\033[%sm\002'  # \001 and \002 mark non-printing
        red = escapes_pattern % '31'
        green = escapes_pattern % '32'
        reset = escapes_pattern % '0'
        sys.ps1 = red + '>>> ' + reset
        sys.ps2 = green + '... ' + reset
        # The bare escape codes (without the \001/\002 markers) are reused
        # below for plain print() calls, where the markers would be garbage.
        red = red.strip('\001\002')
        green = green.strip('\001\002')
        reset = reset.strip('\001\002')
    # readline (tab-completion, history)
    try:
        import readline
    except ImportError:
        print(red + "Module 'readline' not available. Skipping user customizations." + reset)
        return
    import rlcompleter
    import atexit
    from pwd import getpwall
    from os.path import isfile, isdir, expanduser, \
        join as joinpath, split as splitpath, sep as pathsep
    default_history_file = '~/.pythonhist'
    majver = sys.version_info[0]

    # Both BPython and Django shell change the nature of the __builtins__
    # object. This hack workarounds that:
    def builtin_setattr(attr, value):
        if hasattr(__builtins__, '__dict__'):
            setattr(__builtins__, attr, value)
        else:
            __builtins__[attr] = value

    def builtin_getattr(attr):
        if hasattr(__builtins__, '__dict__'):
            return getattr(__builtins__, attr)
        else:
            return __builtins__[attr]

    # My own "six" library, where I define the following stubs:
    # * myrange for xrange() (python2) / range() (python3)
    # * exec_stub for exec()
    # * iteritems for dict.iteritems() (python2) / list(dict.items()) (python3)
    # I could have done "from six import iteritems" and such instead of this
    if majver == 2:
        myrange = xrange

        def exec_stub(textcode, globalz=None, localz=None):
            # the parenthesis make it valid python3 syntax, do nothing at all
            exec (textcode) in globalz, localz

        def iteritems(d):
            return d.iteritems()
    elif majver == 3:
        myrange = range
        # def exec_stub(textcode, globalz=None, localz=None):
        #     # the "in" & "," make it valid python2 syntax, do nothing useful
        #     exec(textcode, globalz, localz) in globalz #, localz
        # the three previous lines work, but this is better
        exec_stub = builtin_getattr('exec')

        def iteritems(d):
            return list(d.items())

    # AUXILIARY CLASSES

    # History management
    class History:
        # These are plain builtin functions (not descriptors), so accessing
        # them through an instance does NOT bind them -- calls work as-is.
        set_length = readline.set_history_length
        get_length = readline.get_history_length
        get_current_length = readline.get_current_history_length
        get_item = readline.get_history_item
        write = readline.write_history_file

        def __init__(self, path=default_history_file, length=500):
            self.path = path
            self.reload(path)
            self.set_length(length)

        def __exit__(self):
            # Registered with atexit below; saves the history on shutdown.
            print("Saving history (%s)..." % self.path)
            self.write(expanduser(self.path))

        def __repr__(self):
            """print out current history information"""
            # length = self.get_current_length()
            # command = self.get_item(length)
            # if command == 'history':
            #     return "\n".join(self.get_item(i)
            #                      for i in myrange(1, length+1))
            # else:
            #     return '<%s instance>' % str(self.__class__)
            return '<%s instance>' % str(self.__class__)

        def __call__(self, pos=None, end=None):
            """print out current history information with line number"""
            if not pos:
                pos = 1
            elif not end:
                # Single argument: show just that one entry.
                end = pos
            for i, item in self.iterator(pos, end, enumerate_it=True):
                print('%i:\t%s' % (i, item))

        def iterator(self, pos, end, enumerate_it=False):
            # Yields history items in [pos, end]; negative indices count
            # from the end, like sequence indexing.
            length = self.get_current_length()
            if not pos:
                pos = 1
            if not end:
                end = length
            pos = min(pos, length)
            if pos < 0:
                pos = max(1, pos + length + 1)
            end = min(end, length)
            if end < 0:
                end = max(1, end + length + 1)
            if enumerate_it:
                return ((i, self.get_item(i)) for i in myrange(pos, end + 1))
            else:
                return (self.get_item(i) for i in myrange(pos, end + 1))

        def reload(self, path=""):
            """clear the current history and reload it from saved"""
            readline.clear_history()
            if isfile(path):
                self.path = path
                readline.read_history_file(expanduser(self.path))

        def save(self, filename, pos=None, end=None):
            """write history number from pos to end into filename file"""
            with open(filename, 'w') as f:
                for item in self.iterator(pos, end):
                    f.write(item)
                    f.write('\n')

        def execute(self, pos, end=None):
            """execute history number from pos to end"""
            if not end:
                end = pos
            commands = []
            for item in self.iterator(pos, end):
                commands.append(item)
                readline.add_history(item)
            exec_stub("\n".join(commands), globals())
            # comment the previous two lines and uncomment those below
            # if you prefer to re-add to history just the commands that
            # executed without problems
            # try:
            #     exec_stub("\n".join(commands), globals())
            # except:
            #     raise
            # else:
            #     for item in commands:
            #         readline.add_history(cmdlist)

    # Activate completion and make it smarter
    class Irlcompleter(rlcompleter.Completer):
        """
        This class enables the insertion of "indentation" if there's no text
        for completion.
        The default "indentation" is four spaces. You can initialize with '\t'
        as the tab if you wish to use a genuine tab.
        Also, compared to the default rlcompleter, this one performs some
        additional useful things, like file completion for string constants
        and addition of some decorations to keywords (namely, closing
        parenthesis, and whatever you've defined in dict_keywords_postfix --
        spaces, colons, etc.)
        """
        # NOTE(review): the mutable default arguments below are shared across
        # instances; harmless here because exactly one instance is created.
        def __init__(
                self,
                indent_str='    ',
                delims=readline.get_completer_delims(),
                binds=('tab: complete', ),
                dict_keywords_postfix={" ": ["import", "from"], },
                add_closing_parenthesis=True
        ):
            rlcompleter.Completer.__init__(self, namespace=globals())
            readline.set_completer_delims(delims)
            self.indent_str_list = [indent_str, None]
            for bind in binds:
                readline.parse_and_bind(bind)
            self.dict_keywords_postfix = dict_keywords_postfix
            self.add_closing_parenthesis = add_closing_parenthesis

        def complete(self, text, state):
            # Dispatch on what is being completed: library names after
            # import/from, indentation on empty text, files inside string
            # literals, and ordinary keywords/identifiers otherwise.
            line = readline.get_line_buffer()
            stripped_line = line.lstrip()
            # libraries
            if stripped_line.startswith('import '):
                value = self.complete_libs(text, state)
            elif stripped_line.startswith('from '):
                pos = readline.get_begidx()
                # end = readline.get_endidx()
                if line[:pos].strip() == 'from':
                    value = self.complete_libs(text, state) + " "
                elif state == 0 and line.find(' import ') == -1:
                    value = 'import '
                else:
                    # Here we could do module introspection (ugh)
                    value = None
            # indentation, files and keywords/identifiers
            elif text == '':
                value = self.indent_str_list[state]
            elif text[0] in ('"', "'"):
                value = self.complete_files(text, state)
            else:
                value = self.complete_keywords(text, state)
            return value

        def complete_keywords(self, text, state):
            txt = rlcompleter.Completer.complete(self, text, state)
            if txt is None:
                return None
            if txt.endswith('('):
                if self.add_closing_parenthesis:
                    return txt + ')'
                else:
                    return txt
            for postfix, words in iteritems(self.dict_keywords_postfix):
                if txt in words:
                    return txt + postfix
            return txt

        def complete_files(self, text, state):
            # text includes the opening quote; keep it in the results so
            # readline replaces the token correctly.
            str_delim = text[0]
            path = text[1:]
            if path.startswith("~/"):
                path = expanduser("~/") + path[2:]
            elif path.startswith("~"):
                i = path.find(pathsep)
                if i > 0:
                    path = expanduser(path[:i]) + path[i:]
                else:
                    # Complete "~user" prefixes from the passwd database.
                    return [
                        str_delim + "~" + i[0] + pathsep
                        for i in getpwall()
                        if i[0].startswith(path[1:])
                    ][state]
            dir, fname = splitpath(path)
            if not dir:
                dir = os.curdir
            # NOTE: indexing past the end raises IndexError, which the
            # readline machinery swallows, terminating the completion cycle.
            return [
                str_delim + joinpath(dir, i)
                for i in os.listdir(dir)
                if i.startswith(fname)
            ][state]

        def complete_libs(self, text, state):
            # Scan sys.path for importable modules/packages; can be slow
            # the first time on large installations (see module docstring).
            libs = {}
            for i in sys.path:
                try:
                    if i == '':
                        i = os.curdir
                    files = os.listdir(i)
                    for j in files:
                        filename = joinpath(i, j)
                        if isfile(filename):
                            for s in [".py", ".pyc", ".so"]:
                                if j.endswith(s):
                                    j = j[:-len(s)]
                                    pos = j.find(".")
                                    if pos > 0:
                                        j = j[:pos]
                                    libs[j] = None
                                    break
                        elif isdir(filename):
                            # Only directories with an __init__ are packages.
                            for s in ["__init__.py", "__init__.pyc"]:
                                if isfile(joinpath(filename, s)):
                                    libs[j] = None
                except OSError:
                    pass
            for j in sys.builtin_module_names:
                libs[j] = None
            libs = sorted(j for j in libs.keys() if j.startswith(text))
            return libs[state]

    # DEFINITIONS:
    # history file path and length
    history_length = 1000
    history_path = os.getenv("PYTHON_HISTORY_FILE", default_history_file)
    # bindings for readline (assign completion key, etc.)
    # readline_binds = (
    #     'tab: tab_complete',
    #     '"\C-o": operate-and-get-next',  # exists in bash but not in readline
    # )
    # completion delimiters
    # we erase ", ', ~ and / so file completion works
    # readline_delims = ' \t\n`!@#$%^&*()-=+[{]}\\|;:,<>?'
    readline_delims = readline.get_completer_delims()\
        .replace("~", "", 1)\
        .replace("/", "", 1)\
        .replace("'", "", 1)\
        .replace('"', '', 1)
    # dictionary of keywords to be postfixed by a string
    dict_keywords_postfix = {
        ":": ["else", "try", "finally", ],
        " ": ["import", "from", "or", "and", "not", "if", "elif", ],
        " ():": ["def", ]  # "class", ]
    }

    # DO IT
    completer = Irlcompleter(delims=readline_delims,  # binds=readline_binds,
                             dict_keywords_postfix=dict_keywords_postfix)
    readline.set_completer(completer.complete)
    # NOTE(review): history_path is used as-is (no expanduser) here, unlike
    # in History.reload/write -- presumably PYTHON_HISTORY_FILE is expected
    # to be an absolute path; verify with the default '~/.pythonhist'.
    if not os.access(history_path, os.F_OK):
        print(green + 'History file %s does not exist. Creating it...' % history_path + reset)
        with open(history_path, 'w') as f:
            pass
    elif not os.access(history_path, os.R_OK|os.W_OK):
        print(red + 'History file %s has wrong permissions!' % history_path + reset)
    history = History(history_path, history_length)

    #
    # Hack: Implementation of bash-like "operate-and-get-next" (Ctrl-o)
    #
    try:
        # We'll hook the C functions that we need from the underlying
        # libreadline implementation that aren't exposed by the readline
        # python module.
        from ctypes import CDLL, CFUNCTYPE, c_int
        librl = CDLL(readline.__file__)
        rl_callback = CFUNCTYPE(c_int, c_int, c_int)
        rl_int_void = CFUNCTYPE(c_int)
        readline.add_defun = librl.rl_add_defun  # didn't bother to define args
        readline.accept_line = rl_callback(librl.rl_newline)
        readline.previous_history = rl_callback(librl.rl_get_previous_history)
        readline.where_history = rl_int_void(librl.where_history)

        def pre_input_hook_factory(offset, char):
            def rewind_history_pre_input_hook():
                # Uninstall this hook, rewind history and redisplay
                readline.set_pre_input_hook(None)
                result = readline.previous_history(offset, char)
                readline.redisplay()
                return result
            return rewind_history_pre_input_hook

        @rl_callback
        def operate_and_get_next(count, char):
            current_line = readline.where_history()
            offset = readline.get_current_history_length() - current_line
            # Accept the current line and set the hook to rewind history
            result = readline.accept_line(1, char)
            readline.set_pre_input_hook(pre_input_hook_factory(offset, char))
            return result

        # Hook our function to Ctrl-o, and hold a reference to it to avoid GC
        readline.add_defun('operate-and-get-next', operate_and_get_next, ord("O") & 0x1f)
        history._readline_functions = [operate_and_get_next]
    except (ImportError, OSError, AttributeError) as e:
        print(red + """
Couldn't either bridge the needed methods from binary 'readline'
or properly install our implementation of 'operate-and-get-next'.
Skipping the hack. Underlying error:
""" + reset + repr(e))

    # Publish `history` as a builtin and make sure it is saved on exit.
    builtin_setattr('history', history)
    atexit.register(history.__exit__)

# run the initialization and clean up the environment afterwards
init()
del init
|
0xf4/pythonrc
|
pythonrc.py
|
Python
|
mit
| 19,310
|
from wheelcms_axle.content import Content, FileContent, ImageContent
from wheelcms_axle.spoke import Spoke, action, FileSpoke
from wheelcms_axle.content import type_registry
from django.db import models
class Type1(Content):
    # Test content type with a single optional text field.
    t1field = models.TextField(null=True, blank=True)


class Type1Type(Spoke):
    """Spoke (type handler) for Type1; discussable, with a sample action."""
    model = Type1
    discussable = True

    @action
    def hello(self, handler, request, action):
        # Echo the dispatch arguments back so tests can verify action routing.
        return ("Hello", request, handler, self, action)
class Type2(Content):
    # Minimal test content type with no extra fields.
    pass


class Type2Type(Spoke):
    """Spoke for Type2; discussion disabled (contrast with Type1Type)."""
    model = Type2
    discussable = False
class TestFile(FileContent):
    # File-backed test content; storage field is required (blank=False).
    storage = models.FileField(upload_to="files", blank=False)


class TestFileType(FileSpoke):
    """Spoke for TestFile; no child content types allowed."""
    model = TestFile
    children = ()


class OtherTestFile(FileContent):
    # Second, independent file type to test spoke/type discrimination.
    storage = models.FileField(upload_to="files", blank=False)


class OtherTestFileType(FileSpoke):
    """Spoke for OtherTestFile; no child content types allowed."""
    model = OtherTestFile
    children = ()
class TestImage(ImageContent):
    # Image-backed test content; storage field is required (blank=False).
    storage = models.ImageField(upload_to="images", blank=False)


class TestImageType(FileSpoke):
    """Spoke for TestImage; no child content types allowed."""
    model = TestImage
    children = ()


class OtherTestImage(ImageContent):
    # Second, independent image type to test spoke/type discrimination.
    storage = models.ImageField(upload_to="images", blank=False)


class OtherTestImageType(FileSpoke):
    """Spoke for OtherTestImage; no child content types allowed."""
    model = OtherTestImage
    children = ()
class TypeM2M(Content):
    # Self-referential many-to-many, for testing M2M handling in spokes.
    m2m = models.ManyToManyField("self")


class TypeM2MType(Spoke):
    model = TypeM2M


class TypeUnique(Content):
    # Unique-constrained field, for testing duplicate-value validation.
    uniek = models.TextField(unique=True)


class TypeUniqueType(Spoke):
    model = TypeUnique
# Register every test spoke with the global type registry so the test suite
# sees them as available content types.
type_registry.register(Type1Type)
type_registry.register(Type2Type)
type_registry.register(TestFileType)
type_registry.register(TestImageType)
type_registry.register(OtherTestFileType)
type_registry.register(OtherTestImageType)
type_registry.register(TypeM2MType)
type_registry.register(TypeUniqueType)
from wheelcms_axle.models import Configuration as BaseConfiguration
from wheelcms_axle.registries.configuration import configuration_registry
class Configuration(models.Model):
    """Test configuration model attached to the axle-wide Configuration."""
    # Link back to the main configuration record.
    main = models.ForeignKey(BaseConfiguration, related_name="testconf")
    value = models.TextField(blank=True)


configuration_registry.register("testconf", "ConfTest", Configuration, None)
|
wheelcms/wheelcms_axle
|
wheelcms_axle/tests/models.py
|
Python
|
bsd-2-clause
| 2,181
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Volume v2 Backup action implementations"""
import copy
import functools
import logging
from cinderclient import api_versions
from cliff import columns as cliff_columns
from osc_lib.cli import parseractions
from osc_lib.command import command
from osc_lib import exceptions
from osc_lib import utils
from openstackclient.i18n import _
LOG = logging.getLogger(__name__)
class VolumeIdColumn(cliff_columns.FormattableColumn):
    """Formattable column for volume ID column.

    Unlike the parent FormattableColumn class, the initializer of the
    class takes volume_cache as the second argument.
    osc_lib.utils.get_item_properties instantiate cliff FormattableColumn
    object with a single parameter "column value", so you need to pass
    a partially initialized class like
    ``functools.partial(VolumeIdColumn, volume_cache)``.
    """

    def __init__(self, value, volume_cache=None):
        super(VolumeIdColumn, self).__init__(value)
        # Map of volume ID -> volume object, used to resolve display names.
        self._volume_cache = volume_cache or {}

    def human_readable(self):
        """Return a volume name if available

        :rtype: either the volume ID or name
        """
        volume_id = self._value
        # dict.get() replaces the original "in self._volume_cache.keys()"
        # membership test plus second lookup: one lookup, same semantics.
        volume = self._volume_cache.get(volume_id)
        return volume.name if volume is not None else volume_id
class CreateVolumeBackup(command.ShowOne):
    _description = _("Create new volume backup")

    def get_parser(self, prog_name):
        """Build the argument parser for 'volume backup create'."""
        parser = super().get_parser(prog_name)
        parser.add_argument(
            "volume",
            metavar="<volume>",
            help=_("Volume to backup (name or ID)")
        )
        parser.add_argument(
            "--name",
            metavar="<name>",
            help=_("Name of the backup")
        )
        parser.add_argument(
            "--description",
            metavar="<description>",
            help=_("Description of the backup")
        )
        parser.add_argument(
            "--container",
            metavar="<container>",
            help=_("Optional backup container name")
        )
        parser.add_argument(
            "--snapshot",
            metavar="<snapshot>",
            help=_("Snapshot to backup (name or ID)")
        )
        parser.add_argument(
            '--force',
            action='store_true',
            default=False,
            help=_("Allow to back up an in-use volume")
        )
        parser.add_argument(
            '--incremental',
            action='store_true',
            default=False,
            help=_("Perform an incremental backup")
        )
        parser.add_argument(
            '--no-incremental',
            # BUG FIX: without an explicit dest, argparse derives
            # 'no_incremental', which take_action never reads, so the flag
            # was a silent no-op.  Point it at the same destination as
            # --incremental so it actually disables incremental mode.
            # (The effective default stays False: argparse applies the
            # default of the first action registered for a dest.)
            dest='incremental',
            action='store_false',
            help=_("Do not perform an incremental backup")
        )
        parser.add_argument(
            '--property',
            metavar='<key=value>',
            action=parseractions.KeyValueAction,
            dest='properties',
            help=_(
                'Set a property on this backup '
                '(repeat option to remove multiple values) '
                '(supported by --os-volume-api-version 3.43 or above)'
            ),
        )
        parser.add_argument(
            '--availability-zone',
            metavar='<zone-name>',
            help=_(
                'AZ where the backup should be stored; by default it will be '
                'the same as the source '
                '(supported by --os-volume-api-version 3.51 or above)'
            ),
        )
        return parser

    def take_action(self, parsed_args):
        """Create the backup and return its properties as (names, values).

        :raises exceptions.CommandError: when an option requires a newer
            volume API microversion than the one in use.
        """
        volume_client = self.app.client_manager.volume
        volume_id = utils.find_resource(
            volume_client.volumes, parsed_args.volume,
        ).id
        kwargs = {}

        if parsed_args.snapshot:
            kwargs['snapshot_id'] = utils.find_resource(
                volume_client.volume_snapshots, parsed_args.snapshot,
            ).id

        # Microversion-gated options: fail early with a clear message
        # rather than sending parameters the server would reject/ignore.
        if parsed_args.properties:
            if volume_client.api_version < api_versions.APIVersion('3.43'):
                msg = _(
                    '--os-volume-api-version 3.43 or greater is required to '
                    'support the --property option'
                )
                raise exceptions.CommandError(msg)
            kwargs['metadata'] = parsed_args.properties

        if parsed_args.availability_zone:
            if volume_client.api_version < api_versions.APIVersion('3.51'):
                msg = _(
                    '--os-volume-api-version 3.51 or greater is required to '
                    'support the --availability-zone option'
                )
                raise exceptions.CommandError(msg)
            kwargs['availability_zone'] = parsed_args.availability_zone

        backup = volume_client.backups.create(
            volume_id,
            container=parsed_args.container,
            name=parsed_args.name,
            description=parsed_args.description,
            force=parsed_args.force,
            incremental=parsed_args.incremental,
            **kwargs,
        )
        # 'links' is API plumbing, not useful display output.
        backup._info.pop("links", None)
        return zip(*sorted(backup._info.items()))
class DeleteVolumeBackup(command.Command):
    _description = _("Delete volume backup(s)")

    def get_parser(self, prog_name):
        """Build the argument parser for 'volume backup delete'."""
        parser = super(DeleteVolumeBackup, self).get_parser(prog_name)
        parser.add_argument(
            "backups",
            metavar="<backup>",
            nargs="+",
            help=_("Backup(s) to delete (name or ID)")
        )
        parser.add_argument(
            '--force',
            action='store_true',
            default=False,
            help=_("Allow delete in state other than error or available")
        )
        return parser

    def take_action(self, parsed_args):
        """Delete each requested backup, best-effort.

        Individual failures are logged and counted; a single CommandError
        summarizing the failures is raised at the end.
        """
        volume_client = self.app.client_manager.volume
        result = 0

        for i in parsed_args.backups:
            try:
                backup_id = utils.find_resource(
                    volume_client.backups, i,
                ).id
                volume_client.backups.delete(backup_id, parsed_args.force)
            except Exception as e:
                result += 1
                LOG.error(_("Failed to delete backup with "
                            "name or ID '%(backup)s': %(e)s")
                          % {'backup': i, 'e': e})

        if result > 0:
            total = len(parsed_args.backups)
            msg = _("%(result)s of %(total)s backups failed to delete.") % {
                'result': result, 'total': total,
            }
            raise exceptions.CommandError(msg)
class ListVolumeBackup(command.Lister):
    _description = _("List volume backups")

    def get_parser(self, prog_name):
        """Build the argument parser for 'volume backup list'."""
        parser = super(ListVolumeBackup, self).get_parser(prog_name)
        parser.add_argument(
            "--long",
            action="store_true",
            default=False,
            help=_("List additional fields in output")
        )
        parser.add_argument(
            "--name",
            metavar="<name>",
            help=_("Filters results by the backup name")
        )
        parser.add_argument(
            "--status",
            metavar="<status>",
            choices=[
                'creating', 'available', 'deleting',
                'error', 'restoring', 'error_restoring',
            ],
            help=_(
                "Filters results by the backup status, one of: "
                "creating, available, deleting, error, restoring or "
                "error_restoring"
            ),
        )
        parser.add_argument(
            "--volume",
            metavar="<volume>",
            help=_(
                "Filters results by the volume which they backup (name or ID)"
            ),
        )
        parser.add_argument(
            '--marker',
            metavar='<volume-backup>',
            help=_('The last backup of the previous page (name or ID)'),
        )
        parser.add_argument(
            '--limit',
            type=int,
            action=parseractions.NonNegativeAction,
            metavar='<num-backups>',
            help=_('Maximum number of backups to display'),
        )
        parser.add_argument(
            '--all-projects',
            action='store_true',
            default=False,
            help=_('Include all projects (admin only)'),
        )
        # TODO(stephenfin): Add once we have an equivalent command for
        # 'cinder list-filters'
        # parser.add_argument(
        #     '--filter',
        #     metavar='<key=value>',
        #     action=parseractions.KeyValueAction,
        #     dest='filters',
        #     help=_(
        #         "Filter key and value pairs. Use 'foo' to "
        #         "check enabled filters from server. Use 'key~=value' for "
        #         "inexact filtering if the key supports "
        #         "(supported by --os-volume-api-version 3.33 or above)"
        #     ),
        # )
        return parser

    def take_action(self, parsed_args):
        """List backups and return (column headers, row generator)."""
        volume_client = self.app.client_manager.volume

        columns = ('id', 'name', 'description', 'status', 'size')
        column_headers = ('ID', 'Name', 'Description', 'Status', 'Size')
        if parsed_args.long:
            columns += ('availability_zone', 'volume_id', 'container')
            column_headers += ('Availability Zone', 'Volume', 'Container')

        # Cache the volume list
        volume_cache = {}
        try:
            for s in volume_client.volumes.list():
                volume_cache[s.id] = s
        except Exception:
            # Just forget it if there's any trouble
            pass
        # Pre-bind the cache so get_item_properties can construct the
        # formatter with only the column value.
        _VolumeIdColumn = functools.partial(
            VolumeIdColumn, volume_cache=volume_cache)

        filter_volume_id = None
        if parsed_args.volume:
            filter_volume_id = utils.find_resource(
                volume_client.volumes, parsed_args.volume,
            ).id

        marker_backup_id = None
        if parsed_args.marker:
            marker_backup_id = utils.find_resource(
                volume_client.backups, parsed_args.marker,
            ).id

        search_opts = {
            'name': parsed_args.name,
            'status': parsed_args.status,
            'volume_id': filter_volume_id,
            'all_tenants': parsed_args.all_projects,
        }
        data = volume_client.backups.list(
            search_opts=search_opts,
            marker=marker_backup_id,
            limit=parsed_args.limit,
        )

        return (
            column_headers,
            (
                utils.get_item_properties(
                    s, columns, formatters={'volume_id': _VolumeIdColumn},
                ) for s in data
            ),
        )
class RestoreVolumeBackup(command.ShowOne):
    _description = _("Restore volume backup")

    def get_parser(self, prog_name):
        """Build the argument parser for 'volume backup restore'."""
        parser = super(RestoreVolumeBackup, self).get_parser(prog_name)
        parser.add_argument(
            "backup",
            metavar="<backup>",
            help=_("Backup to restore (name or ID)")
        )
        parser.add_argument(
            "volume",
            metavar="<volume>",
            help=_("Volume to restore to (name or ID)")
        )
        return parser

    def take_action(self, parsed_args):
        """Restore the backup into the destination volume and show it."""
        client = self.app.client_manager.volume
        source_backup = utils.find_resource(client.backups, parsed_args.backup)
        target_volume = utils.find_resource(client.volumes, parsed_args.volume)
        restored = client.restores.restore(source_backup.id, target_volume.id)
        return zip(*sorted(restored._info.items()))
class SetVolumeBackup(command.Command):
    _description = _("Set volume backup properties")

    def get_parser(self, prog_name):
        """Build the argument parser for 'volume backup set'."""
        parser = super().get_parser(prog_name)
        parser.add_argument(
            "backup",
            metavar="<backup>",
            help=_("Backup to modify (name or ID)")
        )
        parser.add_argument(
            '--name',
            metavar='<name>',
            help=_(
                'New backup name'
                '(supported by --os-volume-api-version 3.9 or above)'
            ),
        )
        parser.add_argument(
            '--description',
            metavar='<description>',
            help=_(
                'New backup description '
                '(supported by --os-volume-api-version 3.9 or above)'
            ),
        )
        parser.add_argument(
            '--state',
            metavar='<state>',
            choices=['available', 'error'],
            help=_(
                'New backup state ("available" or "error") (admin only) '
                '(This option simply changes the state of the backup '
                'in the database with no regard to actual status; '
                'exercise caution when using)'
            ),
        )
        parser.add_argument(
            '--no-property',
            action='store_true',
            help=_(
                'Remove all properties from this backup '
                '(specify both --no-property and --property to remove the '
                'current properties before setting new properties)'
            ),
        )
        parser.add_argument(
            '--property',
            metavar='<key=value>',
            action=parseractions.KeyValueAction,
            dest='properties',
            default={},
            help=_(
                'Set a property on this backup '
                '(repeat option to set multiple values) '
                '(supported by --os-volume-api-version 3.43 or above)'
            ),
        )
        return parser

    def take_action(self, parsed_args):
        """Apply state and/or attribute updates to the backup.

        The state reset and the attribute update are independent calls;
        failures are accumulated and reported once at the end.

        :raises exceptions.CommandError: when an option requires a newer
            volume API microversion, or when any update call failed.
        """
        volume_client = self.app.client_manager.volume
        backup = utils.find_resource(
            volume_client.backups, parsed_args.backup)
        result = 0
        if parsed_args.state:
            try:
                volume_client.backups.reset_state(
                    backup.id, parsed_args.state)
            except Exception as e:
                LOG.error(_("Failed to set backup state: %s"), e)
                result += 1

        kwargs = {}

        # Microversion-gated options: reject early with a clear message.
        if parsed_args.name:
            if volume_client.api_version < api_versions.APIVersion('3.9'):
                msg = _(
                    '--os-volume-api-version 3.9 or greater is required to '
                    'support the --name option'
                )
                raise exceptions.CommandError(msg)

            kwargs['name'] = parsed_args.name

        if parsed_args.description:
            if volume_client.api_version < api_versions.APIVersion('3.9'):
                msg = _(
                    '--os-volume-api-version 3.9 or greater is required to '
                    'support the --description option'
                )
                raise exceptions.CommandError(msg)

            kwargs['description'] = parsed_args.description

        if parsed_args.no_property:
            if volume_client.api_version < api_versions.APIVersion('3.43'):
                msg = _(
                    '--os-volume-api-version 3.43 or greater is required to '
                    'support the --no-property option'
                )
                raise exceptions.CommandError(msg)

        if parsed_args.properties:
            if volume_client.api_version < api_versions.APIVersion('3.43'):
                msg = _(
                    '--os-volume-api-version 3.43 or greater is required to '
                    'support the --property option'
                )
                raise exceptions.CommandError(msg)

        # NOTE(review): on API >= 3.43 the (possibly unchanged) metadata is
        # always included in the update request, even when neither property
        # option was given -- presumably harmless; verify against the API.
        if volume_client.api_version >= api_versions.APIVersion('3.43'):
            metadata = copy.deepcopy(backup.metadata)

            if parsed_args.no_property:
                metadata = {}

            metadata.update(parsed_args.properties)
            kwargs['metadata'] = metadata

        if kwargs:
            try:
                volume_client.backups.update(backup.id, **kwargs)
            except Exception as e:
                LOG.error("Failed to update backup: %s", e)
                result += 1

        if result > 0:
            msg = _("One or more of the set operations failed")
            raise exceptions.CommandError(msg)
class UnsetVolumeBackup(command.Command):
    """Unset volume backup properties.

    This command requires ``--os-volume-api-version`` 3.43 or greater.
    """

    def get_parser(self, prog_name):
        """Build the argument parser for 'volume backup unset'."""
        parser = super().get_parser(prog_name)
        parser.add_argument(
            'backup',
            metavar='<backup>',
            help=_('Backup to modify (name or ID)')
        )
        parser.add_argument(
            '--property',
            metavar='<key>',
            action='append',
            dest='properties',
            help=_(
                'Property to remove from this backup '
                '(repeat option to unset multiple values) '
            ),
        )
        return parser

    def take_action(self, parsed_args):
        """Remove the requested properties from the backup's metadata.

        Unknown property keys are logged and skipped rather than failing
        the whole command.

        :raises exceptions.CommandError: if the volume API microversion is
            below 3.43.
        """
        volume_client = self.app.client_manager.volume
        if volume_client.api_version < api_versions.APIVersion('3.43'):
            msg = _(
                '--os-volume-api-version 3.43 or greater is required to '
                'support the --property option'
            )
            raise exceptions.CommandError(msg)

        backup = utils.find_resource(
            volume_client.backups, parsed_args.backup)
        # Work on a copy so the cached resource object stays untouched.
        metadata = copy.deepcopy(backup.metadata)
        for key in parsed_args.properties:
            if key not in metadata:
                # ignore invalid properties but continue
                LOG.warning(
                    "'%s' is not a valid property for backup '%s'",
                    key, parsed_args.backup,
                )
                continue

            del metadata[key]

        kwargs = {
            'metadata': metadata,
        }

        volume_client.backups.update(backup.id, **kwargs)
class ShowVolumeBackup(command.ShowOne):
    _description = _("Display volume backup details")

    def get_parser(self, prog_name):
        """Build the argument parser for 'volume backup show'."""
        parser = super(ShowVolumeBackup, self).get_parser(prog_name)
        parser.add_argument(
            "backup",
            metavar="<backup>",
            help=_("Backup to display (name or ID)")
        )
        return parser

    def take_action(self, parsed_args):
        """Look up the backup and return its properties as (names, values)."""
        client = self.app.client_manager.volume
        found = utils.find_resource(client.backups, parsed_args.backup)
        # The 'links' entry is API plumbing, not useful display output.
        found._info.pop("links", None)
        return zip(*sorted(found._info.items()))
|
openstack/python-openstackclient
|
openstackclient/volume/v2/volume_backup.py
|
Python
|
apache-2.0
| 19,343
|
import MySQLdb
from MySQLdb.connections import *
class DirectMySQLdbConnection(Connection):
### DCR: from MySQLdb connections.py Connection.__init__
def __init__(self, *args, **kwargs):
### DCR: fixed up relative imports
from MySQLdb.constants import CLIENT, FIELD_TYPE
from MySQLdb.converters import conversions
from weakref import proxy, WeakValueDictionary
import types
kwargs2 = kwargs.copy()
conv = kwargs.get('conv', conversions)
kwargs2['conv'] = dict([ (k, v) for k, v in conv.items()
if type(k) is int ])
self.cursorclass = kwargs2.pop('cursorclass', self.default_cursor)
charset = kwargs2.pop('charset', '')
if charset:
use_unicode = True
else:
use_unicode = False
use_unicode = kwargs2.pop('use_unicode', use_unicode)
sql_mode = kwargs2.pop('sql_mode', '')
client_flag = kwargs.get('client_flag', 0)
### DCR: fixed up module reference
client_version = tuple([ int(n) for n in MySQLdb.connections._mysql.get_client_info().split('.')[:2] ])
if client_version >= (4, 1):
client_flag |= CLIENT.MULTI_STATEMENTS
if client_version >= (5, 0):
client_flag |= CLIENT.MULTI_RESULTS
kwargs2['client_flag'] = client_flag
### DCR: skip over the Connection __init__
#super(Connection, self).__init__(*args, **kwargs2)
MySQLdb._mysql.connection.__init__(self, *args, **kwargs2)
self.encoders = dict([ (k, v) for k, v in conv.items()
if type(k) is not int ])
self._server_version = tuple([ int(n) for n in self.get_server_info().split('.')[:2] ])
db = proxy(self)
### DCR: these functions create memory leaks with gc.DEBUG_SAVEALL turned on
"""
def _get_string_literal():
def string_literal(obj, dummy=None):
return db.string_literal(obj)
return string_literal
def _get_unicode_literal():
def unicode_literal(u, dummy=None):
return db.literal(u.encode(unicode_literal.charset))
return unicode_literal
def _get_string_decoder():
def string_decoder(s):
return s.decode(string_decoder.charset)
return string_decoder
"""
### DCR: use methods rather than inline-defined functions to prevent memory leaks
string_literal = self._get_string_literal(db)
self.unicode_literal = unicode_literal = self._get_unicode_literal(db)
self.string_decoder = string_decoder = self._get_string_decoder()
if not charset:
charset = self.character_set_name()
self.set_character_set(charset)
if sql_mode:
self.set_sql_mode(sql_mode)
if use_unicode:
self.converter[FIELD_TYPE.STRING].insert(-1, (None, string_decoder))
self.converter[FIELD_TYPE.VAR_STRING].insert(-1, (None, string_decoder))
self.converter[FIELD_TYPE.BLOB].insert(-1, (None, string_decoder))
self.encoders[types.StringType] = string_literal
self.encoders[types.UnicodeType] = unicode_literal
self._transactional = self.server_capabilities & CLIENT.TRANSACTIONS
if self._transactional:
# PEP-249 requires autocommit to be initially off
self.autocommit(False)
self.messages = []
    ### DCR: make inline-defined functions into member methods to avoid garbage
    # Quote/escape `obj` via the underlying connection's string_literal().
    # The extra `dummy` parameter mirrors the (obj, conversion-dict) signature
    # MySQLdb expects of encoder callables; it is ignored here.
    def _string_literal(self, db, obj, dummy=None):
        return db.string_literal(obj)
    # Build a Functor with `db` (a weak proxy to this connection) pre-bound,
    # instead of a closure, so no reference cycle / gc garbage is created.
    def _get_string_literal(self, db):
        return Functor(self._string_literal, db)
def _unicode_literal(self, db, u, dummy=None):
return db.literal(u.encode(unicode_literal.charset))
    # Build the unicode-literal encoder as a Functor; MySQLdb's
    # set_character_set() later assigns a `charset` attribute onto it.
    def _get_unicode_literal(self, db):
        return Functor(self._unicode_literal, db)
def _string_decoder(self, s):
return s.decode(string_decoder.charset)
    # Build the string decoder used for converting column data to unicode.
    def _get_string_decoder(self):
        # make it into a Functor since MySQLdb.connections.Connection wants to set
        # attributes on its string_decoder
        return Functor(self._string_decoder)
def close(self):
Connection.close(self)
# break garbage cycles
self.unicode_literal = None
self.string_decoder = None
self.encoders = None
|
hj3938/panda3d
|
direct/src/directutil/DirectMySQLdbConnection.py
|
Python
|
bsd-3-clause
| 4,471
|
'''
1) Реализовать метод Гаусса с выбором по столбцу - Done
2) построить с его помощью оценщик числа обусловленности матрицы системы в строчной норме;
3) протестировать качество данного оценщика и получаемой с его помощью
оценкой относительной погрешности решения через вектор невязки.
стр.36 (снизу) - оценщик срочной матричной нормы
'''
# стр. 32:
# Решение линейной системы Ax=b с использованием метода Гаусса с выбором по столбцу
# сводится к решению двух треугольных систем:
# | Ly = Pb
# | Ux = y
# где PA = LU
#
# Про нормы, числа обусловленности и оценку ошибок:
# http://www.math.hawaii.edu/~jb/math411/nation1
#
import numpy as np
import matplotlib.pyplot as plt
from scipy.linalg import solve as npsolve
from task1 import PLU_decomposition, PLR_decomposition
from utils import test_matrices, MatrixBuilder, get_random_vector
test_matrix = np.matrix([
[1, 1, 1],
[2, 2, 5],
[4, 6, 8],
], dtype=np.float)
test_matrix = test_matrices[0]
# test_matrix = np.matrix([
# [50, 2, 900],
# [0.1, 0.1, 14],
# [1.0, 99, 1.99],
# ], dtype=np.float)
# test_matrix = np.matrix([
# [10000, 0, 90000],
# [0.0001, 0.1, 14],
# [1.0, 99999, 1.999],
# ], dtype=np.float)
test_vector = np.array([
1, 0, 0, 0
# 1, 2, 3,
])
def max_row_norm(A):
    """Infinity norm: the largest sum of absolute values over the rows.

    For a 1-D input the norm degenerates to the maximum absolute entry.
    """
    if len(A.shape) == 1:
        # Vector case.
        return max(abs(component) for component in A)
    n_rows, n_cols = A.shape[0], A.shape[1]  # n_rows rows, n_cols columns
    row_sums = []
    for row in range(n_rows):
        row_sums.append(sum(abs(A[row, col]) for col in range(n_cols)))
    return max(row_sums)
# norm = max_row_norm
# Row-sum (infinity) norm; numpy equivalent of max_row_norm defined above.
norm = lambda A: np.linalg.norm(A, ord=np.inf)
# Column-sum (1-)norm, used inside the condition-number estimator.
norm1 = lambda A: np.linalg.norm(A, ord=1)
def condition_number_for(A):
    """Exact condition number of A in the chosen norm: ||A|| * ||A^-1||."""
    A_inverse = np.linalg.inv(A)
    return norm(A) * norm(A_inverse)
# Solving Ax=b with the PA=LU factorization (partial pivoting):
#   1. factor PA = LU, so the system becomes LUx = Pb;
#   2. solve Ly = Pb for y by forward substitution;
#   3. solve Ux = y for x by back substitution.
# https://en.wikipedia.org/wiki/Triangular_matrix#Forward_and_back_substitution
def plu_solve(A: np.matrix, b: np.array) -> np.array:
    """Solve the linear system Ax=b via two triangular solves."""
    P, L, U = PLR_decomposition(A)
    rhs = P.transpose().dot(b)
    n = b.size
    # Forward substitution: L y = P b.
    y = np.zeros(n)
    for row in range(n):
        partial = sum(L[row][col] * y[col] for col in range(0, row))
        y[row] = (rhs[row] - partial) / L[row][row]
    # Back substitution: U x = y.
    x = np.zeros(n)
    for row in reversed(range(n)):
        partial = sum(U[row][col] * x[col] for col in range(row + 1, n))
        x[row] = (y[row] - partial) / U[row][row]
    return x
# Condition number estimator ("оценщик числа обусловленности").
# Estimates cond(A) in the row (infinity) norm from the PLU factors only,
# without forming inv(A).  NOTE(review): this appears to be the classic
# greedy +/-1 right-hand-side estimator (Cline/Higham family) from the
# course notes (p.36) — confirm against those notes.
def cn_estimator(A: np.matrix):
    P, L, R = PLU_decomposition(A)
    n = A.shape[0]
    # NOTE: np.float was removed in NumPy 1.24+; this code targets older numpy.
    p = np.zeros(n, dtype=np.float)
    y = np.zeros(n, dtype=np.float)
    # Step 1: solve R^T y = d for a sign vector d with entries +/-1, choosing
    # each sign greedily so that the partial solution grows as much as possible.
    T = R.transpose()
    for k in reversed(range(0, n)):
        Tk = np.array([ T[k, i] for i in range(0, k) ], dtype=np.float)
        # Candidate values of y[k] for d[k] = +1 and d[k] = -1.
        ykp = (1.0 - p[k]) / T[k, k]
        ykm = (-1.0 - p[k]) / T[k, k]
        pkp = p[:k] + Tk*ykp
        pkm = p[:k] + Tk*ykm
        # Keep whichever sign yields the larger growth.
        if abs(ykp) + norm1(pkp) >= abs(ykm) + norm1(pkm):
            y[k] = ykp
            p[:k] = pkp
        else:
            y[k] = ykm
            p[:k] = pkm
    # Step 2: propagate through the remaining factors.
    r = npsolve(L.transpose(), y)
    w = npsolve(L, P.transpose().dot(r))
    z = npsolve(R, w)
    # A.transpose().dot(P).dot(u), U.transpose().dot(L.transpose()).dot(u), y
    # Step 3: the estimate itself.
    k_num = norm(A) * norm(z) / norm(r)
    return k_num
if __name__ == '__main__':
    A = test_matrix
    b = test_vector
    # print(npsolve(A, b))
    # print(plu_solve(A, b))
    print(cn_estimator(A))
    print(condition_number_for(A))
    # Build a batch of random near-singular test systems.
    nmatrices = 25
    msize = 30
    systems = []
    for i in range(0, nmatrices):
        matrix = MatrixBuilder(msize).nonsingular().nearsingular(3000).gen()
        vector = get_random_vector(msize)
        systems.append( (matrix, vector) )
    from task2 import relative_residual
    reals = [] ; estims = []; residues = []
    lvalues = []
    rvalues = []
    for i in range(0, len(systems)):
        matrix, vector = systems[i]
        # Reference solution in extended precision vs. our PLU solver.
        # NOTE: np.float128 is unavailable on some platforms (e.g. Windows).
        x = npsolve(matrix.astype(np.float128), vector.astype(np.float128))
        xhat = plu_solve(matrix, vector)
        res = relative_residual(matrix, xhat, vector)
        real_cn = condition_number_for(matrix)
        estim_cn = cn_estimator(matrix)
        reals.append(real_cn)
        estims.append(estim_cn)
        residues.append(res * estim_cn)
        r = vector - matrix.dot(x)
        lvalues.append( norm(x - xhat) / norm(x) )
        # BUG FIX: the residual bound must be scaled by the right-hand side
        # of THIS system (`vector`), not the unrelated 4-element module-level
        # test vector `b`.
        rvalues.append( estim_cn * (norm(r) / norm(vector)) )
    for i in range(0, len(systems)):
        print(reals[i], estims[i])
        #print(lvalues[i], rvalues[i]
    fig, ax1 = plt.subplots()
    #ax2 = ax1.twinx()
    ax1.set_yscale('log')
    ax1.plot(reals)
    ax1.plot(estims)
    # ax2.plot(residues, color='r')
    # plt.plot(lvalues)
    # plt.plot(rvalues)
    plt.show()
|
maxmalysh/congenial-octo-adventure
|
mod1/task3.py
|
Python
|
unlicense
| 5,323
|
# ===========================================================================
# eXe
# Copyright 2004-2005, University of Auckland
# Copyright 2004-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
This class transforms an eXe node into a page on a single page website
"""
import logging
import re
from cgi import escape
from urllib import quote
from exe.webui.blockfactory import g_blockFactory
from exe.engine.error import Error
from exe.engine.path import Path
from exe.export.pages import Page, uniquifyNames
from exe.webui import common
from exe import globals as G
log = logging.getLogger(__name__)
# ===========================================================================
class SinglePage(Page):
    """
    This class transforms an eXe node into a page on a single page website
    """
    def save(self, filename, for_print=0):
        """
        Save page to a file.
        'filename' is the path of the file the rendered page is written to.
        """
        outfile = open(filename, "wb")
        outfile.write(self.render(self.node.package,for_print).encode('utf8'))
        outfile.close()
    def render(self, package, for_print=0):
        """
        Returns an XHTML string rendering this page.
        """
        html = self.renderHeader(package.title, for_print)
        if for_print:
            # include extra onload bit:
            html += u'<body onload="print_page()">\n'
        else:
            html += u"<body>\n"
        html += u"<div id=\"content\">\n"
        html += u"<div id=\"header\">\n"
        html += "<h1>"+escape(package.title)+"</h1>"
        html += u"</div>\n"
        html += u"<div id=\"main\">\n"
        html += self.renderNode(package.root, 1)
        html += u"</div>\n"
        html += self.renderLicense()
        html += self.renderFooter()
        html += u"</div>\n"
        html += u"</body></html>\n"
        # JR: strip the eXe LaTeX bookkeeping attributes from exported equations
        aux = re.compile("exe_math_latex=\"[^\"]*\"")
        html = aux.sub("", html)
        aux = re.compile("exe_math_size=\"[^\"]*\"")
        html = aux.sub("", html)
        # JR: rewrite the glossary link path and un-escape the & entity
        html = html.replace("../../../../../mod/glossary", "../../../../mod/glossary")
        html = html.replace("&amp;concept", "&concept")
        return html
    def renderHeader(self, name, for_print=0):
        """
        Returns an XHTML string for the header of this page.
        """
        html  = u"<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
        html += u'<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 '
        html += u'Transitional//EN" '
        html += u'"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n'
        lenguaje = G.application.config.locale
        html += u"<html lang=\"" + lenguaje + "\" xml:lang=\"" + lenguaje + "\" xmlns=\"http://www.w3.org/1999/xhtml\">\n"
        html += u"<head>\n"
        html += u"<style type=\"text/css\">\n"
        html += u"@import url(base.css);\n"
        html += u"@import url(content.css);\n"
        html += u"</style>"
        html += u"<title>"
        # BUG FIX: escape the title like render() does for the <h1>; a raw
        # '&' or '<' in the package title produced invalid XHTML here.
        html += escape(name)
        html += "</title>\n"
        html += u"<meta http-equiv=\"Content-Type\" content=\"text/html; "
        html += u" charset=utf-8\" />\n";
        html += u'<script type="text/javascript" src="common.js"></script>\n'
        if for_print:
            # include extra print-script for onload bit
            html += u'<script type="text/javascript">\n'
            html += u'function print_page() {\n'
            html += u'  window.print();\n'
            html += u'  window.close();\n'
            html += u'}\n'
            html += u'</script>\n'
        html += u"</head>\n"
        return html
    # JR: modified so each node renders an <hX> heading matching its depth
    def renderNode(self, node, nivel):
        """
        Returns an XHTML string for this node and recurse for the children
        """
        html = ""
        html += '<div class="node">\n'
        html += '  <div class=\"nodeDecoration\">'
        html += '<h' + str(nivel) + ' class=\"nodeTitle\">'
        html += escape(node.titleLong)
        html += '</h' + str(nivel) + '></div>\n'
        style = self.node.package.style
        for idevice in node.idevices:
            html += u'  <div class="%s" id="id%s">\n' % (idevice.klass,
                    idevice.id)
            block = g_blockFactory.createBlock(None, idevice)
            if not block:
                log.critical("Unable to render iDevice.")
                raise Error("Unable to render iDevice.")
            if hasattr(idevice, "isQuiz"):
                html += block.renderJavascriptForWeb()
            html += self.processInternalLinks(block.renderView(style))
            html += u'  </div>\n' # iDevice div
        html += '</div>\n' # node div
        for child in node.children:
            html += self.renderNode(child, nivel+1)
        return html
    def processInternalLinks(self, html):
        """
        take care of any internal links which are in the form of:
           href="exe-node:Home:Topic:etc#Anchor"
        For this SinglePage Export, go ahead and keep the #Anchor portion,
        but remove the 'exe-node:Home:Topic:etc', since it is all
        exported into the same file.
        """
        return common.removeInternalLinkNodes(html)
|
luisgg/iteexe
|
exe/export/singlepage.py
|
Python
|
gpl-2.0
| 6,261
|
#!/usr/bin/env python
'''Eyelog.py is a module that contains all classes needed to
create a logfile for the analysis of behavioral experiments
that yield eye-movement data.
In theory an experiment consists of a series of trials.
In a trial a number of events can occur. Events
are the occurrence of samples of the eyetracker, fixations, saccades
etc. An event is characterized by a timestamp and a type.
Events will be primarily sorted on timestamp and then on type.
'''
##
# \file eyelog.py
# This file contains utilities for log files.
#
import itertools
import functools
import abc
##
# An abstract base class for all LogEntries in an eyelog.
#
# Entries are ordered first on their timestamp and then on their type;
# the integer class attributes below enumerate the known entry types.
class LogEntry (abc.ABC):
    ## Entry that describes a left gaze sample
    LGAZE = 0
    ## Entry that describes a right gaze sample
    RGAZE = 1
    ## Entry that describes a left fixation
    LFIX = 2
    ## Entry that describes a right fixation
    RFIX = 3
    ## Entry that describes a stimulus
    STIMULUS = 4
    ## Entry that describes a user defined message
    MESSAGE = 5
    ## Entry that describes a saccade of the left eye
    LSAC = 6
    ## Entry that describes a saccade of the right eye
    RSAC = 7
    # extended types these don't belong to a log, but can be handy for unhandy formats
    # that log a end event, or do some other marking like Begin and end
    ## Is a gaze in a eyelink asc log
    ASCGAZE = 8
    ## Is a Eyelink fixation end in a asc log of the left eye
    FIXENDL = 9
    ## Is a Eyelink fixation end in a asc log of the right eye
    FIXENDR = 10
    ## Is a saccade end in an asc log of the left eye.
    SACCENDL= 11
    ## Is a saccade end in an asc log of the right eye.
    SACCENDR= 12
    ## Mark a begin in a ascii log
    BEGIN = 13
    ## Mark an end in a ascii log
    END = 14
    ## The separator used to separate columns.
    SEP = '\t'
    ## Construct an instance of LogEntry
    #
    # @param entrytype defines what kind of log entry this is.
    # @param eyetime A float that marks the time in the time of the eyetracker.
    def __init__(self, entrytype, eyetime):
        ## The type of entry of this LogEntry
        self.entrytype = entrytype
        ## The eyetracker timestamp (ms) of this entry
        self.eyetime = eyetime
    ## Tell what kind of LogEntry this is.
    def getEntryType(self) :
        return self.entrytype
    ## Compares for object equality
    def __eq__(self, other):
        return type(self) is type(other) and self.__dict__ == other.__dict__
    ## Compares for object difference
    def __ne__(self, other):
        return not self == other
    ## Return the timepoint of this entry in milliseconds (eyetracker time).
    def getEyeTime(self):
        return self.eyetime
    ## callback used to sort logentries on time
    #
    # Returns -1, 0 or 1, like an old-style cmp function.
    @staticmethod
    def sortCallback(el, er):
        diff = el.getEyeTime() - er.getEyeTime()
        if diff < 0:
            return -1
        elif diff == 0:
            return 0
        else:
            return 1
    ## Serialize this logentry in EyeLink Ascii format
    #
    # @return A string that describes the event suitable for a eyelink log.
    @abc.abstractmethod
    def toAsc(self):
        ''' Implement return of string in format as Eyelink edf to ascii does. '''
        pass
    ##
    # Create a deep copy of this instance
    #
    @abc.abstractmethod
    def copy(self):
        pass
    # @abstractmethod
    # def __str__():
    #     ''' Implement return of string in format as Eyelink edf to ascii does. '''
    #     pass
    ## Returns True if this is a left gaze sample
    @staticmethod
    def isLGaze(entry):
        return entry.getEntryType() == LogEntry.LGAZE
    ## Returns True if this is a right gaze sample
    @staticmethod
    def isRGaze(entry):
        return entry.getEntryType() == LogEntry.RGAZE
    ## Returns True if this is a gaze sample
    @staticmethod
    def isGaze(entry):
        return LogEntry.isLGaze(entry) or LogEntry.isRGaze(entry)
    ## Returns True if this is a fixation
    @staticmethod
    def isFixation(entry):
        ''' Determines whether a LogEntry is a fixaton '''
        return entry.getEntryType() == LogEntry.LFIX or entry.getEntryType() == LogEntry.RFIX
    ## Returns True if this is a saccade
    @staticmethod
    def isSaccade(entry):
        ''' Determines whether a LogEntry is a saccade '''
        return entry.getEntryType() == LogEntry.LSAC or entry.getEntryType() == LogEntry.RSAC
    ## Returns True if this is a message entry
    @staticmethod
    def isMessage(entry):
        return entry.getEntryType() == LogEntry.MESSAGE
    ## Predicate used for removing fixations and saccades from the log.
    @staticmethod
    def _removeEyeEvents(entry):
        return not (LogEntry.isFixation(entry) or LogEntry.isSaccade(entry))
    ##
    # Removes all fixations and saccades from the log.
    #
    # \param entries a list (iterable) of LogEntry
    # \returns list of filtered entries.
    @staticmethod
    def removeEyeEvents(entries):
        entries = filter(LogEntry._removeEyeEvents, entries)
        return list(entries)
    ## Removes the left gaze entries.
    #
    # can be used to obtain a list without the left gaze
    # \param entries a list (iterable) of LogEntry
    @staticmethod
    def removeLeftGaze(entries):
        filt = lambda e: not e.getEntryType() == LogEntry.LGAZE
        return list(filter(filt, entries))
    ##
    # Removes the right gaze entries
    #
    # can be used to obtain a list without the right gaze
    # \param entries an iterable
    @staticmethod
    def removeRightGaze(entries):
        filt = lambda e: not e.getEntryType() == LogEntry.RGAZE
        return list(filter(filt, entries))
##
# A single left- or right-eye gaze sample from the eyetracker.
class GazeEntry(LogEntry):
    ##
    # Construct a GazeEntry.
    #
    # \param entrytype LogEntry.LGAZE or LogEntry.RGAZE
    # \param eyetime   eyetracker timestamp (ms) of the sample
    # \param x         x coordinate of the gaze
    # \param y         y coordinate of the gaze
    # \param pupil     pupil size at the moment of the sample
    def __init__(self, entrytype, eyetime, x, y, pupil):
        super(GazeEntry, self).__init__(entrytype, eyetime)
        ## the x coordinate of the gaze sample
        self.x = x
        ## the y coordinate of the gaze sample
        self.y = y
        ## the pupilsize of the gaze sample
        self.pupil = pupil
    ##
    # Return a new GazeEntry identical to this one.
    def copy(self):
        duplicate = GazeEntry(
            self.entrytype, self.eyetime, self.x, self.y, self.pupil
        )
        return duplicate
    ##
    # Raw gaze samples have no Eyelink ascii representation of their own;
    # they are exported via AscGazeEntry instead.
    def toAsc(self):
        raise ValueError("GazeEntries should not be converted to .asc format")
##
# An entry in a logfile that logs the gaze compatible for Fixation program(Cozijn)
#
# Combines a left and/or right GazeEntry into one sample line.  When both
# eyes are present, the left eye's timestamp is used for the entry.
class AscGazeEntry(LogEntry):
    ##
    # \param lgaze a GazeEntry for the left eye
    # \param rgaze a GazeEntry for the right eye
    def __init__(self, lgaze, rgaze):
        time = None
        if lgaze:
            time = lgaze.getEyeTime()
        elif rgaze:
            time = rgaze.getEyeTime()
        else:
            raise ValueError("Both lgaze and rgaze are not valid")
        super(AscGazeEntry, self).__init__(LogEntry.ASCGAZE, time)
        ## contains a GazeEntry for the left eye.
        self.lgaze = lgaze
        ## contains a GazeEntry for the right eye.
        self.rgaze = rgaze
    ##
    # deep copy the asc gaze entry
    # NOTE(review): the contained GazeEntry objects are shared with the
    # original rather than copied — confirm this is intended.
    def copy(self):
        return AscGazeEntry(self.lgaze, self.rgaze)
    ##
    # Create a string from self in Eyelink ascii format
    def toAsc(self):
        string = str(int(self.getEyeTime()))
        SEP = LogEntry.SEP
        #TODO check if fixation performs better if times are in integers
        if self.lgaze:
            string += (SEP + str(self.lgaze.x) + SEP + str(self.lgaze.y) + SEP
                       + str(self.lgaze.pupil)
                       )
        if self.rgaze:
            string += (SEP + str(self.rgaze.x) + SEP + str(self.rgaze.y) + SEP
                       + str(self.rgaze.pupil)
                       )
        return string
##
# A FixationEntry Describes a fixation of the left or right eye
#
# A fixation is determined by a location on a 2D plane, its time
# and the duration of the fixation.
class FixationEntry(LogEntry):
    ##
    # Init a fixation entry
    #
    # \param entrytype Must be LogEntry.LFIX or LogEntry.RFIX
    # \param eyetime The time (ms) on the eyetracker when the fixation starts.
    # \param eyedur The duration of the fixation.
    # \param x The x coordinate of the fixation
    # \param y The y coordinate of the fixation
    def __init__(self, entrytype, eyetime, eyedur, x, y):
        super(FixationEntry,self).__init__(entrytype, eyetime)
        ## the x coordinate of this fixation
        self.x = x
        ## the y coordinate of this fixation
        self.y = y
        ## the duration of this fixation
        self.duration = eyedur
    ##
    # Create a deep copy of this fixation.
    def copy(self):
        return FixationEntry(
            self.entrytype, self.eyetime, self.duration, self.x, self.y
        )
    ##
    # Create a string from self in Eyelink ascii format (SFIX start marker;
    # the matching EFIX line is produced by FixationEndEntry).
    def toAsc(self):
        ssac = ""
        if self.getEntryType() == LogEntry.LFIX:
            ssac = "SFIX\tL\t"
        else:
            ssac = "SFIX\tR\t"
        return ssac + str(int(self.getEyeTime()))
## This class can be used to mark fixation ends in an asc log.
#
# Its timestamp is the fixation's start time plus its duration.
class FixationEndEntry(LogEntry):
    ##
    # @param fixation a valid FixationEntry
    def __init__(self, fixation):
        ## The FixationEntry that belongs to this end entry.
        self.fixation = fixation
        time = fixation.getEyeTime() + fixation.duration
        entry = None
        if fixation.getEntryType() == LogEntry.LFIX:
            entry = LogEntry.FIXENDL
        elif fixation.getEntryType() == LogEntry.RFIX:
            entry = LogEntry.FIXENDR
        else:
            raise ValueError("Fixation entry should be initialized with LFIX or RFIX")
        super(FixationEndEntry, self).__init__(entry, time)
    ##
    # create a deepcopy of oneself
    def copy(self):
        # BUG FIX: this used to return FixationEntry(self.fixation), which
        # both constructs the wrong class and calls FixationEntry with too
        # few arguments (TypeError at runtime).  Mirror SaccadeEndEntry.copy.
        return FixationEndEntry(self.fixation.copy())
    ##
    # Create a string from self in Eyelink ascii format
    def toAsc(self):
        efix = ""
        if self.getEntryType() == LogEntry.FIXENDL:
            efix = "EFIX\tL"
        elif self.getEntryType() == LogEntry.FIXENDR:
            efix = "EFIX\tR"
        else: raise ValueError("Wrong entry type in FixationEndEntry")
        SEP = LogEntry.SEP
        return efix + SEP + str(int(self.fixation.getEyeTime()))+ \
               SEP + str(int(self.getEyeTime())) + \
               SEP + str(int(self.fixation.duration)) + \
               SEP + str(self.fixation.x) + \
               SEP + str(self.fixation.y) + \
               SEP + str(int(self.fixation.duration))
## A logged user defined message in a string.
#
class MessageEntry(LogEntry):
    ##
    # Construct a MessageEntry.
    #
    # \param eyetime the time of the message in eyetracking time
    # \param message a user defined string
    def __init__(self, eyetime, message):
        super(MessageEntry, self).__init__(LogEntry.MESSAGE, eyetime)
        ## the user supplied message text
        self.message = message
    ##
    # Return a deep copy of this entry.
    def copy(self):
        duplicate = MessageEntry(self.eyetime, str(self.message))
        return duplicate
    ##
    # Create a string from self in Eyelink ascii format
    def toAsc(self):
        fields = ["MSG", str(int(self.getEyeTime())), self.message]
        return "\t".join(fields)
##
# SaccadeEntry This describes a saccade in an experiment
#
# A saccade is defined by its eye, a starttime, duration and start and end position
class SaccadeEntry(LogEntry):
    ##
    # Initialize a SaccadeEntry
    #
    # \param et must be LogEntry.LSAC or LogEntry.RSAC
    # \param eyetime the time (ms) on eyetracker when the saccade started
    # \param duration the duration(ms) of the saccade
    # \param xstart starting x coordinate.
    # \param ystart starting y coordinate.
    # \param xend end x coordinate.
    # \param yend end y coordinate.
    def __init__(self,
                 et,
                 eyetime,
                 duration,
                 xstart,
                 ystart,
                 xend,
                 yend
                 ):
        super(SaccadeEntry, self).__init__(et, eyetime)
        ## x coordinate of the start
        self.xstart = xstart
        ## y coordinate of the start
        self.ystart = ystart
        ## x coordinate of the end position
        self.xend = xend
        ## y coordinate of the end position
        self.yend = yend
        ## duration of the saccade in ms.
        self.duration = duration
    ##
    # create a deep copy of oneself
    def copy(self):
        return SaccadeEntry(
            self.entrytype, self.eyetime, self.duration,
            self.xstart, self.ystart, self.xend, self.yend
        )
    ##
    # Create a string from self in Eyelink ascii format (SSACC start marker;
    # the matching ESACC line is produced by SaccadeEndEntry).
    def toAsc(self):
        string =""
        SEP = LogEntry.SEP
        if self.getEntryType() == LogEntry.RSAC:
            string += ("SSACC" + SEP + "R" + SEP + str(int(self.getEyeTime())) )
        elif self.getEntryType() == LogEntry.LSAC:
            string += ("SSACC" + SEP + "L" + SEP + str(int(self.getEyeTime())) )
        else: raise ValueError("Unknown entry type")
        return string
## A marker for saccade end in a Eyelink ascii log.
#
# Its timestamp is the saccade's start time plus its duration.
class SaccadeEndEntry(LogEntry):
    ##
    # inits a SaccadeEndEntry
    #
    # \param saccade a valid SaccadeEntry
    def __init__(self, saccade):
        ## the saccade that belong to this end marker
        self.saccade = saccade
        start = saccade.getEyeTime() + saccade.duration
        entry = None
        if saccade.getEntryType() == LogEntry.RSAC:
            entry = LogEntry.SACCENDR
        elif saccade.getEntryType() == LogEntry.LSAC:
            entry = LogEntry.SACCENDL
        else:
            raise ValueError("No saccade to init SaccadeEndEntry")
        super(SaccadeEndEntry, self).__init__(entry, start)
    ##
    # Creates a deepcopy of oneself
    def copy(self):
        return SaccadeEndEntry(self.saccade.copy())
    ##
    # Create a string from self in Eyelink ascii format (ESACC line)
    def toAsc(self):
        esac = ""
        SEP = LogEntry.SEP
        if self.getEntryType() == LogEntry.SACCENDR:
            esac += ("ESACC" + SEP + "R" + SEP)
        elif self.getEntryType() == LogEntry.SACCENDL:
            esac += ("ESACC" + SEP + "L" + SEP)
        else:
            raise ValueError("invalid end saccade encountered")
        esac += ( str(int(self.saccade.getEyeTime())) + SEP + \
                  str(int(self.getEyeTime())) + SEP + \
                  str(int(self.saccade.duration)) + SEP + \
                  str(self.saccade.xstart) + SEP + \
                  str(self.saccade.ystart) + SEP + \
                  str(self.saccade.xend) + SEP + \
                  str(self.saccade.yend) + SEP + \
                  str(int(self.saccade.duration))
                )
        return esac
##
# This is a startentry for a fixation log. It is present in the log because
# it expands to some general information in an Eyelink ascii log
#
class StartEntry(LogEntry):
    '''
    Start-of-recording entry; expands to the START/EVENTS/SAMPLES header
    lines that the Fixation tool expects in an Eyelink ascii log.
    '''
    # indicators for which eye is measured
    ## log uses left eye
    LEFT = 1
    ## log uses right eye
    RIGHT = 2
    ## log uses both eyes
    BINO = 3
    ##
    # Construct the boilerplate header entry that Fixation expects.
    #
    # \param time the time of the log
    # \param eye must be StartEntry.LEFT, .RIGHT, or BINO, but I don't expect
    # Fixation to understand about binocular data...
    # \param le default to windows line ending(works on most systems)
    def __init__(self, time, eye, le="\r\n"):
        ## tells which eye is present in data.
        self.eye = eye
        ## tells which line ending must be used
        self.le = le
        super(StartEntry, self).__init__(LogEntry.BEGIN, time)
    ##
    # Return a copy sharing the same time, eye and line ending.
    def copy(self):
        return StartEntry(self.eyetime, self.eye, self.le)
    ##
    # Create a string from self in Eyelink ascii format
    def toAsc(self):
        ''' Return a ascii presentation of these events '''
        SEP = LogEntry.SEP
        start   = "START"   + SEP + str(int(self.getEyeTime())) + SEP
        samples = "SAMPLES" + SEP + "GAZE" + SEP
        events  = "EVENTS"  + SEP + "GAZE" + SEP
        postfix = "TRACKING" + SEP + "CR" + SEP + "FILTER" + SEP + "2"
        if self.eye == StartEntry.LEFT:
            start += "LEFT" + SEP
            # TODO hard coded "250" isn't really nice"
            events  += SEP.join(["LEFT", "RATE", "250", postfix])
            samples += SEP.join(["LEFT", "HTARGET", "RATE", "250", postfix])
        elif self.eye == StartEntry.RIGHT:
            start += "RIGHT" + SEP
            events  += SEP.join(["RIGHT", "RATE", "250", postfix])
            samples += SEP.join(["RIGHT", "HTARGET", "RATE", "250", postfix])
        elif self.eye == StartEntry.BINO:
            # NOTE Fixation tool doesn't know about this.
            start += "".join(["LEFT", SEP, "RIGHT", SEP])
            events  += SEP.join(["LEFT", "RIGHT", "RATE", "250", postfix])
            samples += SEP.join(["LEFT", "RIGHT", "HTARGET", "RATE", "250", postfix])
        else:
            raise ValueError("Unknown eye\"type\"")
        start += SEP.join(["SAMPLES", "EVENTS"])
        #return start
        return self.le.join([start, events, samples])
##
# needed to mark an end in a Eyelink ascii log
class EndEntry(LogEntry):
    ##
    # Construct an EndEntry at the given eyetracker time.
    def __init__(self, time):
        super(EndEntry, self).__init__(LogEntry.END, time)
    ##
    # Return a new EndEntry with the same timestamp.
    def copy(self):
        return EndEntry(self.eyetime)
    ##
    # Create a string from self in Eyelink ascii format
    def toAsc(self):
        sep = LogEntry.SEP
        fields = ["END", str(int(self.getEyeTime())), "SAMPLES", "RES"]
        return sep.join(fields)
##
# Generator function that yields fixations
# @param entries a iterable of LogEntry
def generateFixations(entries):
    ''' Generates fixations '''
    fixation_types = (LogEntry.LFIX, LogEntry.RFIX)
    for entry in entries:
        if entry.getEntryType() in fixation_types:
            yield entry
##
# Generator function that yields saccades
# @param entries a iterable of LogEntry
def generateSaccades(entries):
    ''' Generates saccades '''
    saccade_types = (LogEntry.LSAC, LogEntry.RSAC)
    for entry in entries:
        if entry.getEntryType() in saccade_types:
            yield entry
##
# yields FixationEndEntry marks for an Eyelink log
# @param entries a iterable of LogEntry
def generateNewEndfixations(entries):
    ''' Generates end fixations '''
    for fix in generateFixations(entries):
        # BUG FIX: yielded the undefined name EndFixation; the end-marker
        # class defined in this module is FixationEndEntry.
        yield FixationEndEntry(fix)
##
# yields SaccadeEndEntry marks for an Eyelink log
# @param entries a iterable of LogEntry
def generateNewEndSaccades(entries):
    ''' Generates end saccades '''
    for sac in generateSaccades(entries):
        # BUG FIX: yielded the undefined name EndSaccade; the end-marker
        # class defined in this module is SaccadeEndEntry.
        yield SaccadeEndEntry(sac)
##
# Generator for left-eye gaze samples
# @param entries a iterable of LogEntry
def generateLGaze(entries):
    yield from filter(LogEntry.isLGaze, entries)
##
# Generator for right-eye gaze samples
# @param entries a iterable of LogEntry
def generateRGaze(entries):
    yield from filter(LogEntry.isRGaze, entries)
##
# Callable class to sort logentries for an Eyelink compatible log
class SortFixationLog:
    # These values are used to sort entries with an equal timestamp
    ## Entry with end.
    end = 0
    ## Entry with msg.
    msg = end + 1
    ## Entry with start fixation left eye.
    sfixl = msg + 1
    ## Entry with start fixation right eye.
    sfixr = sfixl + 1
    ## Entry with start saccade left eye.
    ssacl = sfixr + 1
    ## Entry with start saccade right eye.
    ssacr = ssacl + 1
    ## Entry with gaze.
    gaze = ssacr + 1
    ## Entry with end fixation with right eye.
    efixr = gaze + 1
    ## Entry with end fixation with left eye.
    efixl = efixr + 1
    ## Entry with end saccade with right eye.
    esacr = efixl + 1
    ## Entry with end saccade with left eye.
    esacl = esacr + 1
    ## Entry with start marker.
    start = esacl + 1
    ##
    # this dictionary maps LogEntry.getEntryType() to above messages
    # so the above order is used for sorting.
    mapdict = {
        LogEntry.LFIX : sfixl ,
        LogEntry.RFIX : sfixr ,
        LogEntry.MESSAGE : msg ,
        LogEntry.LSAC : ssacl ,
        LogEntry.RSAC : ssacr ,
        LogEntry.ASCGAZE : gaze ,
        LogEntry.FIXENDR : efixr ,
        LogEntry.FIXENDL : efixl ,
        LogEntry.SACCENDR : esacr ,
        LogEntry.SACCENDL : esacl ,
        LogEntry.BEGIN : start ,
        LogEntry.END : end
    }
    ##
    # This method can be used by a sorting algoritm to sort entries
    # the items will be sorted on timestamp first and LogEntries with
    # equal timestamps will be sorted on entry type secondly.
    #
    # \todo When a key error occures raise a new exceptions, since it
    # is a programming and not a runtime error.
    # NOTE(review): on KeyError (an entry type missing from mapdict) this
    # only prints a diagnostic and falls through to `return diff` (== 0),
    # silently treating the unknown types as equal — confirm intended.
    def __call__(self, lefthandside, righthandside):
        diff = LogEntry.sortCallback(lefthandside, righthandside)
        if diff == 0:
            try:
                return self.mapdict[lefthandside.getEntryType()] -\
                    self.mapdict[righthandside.getEntryType()]
            except KeyError as e:
                print("left = ",lefthandside.getEntryType(), end=' ')
                print("\tright= ",righthandside.getEntryType())
        return diff
##
# Appends begin and end entries to a Eyelink ascii log
#
# For every "trialbeg"/"trialend" message (except FILL trials) a matching
# StartEntry/EndEntry with the same timestamp is appended to `entries`.
# \param entries list of LogEntry, modified in place
# \param eye StartEntry.LEFT, .RIGHT or .BINO
def _appendBeginEndEntries(entries, eye):
    import re
    ## Can be used as test to filter a list of entries with trials
    #  that contain a line with trialbeg (FILL trials are skipped)
    class FilterTrialBegin:
        regex = re.compile(r"^trialbeg\s+\d+\s+\d+\s+\d+\s+([a-zA-Z]+)$")
        def __call__(self, entry):
            m = self.regex.match(entry.message)
            if m:
                if m.group(1) != "FILL":
                    return True
            return False
    ## Can be used as test to filter a list of entries with trials
    #  that contain a line with trialend (FILL trials are skipped)
    class FilterTrialEnd:
        regex = re.compile(r"^trialend\s+\d+\s+\d+\s+\d+\s+([a-zA-Z]+)$")
        def __call__(self, entry):
            m = self.regex.match(entry.message)
            if m:
                if m.group(1) != "FILL":
                    return True
            return False
    filtlist= list(filter(LogEntry.isMessage, entries))
    begins  = list(filter(FilterTrialBegin(), filtlist))
    ends    = list(filter(FilterTrialEnd(), filtlist))
    for i in begins:
        entries.append (StartEntry(i.getEyeTime(), eye))
    for i in ends:
        entries.append (EndEntry(i.getEyeTime()))
##
# Examines the gaze data, creates matching end events for the fixations and
# saccades, converts gaze samples into AscGazeEntries, and writes the sorted
# result to `filename` in Eyelink ascii format (for the Fixation tool).
# @param entries  list of LogEntry; modified in place (end events and
#                 AscGazeEntries are appended)
# @param filename path of the output file
def saveForFixation(entries, filename):
    # create end events for fixations and saccades.
    endfixations = [FixationEndEntry(fix) for fix in generateFixations(entries)]
    endsaccades = [SaccadeEndEntry(sac) for sac in generateSaccades(entries)]
    entries.extend(endfixations)
    entries.extend(endsaccades)
    # generate AscGazeEntries, pairing left and right samples.
    # BUG FIX: materialize both gaze streams BEFORE appending, instead of
    # appending to `entries` while two generators were still iterating it.
    lgazes = list(generateLGaze(entries))
    rgazes = list(generateRGaze(entries))
    for lgaze, rgaze in itertools.zip_longest(lgazes, rgazes):
        entries.append(AscGazeEntry(lgaze, rgaze))
    # Determine which eye(s) were recorded from the last AscGazeEntry.
    eyetype = StartEntry.LEFT
    for e in reversed(entries):
        if e.getEntryType() == LogEntry.ASCGAZE:
            if e.lgaze and e.rgaze:
                eyetype = StartEntry.BINO
            elif e.lgaze:
                eyetype = StartEntry.LEFT
            elif e.rgaze:
                eyetype = StartEntry.RIGHT
            else:
                raise ValueError("invalid AscGazeEntry encountered")
            break
    _appendBeginEndEntries(entries, eyetype)
    # remove ordinary gaze data to keep fixation compatible gazedata
    filtobj = lambda e: not LogEntry.isGaze(e)
    entries = list(filter(filtobj, entries))
    entries.sort(key=functools.cmp_to_key(SortFixationLog()))
    # BUG FIX: use a with-block so the file handle is closed even when a
    # toAsc() call raises.
    with open(filename, "wb") as f:
        for entry in entries:
            f.write((entry.toAsc() + "\r\n").encode('utf8'))
|
UiL-OTS-labs/iSpector
|
log/eyelog.py
|
Python
|
gpl-2.0
| 24,902
|
from openpnm.phases import mixtures
import openpnm.models as mods
class H2O(mixtures.GenericSpecies):
    r"""
    Creates Phase object with preset models and values for H2O

    Parameters
    ----------
    network : OpenPNM Network object
        The network to which this phase object will be attached.
    project : OpenPNM Project object, optional
        The Project with which this phase should be associated. If a
        ``network`` is given then this is ignored and the Network's project is
        used. If a ``network`` is not given then this is mandatory.
    name : string, optional
        The name of the phase. This is useful to keep track of the objects
        throughout the simulation. The name must be unique to the project. If
        no name is given, one is generated.

    Examples
    --------
    >>> import openpnm as op
    >>> import openpnm.phases.mixtures as mixtures
    >>> pn = op.network.Cubic(shape=[5, 5, 5])
    >>> H2O = mixtures.species.liquids.H2O(network=pn)

    """
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # BUG FIX: molar mass of water in kg/mol; the previous value
        # (0.0291) is the molar mass of air, not H2O.
        self['pore.molecular_weight'] = 0.01802
        # BUG FIX: Fuller diffusion volume for H2O (revised Fuller table);
        # the previous value 17.9 (marked "Wrong") is the value tabulated
        # for N2 — confirm against the Fuller correlation used downstream.
        self['pore.molar_diffusion_volume'] = 13.1
|
TomTranter/OpenPNM
|
openpnm/phases/mixtures/species/liquids/H2O.py
|
Python
|
mit
| 1,203
|
# -*- coding: utf-8 -*-
#
# OppiaMobile-Server documentation build configuration file, created by
# sphinx-quickstart on Tue Jun 25 16:03:07 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# PEP 8: one import per line (was "import sys, os" on a single line).
import os
import sys
from datetime import date
sys.path.append(os.path.dirname(__file__))
sys.path.append(os.path.abspath('../oppia/'))
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
# Legacy bootstrap for Django < 1.4, where settings had to be installed
# explicitly via setup_environ().  Harmless no-op on modern Django versions.
if django.VERSION < (1, 4):
    from django.core.management import setup_environ
    settings = __import__(os.environ["DJANGO_SETTINGS_MODULE"])
    setup_environ(settings)
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo', 'sphinx.ext.coverage',]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'OppiaMobile'
copyright = u'%d, Digital Campus' % date.today().year
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.10.1'
# The full version, including alpha/beta/rc tags.
release = '0.10.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
html_theme_options = {
}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar.  Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'OppiaMobile'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
  ('index', 'OppiaMobile.tex', u'OppiaMobile Documentation',
   u'Alex Little, Digital Campus', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'oppiamobile-server', u'OppiaMobile-Server Documentation',
     [u'Alex Little, Digital Campus'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'OppiaMobile', u'OppiaMobile Documentation',
   u'Alex Little, Digital Campus', 'OppiaMobile', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
DigitalCampus/django-nurhi-oppia
|
docs/conf.py
|
Python
|
gpl-3.0
| 8,476
|
from HTMLComponent import HTMLComponent
from GUIComponent import GUIComponent
from config import KEY_LEFT, KEY_RIGHT, KEY_HOME, KEY_END, KEY_0, KEY_DELETE, KEY_BACKSPACE, KEY_OK, KEY_TOGGLEOW, KEY_ASCII, KEY_TIMEOUT, KEY_NUMBERS, ConfigElement, ConfigText, ConfigPassword
from Components.ActionMap import NumberActionMap, ActionMap
from enigma import eListbox, eListboxPythonConfigContent, eRCInput, eTimer
from Screens.MessageBox import MessageBox
class ConfigList(HTMLComponent, GUIComponent, object):
	"""Listbox widget that displays (description, ConfigElement) tuples
	and forwards key presses to the currently selected element.

	``list`` is the initial list of entries; ``session`` is passed to the
	elements' onSelect/onDeselect callbacks so they can open help windows.
	"""
	def __init__(self, list, session = None):
		GUIComponent.__init__(self)
		self.l = eListboxPythonConfigContent()
		self.l.setSeperation(200)
		self.timer = eTimer()
		self.list = list
		self.onSelectionChanged = [ ]
		self.current = None
		self.session = session
		# Read the box model once; used below to pick the RC keyboard mode.
		# NOTE: the original code used file(...) without ever closing the
		# handle, leaking a file descriptor until garbage collection.
		with open("/proc/stb/info/vumodel") as f:
			self.vumachine = f.read().strip()

	def execBegin(self):
		# Switch the remote-control input into the keyboard mode suitable
		# for this box, and arm the numeric-input timeout callback.
		rcinput = eRCInput.getInstance()
		if self.vumachine == "ultimo":
			rcinput.setKeyboardMode(rcinput.kmNone)
		else:
			rcinput.setKeyboardMode(rcinput.kmAscii)
		self.timer.callback.append(self.timeout)

	def execEnd(self):
		# Restore the keyboard mode and detach the timeout callback.
		rcinput = eRCInput.getInstance()
		if self.vumachine == "ultimo":
			rcinput.setKeyboardMode(rcinput.kmAscii)
		else:
			rcinput.setKeyboardMode(rcinput.kmNone)
		self.timer.callback.remove(self.timeout)

	def toggle(self):
		"""Toggle the currently selected ConfigElement and redraw it."""
		selection = self.getCurrent()
		selection[1].toggle()
		self.invalidateCurrent()

	def handleKey(self, key):
		"""Forward *key* to the selected element (if enabled) and redraw.

		For number keys a one-second timer is started so that multi-tap
		text entry can be committed via timeout().
		"""
		selection = self.getCurrent()
		if selection and selection[1].enabled:
			selection[1].handleKey(key)
			self.invalidateCurrent()
			if key in KEY_NUMBERS:
				self.timer.start(1000, 1)

	def getCurrent(self):
		return self.l.getCurrentSelection()

	def getCurrentIndex(self):
		return self.l.getCurrentSelectionIndex()

	def setCurrentIndex(self, index):
		if self.instance is not None:
			self.instance.moveSelectionTo(index)

	def invalidateCurrent(self):
		self.l.invalidateEntry(self.l.getCurrentSelectionIndex())

	def invalidate(self, entry):
		# when the entry to invalidate does not exist, just ignore the request.
		# this eases up conditional setup screens a lot.
		if entry in self.__list:
			self.l.invalidateEntry(self.__list.index(entry))

	GUI_WIDGET = eListbox

	def selectionChanged(self):
		# Deselect the previous entry, select the new one, then notify
		# registered listeners.  Entries shorter than 2 items are skipped.
		if isinstance(self.current,tuple) and len(self.current) >= 2:
			self.current[1].onDeselect(self.session)
		self.current = self.getCurrent()
		if isinstance(self.current,tuple) and len(self.current) >= 2:
			self.current[1].onSelect(self.session)
		else:
			return
		for x in self.onSelectionChanged:
			x()

	def postWidgetCreate(self, instance):
		instance.selectionChanged.get().append(self.selectionChanged)
		instance.setContent(self.l)

	def preWidgetRemove(self, instance):
		if isinstance(self.current,tuple) and len(self.current) >= 2:
			self.current[1].onDeselect(self.session)
		instance.selectionChanged.get().remove(self.selectionChanged)
		instance.setContent(None)

	def setList(self, l):
		self.timer.stop()
		self.__list = l
		self.l.setList(self.__list)
		if l is not None:
			for x in l:
				assert len(x) < 2 or isinstance(x[1], ConfigElement), "entry in ConfigList " + str(x[1]) + " must be a ConfigElement"

	def getList(self):
		return self.__list

	list = property(getList, setList)

	def timeout(self):
		# Commit pending multi-tap numeric input.
		self.handleKey(KEY_TIMEOUT)

	def isChanged(self):
		"""Return True if any element in the list has been modified."""
		is_changed = False
		for x in self.list:
			is_changed |= x[1].isChanged()
		return is_changed
class ConfigListScreen:
	"""Mix-in for Screens that embed a ConfigList under self["config"].

	Wires up the standard number/cursor/ascii action maps, an optional
	virtual keyboard and help window, and save/cancel helpers.  ``list``
	is the initial config list; ``on_change`` (if given) is called after
	every key press that may have modified a value.
	"""
	def __init__(self, list, session = None, on_change = None):
		self["config_actions"] = NumberActionMap(["SetupActions", "InputAsciiActions", "KeyboardInputActions"],
		{
			"gotAsciiCode": self.keyGotAscii,
			"ok": self.keyOK,
			"left": self.keyLeft,
			"right": self.keyRight,
			"home": self.keyHome,
			"end": self.keyEnd,
			"deleteForward": self.keyDelete,
			"deleteBackward": self.keyBackspace,
			"toggleOverwrite": self.keyToggleOW,
			"pageUp": self.keyPageUp,
			"pageDown": self.keyPageDown,
			"1": self.keyNumberGlobal,
			"2": self.keyNumberGlobal,
			"3": self.keyNumberGlobal,
			"4": self.keyNumberGlobal,
			"5": self.keyNumberGlobal,
			"6": self.keyNumberGlobal,
			"7": self.keyNumberGlobal,
			"8": self.keyNumberGlobal,
			"9": self.keyNumberGlobal,
			"0": self.keyNumberGlobal
		}, -1) # to prevent left/right overriding the listbox
		self["VirtualKB"] = ActionMap(["VirtualKeyboardActions"],
		{
			"showVirtualKeyboard": self.KeyText,
		}, -2)
		# Disabled by default; enabled only while a text/password entry
		# is selected (see handleInputHelpers below).
		self["VirtualKB"].setEnabled(False)
		self["config"] = ConfigList(list, session = session)
		if on_change is not None:
			self.__changed = on_change
		else:
			self.__changed = lambda: None
		if not self.handleInputHelpers in self["config"].onSelectionChanged:
			self["config"].onSelectionChanged.append(self.handleInputHelpers)
	def handleInputHelpers(self):
		# Show/hide the virtual-keyboard hint and reposition the on-screen
		# help window depending on whether the selected entry is a text or
		# password field.  "VKeyIcon"/"HelpWindow" are optional skin widgets.
		if self["config"].getCurrent() is not None:
			if isinstance(self["config"].getCurrent()[1], ConfigText) or isinstance(self["config"].getCurrent()[1], ConfigPassword):
				if self.has_key("VKeyIcon"):
					self["VirtualKB"].setEnabled(True)
					self["VKeyIcon"].boolean = True
				if self.has_key("HelpWindow"):
					if self["config"].getCurrent()[1].help_window.instance is not None:
						helpwindowpos = self["HelpWindow"].getPosition()
						from enigma import ePoint
						self["config"].getCurrent()[1].help_window.instance.move(ePoint(helpwindowpos[0],helpwindowpos[1]))
			else:
				if self.has_key("VKeyIcon"):
					self["VirtualKB"].setEnabled(False)
					self["VKeyIcon"].boolean = False
		else:
			if self.has_key("VKeyIcon"):
				self["VirtualKB"].setEnabled(False)
				self["VKeyIcon"].boolean = False
	def KeyText(self):
		# Open the virtual keyboard pre-filled with the current value.
		from Screens.VirtualKeyBoard import VirtualKeyBoard
		self.session.openWithCallback(self.VirtualKeyBoardCallback, VirtualKeyBoard, title = self["config"].getCurrent()[0], text = self["config"].getCurrent()[1].getValue())
	def VirtualKeyBoardCallback(self, callback = None):
		# Apply non-empty virtual-keyboard input to the selected element.
		if callback is not None and len(callback):
			self["config"].getCurrent()[1].setValue(callback)
			self["config"].invalidate(self["config"].getCurrent())
	def keyOK(self):
		self["config"].handleKey(KEY_OK)
	def keyLeft(self):
		self["config"].handleKey(KEY_LEFT)
		self.__changed()
	def keyRight(self):
		self["config"].handleKey(KEY_RIGHT)
		self.__changed()
	def keyHome(self):
		self["config"].handleKey(KEY_HOME)
		self.__changed()
	def keyEnd(self):
		self["config"].handleKey(KEY_END)
		self.__changed()
	def keyDelete(self):
		self["config"].handleKey(KEY_DELETE)
		self.__changed()
	def keyBackspace(self):
		self["config"].handleKey(KEY_BACKSPACE)
		self.__changed()
	def keyToggleOW(self):
		self["config"].handleKey(KEY_TOGGLEOW)
		self.__changed()
	def keyGotAscii(self):
		self["config"].handleKey(KEY_ASCII)
		self.__changed()
	def keyNumberGlobal(self, number):
		self["config"].handleKey(KEY_0 + number)
		self.__changed()
	def keyPageDown(self):
		# Jump 10 entries down, clamping at the last entry.
		if self["config"].getCurrentIndex() + 10 <= (len(self["config"].getList()) - 1):
			self["config"].setCurrentIndex(self["config"].getCurrentIndex() + 10)
		else:
			self["config"].setCurrentIndex((len(self["config"].getList()) - 1))
	def keyPageUp(self):
		# Jump 10 entries up, clamping at the first entry.
		if self["config"].getCurrentIndex() - 10 > 0:
			self["config"].setCurrentIndex(self["config"].getCurrentIndex() - 10)
		else:
			self["config"].setCurrentIndex(0)
	def saveAll(self):
		"""Persist every element in the config list."""
		for x in self["config"].list:
			x[1].save()
	# keySave and keyCancel are just provided in case you need them.
	# you have to call them by yourself.
	def keySave(self):
		self.saveAll()
		self.close()
	def cancelConfirm(self, result):
		# MessageBox callback: revert all elements only if confirmed.
		if not result:
			return
		for x in self["config"].list:
			x[1].cancel()
		self.close()
	def closeMenuList(self, recursive = False):
		# Ask for confirmation before discarding unsaved changes.
		if self["config"].isChanged():
			self.session.openWithCallback(self.cancelConfirm, MessageBox, _("Really close without saving settings?"))
		else:
			self.close(recursive)
	def keyCancel(self):
		self.closeMenuList()
	def closeRecursive(self):
		self.closeMenuList(True)
|
popazerty/bnigma2
|
lib/python/Components/ConfigList.py
|
Python
|
gpl-2.0
| 7,993
|
#
# Copyright (c) 2017 SUSE Linux GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 3 of the GNU General Public License as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, contact SUSE LLC.
#
# To contact SUSE about this file by physical or electronic mail,
# you may find current contact information at www.suse.com
"""
sdsc Module
===================
.. default-domain:: py
Checks a given DocBook XML file for stylistic errors
"""
import logging
# Package metadata, consumed by the build tooling and the CLI.
__projectname__ = "suse-doc-style-checker"
__programname__ = "SUSE Documentation Style Checker"
# NOTE(review): the copyright header above says GPL version 3, but this
# declares LGPL-2.1+ -- one of the two is presumably wrong; confirm the
# intended license with the maintainers.
__license__ = "LGPL-2.1+"
__description__ = "checks a given DocBook XML file for stylistic errors"
__authors__ = "Stefan Knorr, Thomas Schraitle, Fabian Vogt"
__url__ = "https://github.com/tomschr/sdsc"
__version__ = "2016.7.0.0"
#: Set default logging handler to avoid "No handler found" warnings.
# See https://docs.python.org/3/howto/logging.html#library-config
logging.getLogger().addHandler(logging.NullHandler())
|
tomschr/sdsc
|
src/sdsc/__init__.py
|
Python
|
lgpl-2.1
| 1,375
|
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.identity import base
from tempest import config
from tempest.lib.common.utils import data_utils
from tempest.lib.common.utils import test_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
CONF = config.CONF
class RolesV3TestJSON(base.BaseIdentityV3AdminTest):
    """Tests for the Keystone v3 roles API.

    Covers role CRUD, role grants to users/groups on projects/domains,
    implied (inference-rule) roles, and domain-scoped roles.  The class
    fixtures create three plain roles, one dedicated role, and a
    domain/project/group/user hierarchy that the grant tests reuse.
    """
    @classmethod
    def resource_setup(cls):
        super(RolesV3TestJSON, cls).resource_setup()
        # Three generic roles, used by the implied-role tests below.
        cls.roles = list()
        for _ in range(3):
            role_name = data_utils.rand_name(name='role')
            role = cls.roles_client.create_role(name=role_name)['role']
            cls.roles.append(role)
        u_name = data_utils.rand_name('user')
        u_desc = '%s description' % u_name
        u_email = '%s@testmail.tm' % u_name
        cls.u_password = data_utils.rand_password()
        # Domain -> project -> group/user hierarchy shared by grant tests.
        cls.domain = cls.create_domain()
        cls.project = cls.projects_client.create_project(
            data_utils.rand_name('project'),
            description=data_utils.rand_name('project-desc'),
            domain_id=cls.domain['id'])['project']
        cls.group_body = cls.groups_client.create_group(
            name=data_utils.rand_name('Group'), project_id=cls.project['id'],
            domain_id=cls.domain['id'])['group']
        cls.user_body = cls.users_client.create_user(
            name=u_name, description=u_desc, password=cls.u_password,
            email=u_email, project_id=cls.project['id'],
            domain_id=cls.domain['id'])['user']
        cls.role = cls.roles_client.create_role(
            name=data_utils.rand_name('Role'))['role']
    @classmethod
    def resource_cleanup(cls):
        # Teardown mirrors resource_setup in reverse dependency order.
        cls.roles_client.delete_role(cls.role['id'])
        cls.groups_client.delete_group(cls.group_body['id'])
        cls.users_client.delete_user(cls.user_body['id'])
        cls.projects_client.delete_project(cls.project['id'])
        # NOTE(harika-vakadi): It is necessary to disable the domain
        # before deleting,or else it would result in unauthorized error
        cls.domains_client.update_domain(cls.domain['id'], enabled=False)
        cls.domains_client.delete_domain(cls.domain['id'])
        for role in cls.roles:
            cls.roles_client.delete_role(role['id'])
        super(RolesV3TestJSON, cls).resource_cleanup()
    @decorators.attr(type='smoke')
    @decorators.idempotent_id('18afc6c0-46cf-4911-824e-9989cc056c3a')
    def test_role_create_update_show_list(self):
        """Exercise create, update, show and list for a single role."""
        r_name = data_utils.rand_name('Role')
        role = self.roles_client.create_role(name=r_name)['role']
        self.addCleanup(self.roles_client.delete_role, role['id'])
        self.assertIn('name', role)
        self.assertEqual(role['name'], r_name)
        new_name = data_utils.rand_name('NewRole')
        updated_role = self.roles_client.update_role(role['id'],
                                                     name=new_name)['role']
        self.assertIn('name', updated_role)
        self.assertIn('id', updated_role)
        self.assertIn('links', updated_role)
        self.assertNotEqual(r_name, updated_role['name'])
        new_role = self.roles_client.show_role(role['id'])['role']
        self.assertEqual(new_name, new_role['name'])
        self.assertEqual(updated_role['id'], new_role['id'])
        roles = self.roles_client.list_roles()['roles']
        self.assertIn(role['id'], [r['id'] for r in roles])
    @decorators.idempotent_id('c6b80012-fe4a-498b-9ce8-eb391c05169f')
    def test_grant_list_revoke_role_to_user_on_project(self):
        """Grant, list, check and revoke a user role on a project."""
        self.roles_client.create_user_role_on_project(self.project['id'],
                                                      self.user_body['id'],
                                                      self.role['id'])
        roles = self.roles_client.list_user_roles_on_project(
            self.project['id'], self.user_body['id'])['roles']
        self.assertEqual(1, len(roles))
        self.assertEqual(self.role['id'], roles[0]['id'])
        self.roles_client.check_user_role_existence_on_project(
            self.project['id'], self.user_body['id'], self.role['id'])
        self.roles_client.delete_role_from_user_on_project(
            self.project['id'], self.user_body['id'], self.role['id'])
    @decorators.idempotent_id('6c9a2940-3625-43a3-ac02-5dcec62ef3bd')
    def test_grant_list_revoke_role_to_user_on_domain(self):
        """Grant, list, check and revoke a user role on a domain."""
        self.roles_client.create_user_role_on_domain(
            self.domain['id'], self.user_body['id'], self.role['id'])
        roles = self.roles_client.list_user_roles_on_domain(
            self.domain['id'], self.user_body['id'])['roles']
        self.assertEqual(1, len(roles))
        self.assertEqual(self.role['id'], roles[0]['id'])
        self.roles_client.check_user_role_existence_on_domain(
            self.domain['id'], self.user_body['id'], self.role['id'])
        self.roles_client.delete_role_from_user_on_domain(
            self.domain['id'], self.user_body['id'], self.role['id'])
    @decorators.idempotent_id('cbf11737-1904-4690-9613-97bcbb3df1c4')
    def test_grant_list_revoke_role_to_group_on_project(self):
        """Grant a role to a group on a project and verify via a token."""
        # Grant role to group on project
        self.roles_client.create_group_role_on_project(
            self.project['id'], self.group_body['id'], self.role['id'])
        # List group roles on project
        roles = self.roles_client.list_group_roles_on_project(
            self.project['id'], self.group_body['id'])['roles']
        self.assertEqual(1, len(roles))
        self.assertEqual(self.role['id'], roles[0]['id'])
        # Add user to group, and insure user has role on project
        self.groups_client.add_group_user(self.group_body['id'],
                                          self.user_body['id'])
        self.addCleanup(self.groups_client.delete_group_user,
                        self.group_body['id'], self.user_body['id'])
        body = self.token.auth(user_id=self.user_body['id'],
                               password=self.u_password,
                               user_domain_name=self.domain['name'],
                               project_name=self.project['name'],
                               project_domain_name=self.domain['name'])
        roles = body['token']['roles']
        self.assertEqual(len(roles), 1)
        self.assertEqual(roles[0]['id'], self.role['id'])
        self.roles_client.check_role_from_group_on_project_existence(
            self.project['id'], self.group_body['id'], self.role['id'])
        # Revoke role to group on project
        self.roles_client.delete_role_from_group_on_project(
            self.project['id'], self.group_body['id'], self.role['id'])
    @decorators.idempotent_id('4bf8a70b-e785-413a-ad53-9f91ce02faa7')
    def test_grant_list_revoke_role_to_group_on_domain(self):
        """Grant, list, check and revoke a group role on a domain."""
        self.roles_client.create_group_role_on_domain(
            self.domain['id'], self.group_body['id'], self.role['id'])
        roles = self.roles_client.list_group_roles_on_domain(
            self.domain['id'], self.group_body['id'])['roles']
        self.assertEqual(1, len(roles))
        self.assertEqual(self.role['id'], roles[0]['id'])
        self.roles_client.check_role_from_group_on_domain_existence(
            self.domain['id'], self.group_body['id'], self.role['id'])
        self.roles_client.delete_role_from_group_on_domain(
            self.domain['id'], self.group_body['id'], self.role['id'])
    @decorators.idempotent_id('f5654bcc-08c4-4f71-88fe-05d64e06de94')
    def test_list_roles(self):
        """Verify the fixture roles appear in the global role list."""
        # Return a list of all roles
        body = self.roles_client.list_roles()['roles']
        found = [role for role in body if role in self.roles]
        self.assertEqual(len(found), len(self.roles))
    def _create_implied_role(self, prior_role_id, implies_role_id,
                             ignore_not_found=False):
        # Helper: create a role inference rule and register its cleanup.
        # ignore_not_found=True is used by tests that delete the rule
        # themselves, so cleanup must tolerate a 404.
        self.roles_client.create_role_inference_rule(
            prior_role_id, implies_role_id)
        if ignore_not_found:
            self.addCleanup(
                test_utils.call_and_ignore_notfound_exc,
                self.roles_client.delete_role_inference_rule,
                prior_role_id,
                implies_role_id)
        else:
            self.addCleanup(
                self.roles_client.delete_role_inference_rule,
                prior_role_id,
                implies_role_id)
    @decorators.idempotent_id('c90c316c-d706-4728-bcba-eb1912081b69')
    def test_implied_roles_create_check_show_delete(self):
        """Full lifecycle of a single role inference rule."""
        prior_role_id = self.roles[0]['id']
        implies_role_id = self.roles[1]['id']
        # Create an inference rule from prior_role to implies_role
        self._create_implied_role(prior_role_id, implies_role_id,
                                  ignore_not_found=True)
        # Check if the inference rule exists
        self.roles_client.check_role_inference_rule(
            prior_role_id, implies_role_id)
        # Show the inference rule and check its elements
        resp_body = self.roles_client.show_role_inference_rule(
            prior_role_id, implies_role_id)
        self.assertIn('role_inference', resp_body)
        role_inference = resp_body['role_inference']
        for key1 in ['prior_role', 'implies']:
            self.assertIn(key1, role_inference)
            for key2 in ['id', 'links', 'name']:
                self.assertIn(key2, role_inference[key1])
        # Delete the inference rule
        self.roles_client.delete_role_inference_rule(
            prior_role_id, implies_role_id)
        # Check if the inference rule no longer exists
        self.assertRaises(
            lib_exc.NotFound,
            self.roles_client.show_role_inference_rule,
            prior_role_id,
            implies_role_id)
    @decorators.idempotent_id('dc6f5959-b74d-4e30-a9e5-a8255494ff00')
    def test_roles_hierarchy(self):
        """Only direct (not transitive) implied roles are listed."""
        # Create inference rule from "roles[0]" to "role[1]"
        self._create_implied_role(
            self.roles[0]['id'], self.roles[1]['id'])
        # Create inference rule from "roles[0]" to "role[2]"
        self._create_implied_role(
            self.roles[0]['id'], self.roles[2]['id'])
        # Create inference rule from "roles[2]" to "role"
        self._create_implied_role(
            self.roles[2]['id'], self.role['id'])
        # Listing inferences rules from "roles[2]" should only return "role"
        rules = self.roles_client.list_role_inferences_rules(
            self.roles[2]['id'])['role_inference']
        self.assertEqual(1, len(rules['implies']))
        self.assertEqual(self.role['id'], rules['implies'][0]['id'])
        # Listing inferences rules from "roles[0]" should return "roles[1]" and
        # "roles[2]" (only direct rules are listed)
        rules = self.roles_client.list_role_inferences_rules(
            self.roles[0]['id'])['role_inference']
        implies_ids = [role['id'] for role in rules['implies']]
        self.assertEqual(2, len(implies_ids))
        self.assertIn(self.roles[1]['id'], implies_ids)
        self.assertIn(self.roles[2]['id'], implies_ids)
    @decorators.idempotent_id('c8828027-df48-4021-95df-b65b92c7429e')
    def test_assignments_for_implied_roles_create_delete(self):
        """Implied roles appear in effective role assignments."""
        # Create a grant using "roles[0]"
        self.roles_client.create_user_role_on_project(
            self.project['id'], self.user_body['id'], self.roles[0]['id'])
        self.addCleanup(
            self.roles_client.delete_role_from_user_on_project,
            self.project['id'], self.user_body['id'], self.roles[0]['id'])
        # Create an inference rule from "roles[0]" to "roles[1]"
        self._create_implied_role(self.roles[0]['id'], self.roles[1]['id'],
                                  ignore_not_found=True)
        # In the effective list of role assignments, both prior role and
        # implied role should be present. This means that a user can
        # authenticate using both roles (both roles will be present
        # in the token).
        params = {'scope.project.id': self.project['id'],
                  'user.id': self.user_body['id']}
        role_assignments = self.role_assignments.list_role_assignments(
            effective=True, **params)['role_assignments']
        self.assertEqual(2, len(role_assignments))
        roles_ids = [assignment['role']['id']
                     for assignment in role_assignments]
        self.assertIn(self.roles[0]['id'], roles_ids)
        self.assertIn(self.roles[1]['id'], roles_ids)
        # After deleting the implied role, only the assignment with "roles[0]"
        # should be present.
        self.roles_client.delete_role_inference_rule(
            self.roles[0]['id'], self.roles[1]['id'])
        role_assignments = self.role_assignments.list_role_assignments(
            effective=True, **params)['role_assignments']
        self.assertEqual(1, len(role_assignments))
        roles_ids = [assignment['role']['id']
                     for assignment in role_assignments]
        self.assertIn(self.roles[0]['id'], roles_ids)
    @decorators.idempotent_id('d92a41d2-5501-497a-84bb-6e294330e8f8')
    def test_domain_roles_create_delete(self):
        """Create and delete a role scoped to a domain."""
        domain_role = self.roles_client.create_role(
            name=data_utils.rand_name('domain_role'),
            domain_id=self.domain['id'])['role']
        self.addCleanup(
            test_utils.call_and_ignore_notfound_exc,
            self.roles_client.delete_role,
            domain_role['id'])
        domain_roles = self.roles_client.list_roles(
            domain_id=self.domain['id'])['roles']
        self.assertEqual(1, len(domain_roles))
        self.assertIn(domain_role, domain_roles)
        self.roles_client.delete_role(domain_role['id'])
        domain_roles = self.roles_client.list_roles(
            domain_id=self.domain['id'])['roles']
        self.assertEmpty(domain_roles)
    @decorators.idempotent_id('eb1e1c24-1bc4-4d47-9748-e127a1852c82')
    def test_implied_domain_roles(self):
        """Inference rules between domain-scoped and global roles."""
        # Create two roles in the same domain
        domain_role1 = self.setup_test_role(domain_id=self.domain['id'])
        domain_role2 = self.setup_test_role(domain_id=self.domain['id'])
        # Check if we can create an inference rule from roles in the same
        # domain
        self._create_implied_role(domain_role1['id'], domain_role2['id'])
        # Create another role in a different domain
        domain2 = self.setup_test_domain()
        domain_role3 = self.setup_test_role(domain_id=domain2['id'])
        # Check if we can create cross domain implied roles
        self._create_implied_role(domain_role1['id'], domain_role3['id'])
        # Finally, we also should be able to create an implied from a
        # domain role to a global one
        self._create_implied_role(domain_role1['id'], self.role['id'])
        if CONF.identity_feature_enabled.forbid_global_implied_dsr:
            # The contrary is not true: we can't create an inference rule
            # from a global role to a domain role
            self.assertRaises(
                lib_exc.Forbidden,
                self.roles_client.create_role_inference_rule,
                self.role['id'],
                domain_role1['id'])
    @decorators.idempotent_id('3859df7e-5b78-4e4d-b10e-214c8953842a')
    def test_assignments_for_domain_roles(self):
        """Domain roles must not appear in effective assignments."""
        domain_role = self.setup_test_role(domain_id=self.domain['id'])
        # Create a grant using "domain_role"
        self.roles_client.create_user_role_on_project(
            self.project['id'], self.user_body['id'], domain_role['id'])
        self.addCleanup(
            self.roles_client.delete_role_from_user_on_project,
            self.project['id'], self.user_body['id'], domain_role['id'])
        # NOTE(rodrigods): Regular roles would appear in the effective
        # list of role assignments (meaning the role would be returned in
        # a token) as a result from the grant above. This is not the case
        # for domain roles, they should not appear in the effective role
        # assignments list.
        params = {'scope.project.id': self.project['id'],
                  'user.id': self.user_body['id']}
        role_assignments = self.role_assignments.list_role_assignments(
            effective=True, **params)['role_assignments']
        self.assertEmpty(role_assignments)
    @decorators.idempotent_id('3748c316-c18f-4b08-997b-c60567bc6235')
    def test_list_all_implied_roles(self):
        """Listing all inference rules groups them by prior role."""
        # Create inference rule from "roles[0]" to "roles[1]"
        self._create_implied_role(
            self.roles[0]['id'], self.roles[1]['id'])
        # Create inference rule from "roles[0]" to "roles[2]"
        self._create_implied_role(
            self.roles[0]['id'], self.roles[2]['id'])
        # Create inference rule from "roles[2]" to "role"
        self._create_implied_role(
            self.roles[2]['id'], self.role['id'])
        rules = self.roles_client.list_all_role_inference_rules()[
            'role_inferences']
        # Sort the rules by the number of inferences, since there should be 1
        # inference between "roles[2]" and "role" and 2 inferences for
        # "roles[0]": between "roles[1]" and "roles[2]".
        sorted_rules = sorted(rules, key=lambda r: len(r['implies']))
        # Check that 2 sets of rules are returned.
        self.assertEqual(2, len(sorted_rules))
        # Check that only 1 inference rule exists between "roles[2]" and "role"
        self.assertEqual(1, len(sorted_rules[0]['implies']))
        # Check that 2 inference rules exist for "roles[0]": one between
        # "roles[1]" and one between "roles[2]".
        self.assertEqual(2, len(sorted_rules[1]['implies']))
        # Check that "roles[2]" is the "prior_role" and that "role" is the
        # "implies" role.
        self.assertEqual(self.roles[2]['id'],
                         sorted_rules[0]['prior_role']['id'])
        self.assertEqual(self.role['id'],
                         sorted_rules[0]['implies'][0]['id'])
        # Check that "roles[0]" is the "prior_role" and that "roles[1]" and
        # "roles[2]" are the "implies" roles.
        self.assertEqual(self.roles[0]['id'],
                         sorted_rules[1]['prior_role']['id'])
        implies_ids = [r['id'] for r in sorted_rules[1]['implies']]
        self.assertIn(self.roles[1]['id'], implies_ids)
        self.assertIn(self.roles[2]['id'], implies_ids)
|
Juniper/tempest
|
tempest/api/identity/admin/v3/test_roles.py
|
Python
|
apache-2.0
| 19,049
|
# Maximum recursion depth for generated snippets; generators yield the
# empty string once this depth is reached, terminating the recursion.
MAX_DEPTH = 2

#class StatementGenerator(BaseGenerator):
#    def generate(self, depth): raise NotImplementedError

######################################################################
# Expressions

class ExpressionGenerator(object):
    """Abstract base for expression producers.

    Subclasses implement generate(depth), a generator yielding source
    text snippets (both valid and deliberately invalid Python).
    """
    def generate(self, depth): raise NotImplementedError
class BinaryExpression(ExpressionGenerator):
    """Yields '<lhs> <op> <rhs>' for every pair of sub-expressions."""

    def __init__(self, op):
        self.op = op

    def generate(self, depth):
        operands = list(MakeExpressionGenerator(depth + 1))
        for lhs in operands:
            for rhs in operands:
                yield '%s %s %s' % (lhs, self.op, rhs)
class UnaryExpression(ExpressionGenerator):
    """Yields '<op> <operand>' for every sub-expression."""

    def __init__(self, op):
        self.op = op

    def generate(self, depth):
        for operand in MakeExpressionGenerator(depth + 1):
            yield '%s %s' % (self.op, operand)
class EmptyExpression(ExpressionGenerator):
    """Degenerate generator producing a single whitespace 'expression'."""

    def generate(self, depth):
        return iter([' '])
class ConstantExpression(ExpressionGenerator):
    """Yields the repr() of a single constant value."""

    def __init__(self, value):
        self.value = value

    def generate(self, depth):
        return iter([repr(self.value)])
class NameExpression(ExpressionGenerator):
    """Yields a fixed identifier-like token verbatim."""

    def __init__(self, value):
        self.value = value

    def generate(self, depth):
        return iter([self.value])
class InvalidExpression(ExpressionGenerator):
    """Yields a fixed token that is NOT valid Python (for negative tests)."""

    def __init__(self, value):
        self.value = value

    def generate(self, depth):
        return iter([self.value])
class ParenthForm(ExpressionGenerator):
    """Yields parenthesized forms: '()', '(x)', '(x, y)' and '(x, y, z)'.

    With trailingComma=True every form closes with ', )' (tuple syntax).
    """
    def __init__(self, trailingComma):
        self.trailingComma = trailingComma
    def generate(self, depth):
        subexprs = list(MakeExpressionGenerator(depth + 1))
        head = '('
        if self.trailingComma:
            tail = ', )'
        else:
            tail = ')'
        # Pre-build 1-, 2- and 3-element argument lists; args3 is cubic in
        # the number of sub-expressions, which MAX_DEPTH keeps bounded.
        args3 = [x + ', ' + y + ', ' + z for x in subexprs for y in subexprs for z in subexprs]
        args2 = [x + ', ' + y for x in subexprs for y in subexprs]
        args1 = [x for x in subexprs]
        # Empty parentheses first, then all populated forms.
        yield head + tail
        for values in args1 + args2 + args3:
            yield head + values + tail
class GeneratorExpression(ExpressionGenerator):
    """Yields '(expr for target in iterable)' generator expressions,
    with and without a trailing 'if cond' filter clause."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(depth + 1):
            for target in MakeExpressionGenerator(depth + 1):
                for inTest in MakeExpressionGenerator(depth + 1):
                    yield '(' + expr + ' for ' + target + ' in ' + inTest + ')'
                    for cond in MakeExpressionGenerator(depth + 1):
                        yield '(' + expr + ' for ' + target + ' in ' + inTest + ' if ' + cond + ')'
class DictionaryDisplay(ExpressionGenerator):
    """Yields dict literals: '{}', one- and two-entry '{k: v}' displays,
    each with and without a trailing comma."""
    def generate(self, depth):
        subexprs = list(MakeExpressionGenerator(depth + 1))
        args = [x + ': ' + y for x in subexprs for y in subexprs]
        yield '{}'
        for x in args:
            yield '{' + x + '}'
            yield '{' + x + ', }'
        # NOTE(review): zip(args, args) pairs each entry with itself, so the
        # two-entry displays always repeat the same key/value pair —
        # presumably acceptable for parser fuzzing; confirm intent.
        for x, y in zip(args, args):
            yield '{' + x + ', ' + y + '}'
            yield '{' + x + ', ' + y + ', }'
class StringConversion(object):
    """Yields Python 2 backtick string conversions: `expr` (i.e. repr)."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(depth + 1):
            yield '`' + expr + '`'
class YieldExpression(object):
    """Yields 'yield expr', both bare and parenthesized."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(depth + 1):
            yield 'yield ' + expr
            yield '( yield ' + expr + ')'
class AttributeExpression(object):
    """Yields attribute accesses 'expr.name' (names drawn from the
    identifier generator, which includes invalid tokens)."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(depth + 1):
            for name in MakeIdentifierGenerator(depth + 1):
                yield expr + '.' + name
class SubscriptionExpression(object):
    """Yields subscriptions 'expr[i]' and 'expr[i, j]', each with and
    without a trailing comma."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(depth + 1):
            for subscript in MakeExpressionGenerator(depth + 1):
                yield expr + '[' + subscript + ']'
                yield expr + '[' + subscript + ', ]'
        for expr in MakeExpressionGenerator(depth + 1):
            for subscript in MakeExpressionGenerator(depth + 1):
                for subscript2 in MakeExpressionGenerator(depth + 1):
                    yield expr + '[' + subscript + ', ' + subscript2 + ']'
                    yield expr + '[' + subscript + ', ' + subscript2 + ', ]'
class SlicingExpression(object):
    """Yields slicings with 1, 2 and 3 slice components:
    'e[a:]', 'e[a: b]' and 'e[a: b:c]'. Commented-out variants were
    disabled, presumably to limit the combinatorial explosion — confirm."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(depth + 1):
            for subscript in MakeExpressionGenerator(depth + 1):
                yield expr + '[' + subscript + ':]'
                #yield expr + '[:' + subscript + ']'
                #yield expr + '[' + subscript + '::]'
                #yield expr + '[::' + subscript + ']'
        for expr in MakeExpressionGenerator(depth + 1):
            for subscript in MakeExpressionGenerator(depth + 1):
                for subscript2 in MakeExpressionGenerator(depth + 1):
                    yield expr + '[' + subscript + ': ' + subscript2 + ']'
                    #yield expr + '[' + subscript + ', ' + subscript2 + ': ]'
                    #yield expr + '[ :' + subscript + ', ' + subscript2 + ']'
        for expr in MakeExpressionGenerator(depth + 1):
            for subscript in MakeExpressionGenerator(depth + 1):
                for subscript2 in MakeExpressionGenerator(depth + 1):
                    for subscript3 in MakeExpressionGenerator(depth + 1):
                        yield expr + '[' + subscript + ': ' + subscript2 + ':' + subscript3 + ']'
class CallExpression(object):
    """Yields call expressions with 1 and 2 arguments: positional,
    *args / **kwargs unpacking, and keyword forms.

    NOTE(review): keyword names are drawn from the full expression
    generator rather than MakeIdentifierGenerator, so many generated
    calls have non-identifier keyword names — presumably intentional
    for negative parser testing; confirm.
    """
    def generate(self, depth):
        for expr in MakeExpressionGenerator(depth + 1):
            for subscript in MakeExpressionGenerator(depth + 1):
                yield expr + '(' + subscript + ')'
                yield expr + '(' + subscript + ', )'
                yield expr + '(*' + subscript + ')'
                yield expr + '(**' + subscript + ', )'
                for name in MakeExpressionGenerator(depth + 1):
                    yield expr + '(' + name + ' = ' + subscript + ')'
        for expr in MakeExpressionGenerator(depth + 1):
            for subscript in MakeExpressionGenerator(depth + 1):
                for subscript2 in MakeExpressionGenerator(depth + 1):
                    yield expr + '(' + subscript + ', ' + subscript2 + ')'
                    yield expr + '(' + subscript + ', ' + subscript2 + ', )'
                    yield expr + '(*' + subscript + ', ' + subscript2 + ')'
                    yield expr + '(**' + subscript + ', ' + subscript2 + ', )'
                    yield expr + '(*' + subscript + ', **' + subscript2 + ')'
                    yield expr + '(**' + subscript + ', *' + subscript2 + ')'
                    for name in MakeExpressionGenerator(depth + 1):
                        yield expr + '(' + name + ' = ' + subscript + ', ' + subscript2 + ')'
                        yield expr + '(' + subscript + ', ' + name + ' = ' + subscript2 + ')'
######################################################################
# Statements
class AssertStatement(ExpressionGenerator):
    """Yields 'assert expr' and 'assert expr, msg' statements.

    Statements restart expression recursion at depth 0.
    """
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            yield 'assert ' + expr + '\n'
            for expr2 in MakeExpressionGenerator(0):
                yield 'assert ' + expr + ', ' + expr2 + '\n'
class AssignmentStatement(ExpressionGenerator):
    """Yields simple ('=') and augmented ('+=') assignment statements."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            for expr2 in MakeExpressionGenerator(0):
                yield expr + ' = ' + expr2 + '\n'
                yield expr + ' += ' + expr2 + '\n'
class DelStatement(ExpressionGenerator):
    """Yields 'del expr' statements."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            yield 'del ' + expr + '\n'
class PrintStatement(ExpressionGenerator):
    """Yields Python 2 print statements, with and without trailing comma."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            yield 'print ' + expr + '\n'
            yield 'print ' + expr + ', ' + '\n'
class ReturnStatement(ExpressionGenerator):
    """Yields 'return expr' statements (invalid at module scope)."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            yield 'return ' + expr + '\n'
class RaiseStatement(ExpressionGenerator):
    """Yields 'raise expr' statements."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            yield 'raise ' + expr + '\n'
class PassStatement(ExpressionGenerator):
    """Yields the single statement 'pass'."""

    def generate(self, depth):
        return iter(['pass\n'])
class IfStatement(ExpressionGenerator):
    """Yields if / if-else / if-elif compound statements, plus a
    deliberately malformed 'if expr' with no colon."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            for body in MakeStatementGenerator(depth + 1):
                yield 'if ' + expr + ':\n' + body
                yield 'if ' + expr + ':\n' + body + 'else:\n' + body
                yield 'if ' + expr + ':\n' + body + 'elif ' + expr + ':\n' + body
            # Missing colon: exercises the parser's error path.
            yield 'if ' + expr + '\n'
class WhileStatement(ExpressionGenerator):
    """Yields while and while-else compound statements, plus a
    malformed colon-less variant."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            for body in MakeStatementGenerator(depth + 1):
                yield 'while ' + expr + ':\n' + body
                yield 'while ' + expr + ':\n' + body + 'else:\n' + body
            # NOTE(review): 'while' is fused to the expression (no space) —
            # presumably intentional invalid input for the fuzzer; confirm.
            yield 'while' + expr + '\n'
class ForStatement(ExpressionGenerator):
    """Yields for and for-else compound statements, plus a malformed
    colon-less variant.

    NOTE(review): the target is fused to 'in' (no space), so valid loops
    only arise when the target expression ends in whitespace (e.g.
    EmptyExpression) — presumably intentional fuzz input; confirm.
    """
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            for body in MakeStatementGenerator(depth + 1):
                yield 'for ' + expr + 'in ' + expr + ':\n' + body
                yield 'for ' + expr + 'in ' + expr + ':\n' + body + 'else:\n' + body
            yield 'for ' + expr + 'in ' + expr + '\n'
class ExpressionStatement(ExpressionGenerator):
    """Yields each bare expression as a one-line statement."""
    def generate(self, depth):
        for expr in MakeExpressionGenerator(0):
            yield expr + '\n'
######################################################################
# Factories
class BinaryMaker(object):
    """Callable factory producing BinaryExpression(value)."""

    def __init__(self, value):
        self.value = value

    def __call__(self):
        return BinaryExpression(self.value)

    def __repr__(self):
        return 'BinaryMaker(%r)' % (self.value,)
class UnaryMaker(object):
    """Callable factory producing UnaryExpression(value)."""

    def __init__(self, value):
        self.value = value

    def __call__(self):
        return UnaryExpression(self.value)

    def __repr__(self):
        return 'UnaryMaker(%r)' % (self.value,)
class ConstantMaker(object):
    """Callable factory producing ConstantExpression(value)."""

    def __init__(self, value):
        self.value = value

    def __call__(self):
        return ConstantExpression(self.value)

    def __repr__(self):
        return 'ConstantMaker(%r)' % (self.value,)
class NameMaker(object):
    """Callable factory producing NameExpression(value).

    Note __repr__ interpolates the raw value (no quoting), matching the
    original implementation.
    """

    def __init__(self, value):
        self.value = value

    def __call__(self):
        return NameExpression(self.value)

    def __repr__(self):
        return 'NameMaker(%s)' % (self.value,)
class InvalidMaker(object):
    """Callable factory producing InvalidExpression(value).

    Note __repr__ interpolates the raw value (no quoting), matching the
    original implementation.
    """

    def __init__(self, value):
        self.value = value

    def __call__(self):
        return InvalidExpression(self.value)

    def __repr__(self):
        return 'InvalidMaker(%s)' % (self.value,)
class ParenthFormMaker(object):
    """Callable factory producing ParenthForm(trailingComma).

    trailingComma is a bool selecting tuple-style '( ..., )' output.
    """

    def __init__(self, trailingComma):
        self.trailingComma = trailingComma

    def __call__(self):
        return ParenthForm(self.trailingComma)

    def __repr__(self):
        # Bug fix: the original concatenated the bool directly into the
        # string ('ParenthFormMaker(' + self.trailingComma + ')'), which
        # raises TypeError; format with %r like the sibling Maker classes.
        return 'ParenthFormMaker(%r)' % (self.trailingComma,)
# Registries of snippet factories. Each entry is either a generator class
# or a pre-configured *Maker instance; MakeExpressionGenerator /
# MakeStatementGenerator instantiate (call) each one and iterate its
# generate() output. Commented-out entries were disabled, presumably to
# limit the combinatorial explosion — confirm.
identifier_exprs = [NameMaker('foo'), NameMaker('...'), NameMaker('reallylongname' * 200)]  # NameMaker('_bar'),
expr_gens = identifier_exprs + \
    [BinaryMaker('+'), BinaryMaker('**'), BinaryMaker('<<'), BinaryMaker('&'), BinaryMaker('=='), BinaryMaker('>'), BinaryMaker(' in '),  # BinaryMaker('-'), BinaryMaker('|'), BinaryMaker('^'),
     UnaryMaker('~'), UnaryMaker(' not '),  # UnaryMaker('-'), UnaryMaker('+'),
     EmptyExpression,
     GeneratorExpression,
     DictionaryDisplay,
     CallExpression,
     AttributeExpression,
     SubscriptionExpression,
     SlicingExpression,
     # 1L is a Python 2 long literal; this module targets Python 2.
     ConstantMaker(1), ConstantMaker('abc'), ConstantMaker(1L), ConstantMaker(1.0), ConstantMaker(1j),
     ParenthFormMaker(False), ParenthFormMaker(True),
     StringConversion,
     YieldExpression,
     InvalidMaker('$'), InvalidMaker('@'), InvalidMaker('!'), InvalidMaker('\\'),  # InvalidMaker('\0'),
     ]

# Statement factories consumed by MakeStatementGenerator.
stmt_gens = [IfStatement, PassStatement, ExpressionStatement, WhileStatement, ForStatement, AssertStatement, AssignmentStatement, DelStatement, PrintStatement, ReturnStatement, RaiseStatement]
def MakeExpressionGenerator(depth):
    """Yield every expression snippet producible by expr_gens.

    At MAX_DEPTH the recursion terminates by yielding only the empty
    string.
    """
    if depth == MAX_DEPTH:
        yield ''  # empty expr
    else:
        for exprGen in expr_gens:
            for value in exprGen().generate(depth):
                yield value
def Indent(value, depth):
    """Prefix every line of *value* with 4 spaces per level of *depth*.

    Matches the original behaviour exactly, including appending the
    prefix after a trailing newline (nested bodies end with an indented
    blank tail).
    """
    if not depth:
        return value
    pad = '    ' * depth
    return pad + value.replace('\n', '\n' + pad)
def MakeStatementGenerator(depth):
    """Yield every statement snippet producible by stmt_gens, indented
    to *depth* nesting levels; recursion stops at MAX_DEPTH."""
    if depth == MAX_DEPTH:
        yield ''  # empty expr
    else:
        for stmtGen in stmt_gens:
            for value in stmtGen().generate(depth):
                yield Indent(value, depth)
def MakeIdentifierGenerator(depth):
    """Yield candidate identifier snippets (valid and invalid alike)."""
    for maker in identifier_exprs:
        for snippet in maker().generate(depth):
            yield snippet
i = 0
for x in MakeStatementGenerator(0):
i += 1
if i % 1000 == 0:
print '.',
try:
compile(x, 'foo', 'exec')
except SyntaxError:
pass
print 'Ran %d tests', i
|
slozier/ironpython2
|
Tests/test_fuzz_parser.py
|
Python
|
apache-2.0
| 13,629
|
#
# The Multiverse Platform is made available under the MIT License.
#
# Copyright (c) 2012 The Multiverse Foundation
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify,
# merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#
#
from multiverse.server.plugins import *

# Register the instancing plugin with the server engine at config-load time.
Engine.registerPlugin(InstancePlugin())
|
longde123/MultiversePlatform
|
server/config/common/instance.py
|
Python
|
mit
| 1,301
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# The MIT License
#
# Copyright (c) 2016 Grigory Chernyshev
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import mock
import pytest
from yagocd.resources import Base
from yagocd.resources import pipeline
from yagocd.resources.pipeline_config import PipelineConfigManager
class TestPipelineEntity(object):
    """Unit tests for yagocd's pipeline.PipelineEntity resource wrapper.

    Most tests patch the corresponding PipelineManager method and assert
    that the entity delegates with its own name filled in.
    """

    def test_has_all_managers_methods(self):
        # Every public PipelineManager method (except the excludes) must be
        # mirrored on PipelineEntity so the entity is a full facade.
        excludes = ['list', 'find']

        def get_public_methods(klass):
            methods = set()
            for name in dir(klass):
                if name.startswith('_'):
                    continue
                candidate = getattr(klass, name)
                if hasattr(candidate, '__call__'):
                    methods.add(name)
            return methods

        managers_methods = get_public_methods(pipeline.PipelineManager)
        entity_methods = get_public_methods(pipeline.PipelineEntity)
        result = managers_methods - entity_methods - set(excludes)
        assert len(result) == 0, "Some methods are missing in pipeline entity: {}".format(result)

    @pytest.fixture()
    def pipeline_entity(self, mock_session):
        # Entity with two upstream 'Pipeline' materials and one git material.
        return pipeline.PipelineEntity(
            session=mock_session,
            data={'name': 'pipeline_1',
                  'materials': [
                      {'description': 'child_name_1', 'type': 'Pipeline'},
                      {'description': 'git_repo_1', 'type': 'Git'},
                      {'description': 'child_name_2', 'type': 'Pipeline'},
                  ]
                  },
            group='baz'
        )

    def test_entity_is_not_none(self, pipeline_entity):
        assert pipeline_entity is not None

    def test_is_instance_of_base(self, pipeline_entity):
        assert isinstance(pipeline_entity, Base)

    def test_reading_group(self, pipeline_entity):
        assert pipeline_entity.group == 'baz'

    def test_predecessors_empty(self, pipeline_entity):
        assert pipeline_entity.predecessors == list()

    def test_set_predecessors(self, pipeline_entity):
        value = [1, 3, 5]
        pipeline_entity.predecessors = value
        assert pipeline_entity.predecessors == value
        assert pipeline_entity.predecessors != pipeline_entity.descendants

    def test_get_predecessors(self, mock_session, pipeline_entity):
        # Build the chain child_1 <- child_2 <- child_3 <- child_4 <- entity
        # and verify transitive=True walks the whole ancestry.
        child_1 = pipeline.PipelineEntity(mock_session, data=dict(name='child_1'))
        child_2 = pipeline.PipelineEntity(mock_session, data=dict(name='child_2'))
        child_3 = pipeline.PipelineEntity(mock_session, data=dict(name='child_3'))
        child_4 = pipeline.PipelineEntity(mock_session, data=dict(name='child_4'))
        child_2.predecessors = [child_1]
        child_3.predecessors = [child_1, child_2]
        child_4.predecessors = [child_3]
        pipeline_entity.predecessors = [child_4]

        assert pipeline_entity.get_predecessors() == [child_4]
        assert (sorted(pipeline_entity.get_predecessors(True), key=lambda x: x.data.name)
                == [child_1, child_2, child_3, child_4])
        assert child_4.predecessors == [child_3]
        assert (sorted(child_4.get_predecessors(True), key=lambda x: x.data.name)
                == [child_1, child_2, child_3])
        assert child_3.predecessors == [child_1, child_2]
        assert (sorted(child_3.get_predecessors(True), key=lambda x: x.data.name)
                == [child_1, child_2])
        assert child_2.predecessors == [child_1]
        assert (sorted(child_2.get_predecessors(True), key=lambda x: x.data.name) == [child_1])
        assert child_1.predecessors == []
        assert (sorted(child_1.get_predecessors(True), key=lambda x: x.data.name) == [])

    def test_descendants_empty(self, pipeline_entity):
        assert pipeline_entity.descendants == list()

    def test_set_descendants(self, pipeline_entity):
        value = [2, 4, 8]
        pipeline_entity.descendants = value
        assert pipeline_entity.descendants == value
        assert pipeline_entity.predecessors != pipeline_entity.descendants

    def test_get_descendants(self, mock_session, pipeline_entity):
        # Mirror of test_get_predecessors for the downstream direction.
        parent_1 = pipeline.PipelineEntity(mock_session, data=dict(name='parent_1'))
        parent_2 = pipeline.PipelineEntity(mock_session, data=dict(name='parent_2'))
        parent_3 = pipeline.PipelineEntity(mock_session, data=dict(name='parent_3'))
        parent_4 = pipeline.PipelineEntity(mock_session, data=dict(name='parent_4'))
        parent_3.descendants = [parent_4]
        parent_2.descendants = [parent_4, parent_3]
        parent_1.descendants = [parent_2]
        pipeline_entity.descendants = [parent_1]

        assert pipeline_entity.get_descendants() == [parent_1]
        assert (sorted(pipeline_entity.get_descendants(True), key=lambda x: x.data.name)
                == [parent_1, parent_2, parent_3, parent_4])
        assert parent_1.descendants == [parent_2]
        assert (sorted(parent_1.get_descendants(True), key=lambda x: x.data.name)
                == [parent_2, parent_3, parent_4])
        assert parent_2.descendants == [parent_4, parent_3]
        assert (sorted(parent_2.get_descendants(True), key=lambda x: x.data.name)
                == [parent_3, parent_4])
        assert parent_3.descendants == [parent_4]
        assert (sorted(parent_3.get_descendants(True), key=lambda x: x.data.name) == [parent_4])
        assert parent_4.descendants == []
        assert (sorted(parent_4.get_descendants(True), key=lambda x: x.data.name) == [])

    @mock.patch('yagocd.resources.pipeline.PipelineManager.get')
    def test_indexed_based_access(self, get_mock, pipeline_entity):
        # entity[n] delegates to manager.get(name=..., counter=n).
        counter = 123
        _ = pipeline_entity[counter]  # noqa
        get_mock.assert_called_once_with(counter=counter, name=pipeline_entity.data.name)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.full_history')
    def test_iterator_access(self, full_history_mock, pipeline_entity):
        # Iterating the entity delegates to manager.full_history(name=...).
        for _ in pipeline_entity:
            pass
        full_history_mock.assert_called_once_with(name=pipeline_entity.data.name)

    def test_get_url(self, pipeline_entity):
        assert (
            pipeline_entity.get_url('http://example.com', 'test_name')
            == 'http://example.com/go/tab/pipeline/history/test_name'
        )

    def test_url(self, pipeline_entity):
        assert pipeline_entity.url == 'http://example.com/go/tab/pipeline/history/pipeline_1'

    def test_config(self, pipeline_entity):
        assert isinstance(pipeline_entity.config, PipelineConfigManager)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.history')
    def test_history_call(self, history_mock, pipeline_entity):
        pipeline_entity.history()
        history_mock.assert_called_with(name=pipeline_entity.data.name, offset=0)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.history')
    def test_full_history_call(self, history_mock, pipeline_entity):
        # First page of three items, then an empty page terminating paging.
        history_mock.side_effect = [['foo', 'bar', 'baz'], []]
        list(pipeline_entity.full_history())
        calls = [mock.call(pipeline_entity.data.name, 0), mock.call(pipeline_entity.data.name, 3)]
        history_mock.assert_has_calls(calls)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.history')
    def test_last_call(self, history_mock, pipeline_entity):
        pipeline_entity.last()
        history_mock.assert_called_with(name=pipeline_entity.data.name)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.history')
    def test_last_returns_last(self, history_mock, pipeline_entity):
        # last() returns the first (most recent) history element.
        history_mock.return_value = ['50', '30', '10']
        assert pipeline_entity.last() == '50'

    @mock.patch('yagocd.resources.pipeline.PipelineManager.get')
    def test_get_call(self, get_mock, pipeline_entity):
        pipeline_entity.get(0)
        get_mock.assert_called_with(name=pipeline_entity.data.name, counter=0)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.status')
    def test_status_call(self, status_mock, pipeline_entity):
        pipeline_entity.status()
        status_mock.assert_called_with(name=pipeline_entity.data.name)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.pause')
    def test_pause_call(self, pause_mock, pipeline_entity):
        pipeline_entity.pause('custom-reason')
        pause_mock.assert_called_with(name=pipeline_entity.data.name, cause='custom-reason')

    @mock.patch('yagocd.resources.pipeline.PipelineManager.unpause')
    def test_unpause_call(self, unpause_mock, pipeline_entity):
        pipeline_entity.unpause()
        unpause_mock.assert_called_with(name=pipeline_entity.data.name)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.release_lock')
    def test_release_lock_call(self, release_lock_mock, pipeline_entity):
        pipeline_entity.release_lock()
        release_lock_mock.assert_called_with(name=pipeline_entity.data.name)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.schedule')
    def test_schedule_call(self, schedule_mock, pipeline_entity):
        pipeline_entity.schedule()
        schedule_mock.assert_called_with(name=pipeline_entity.data.name, materials=None, variables=None,
                                         secure_variables=None)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.schedule_with_instance')
    def test_schedule_with_instance_call(self, schedule_with_instance_mock, pipeline_entity):
        pipeline_entity.schedule_with_instance()
        schedule_with_instance_mock.assert_called_with(name=pipeline_entity.data.name, materials=None, variables=None,
                                                       secure_variables=None, backoff=0.5, max_tries=20)

    @mock.patch('yagocd.resources.pipeline.PipelineManager.value_stream_map')
    def test_value_stream_map_call(self, value_stream_map_mock, pipeline_entity):
        counter = mock.MagicMock()
        pipeline_entity.value_stream_map(counter=counter)
        value_stream_map_mock.assert_called_with(
            name=pipeline_entity.data.name,
            counter=counter
        )
|
grundic/yagocd
|
tests/test_pipeline_entity.py
|
Python
|
isc
| 11,256
|
"""
Django settings for quixotic_webapp project.
Generated by 'django-admin startproject' using Django 1.10.5.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# DB parameters in this file
from . import parameters
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; it should be
# loaded from the environment or the parameters module like the DB
# credentials — confirm before deploying.
SECRET_KEY = 'yh3$*6egtz79m@0(g!0txzr2rt2#xg852ne9cre&a3=twv#oc('

# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG = False
DEBUG = True
ALLOWED_HOSTS = [
u'ec2-54-173-30-19.compute-1.amazonaws.com',
u'54.173.30.19',
]
# Application definition
INSTALLED_APPS = [
'quixotic_api.apps.QuixoticApiConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'quixotic_webapp.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'quixotic_webapp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': parameters.DB_NAME,
'USER': parameters.DB_USER,
'PASSWORD': parameters.DB_PASSWORD,
'HOST': parameters.DB_HOST,
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
# django-cors-headers origin whitelist.
# Bug fix: '(...)' with no trailing comma is just a parenthesized string,
# so the setting was a plain str rather than the tuple the middleware
# expects; add the comma to make it a one-element tuple.
CORS_ORIGIN_WHITELIST = (
    'localhost:8888',
)
|
zcarwile/quixotic_webapp
|
quixotic_webapp/settings.py
|
Python
|
gpl-3.0
| 3,517
|
"""Overall happening configuration."""
|
happeninghq/happening
|
src/happening/__init__.py
|
Python
|
mit
| 39
|
# pyresample, Resampling of remote sensing image data in python
#
# Copyright (C) 2010-2015
#
# Authors:
# Esben S. Nielsen
# Thomas Lavergne
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Classes for geometry operations"""
from __future__ import absolute_import
import warnings
import numpy as np
from . import utils
from . import _spatial_mp
class DimensionError(Exception):
    """Raised when an operation is undefined for the geometry's ndim."""
    pass
class Boundary(object):
    """Container for geometry boundary.

    Labelling starts in upper left corner and proceeds clockwise"""

    def __init__(self, side1, side2, side3, side4):
        self.side1, self.side2 = side1, side2
        self.side3, self.side4 = side3, side4
class BaseDefinition(object):
"""Base class for geometry definitions"""
def __init__(self, lons=None, lats=None, nprocs=1):
    """Store lon/lat arrays (if given) after validating them.

    Latitudes outside [-90, 90] raise ValueError; longitudes outside
    [-180, 180) are wrapped into that range with a warning.
    """
    if type(lons) != type(lats):
        raise TypeError('lons and lats must be of same type')
    elif lons is not None:
        if lons.shape != lats.shape:
            raise ValueError('lons and lats must have same shape')

    self.nprocs = nprocs

    # check the latitudes
    if lats is not None and ((lats.min() < -90. or lats.max() > +90.)):
        # throw exception
        raise ValueError(
            'Some latitudes are outside the [-90.;+90] validity range')
    else:
        self.lats = lats

    # check the longitudes
    if lons is not None and ((lons.min() < -180. or lons.max() >= +180.)):
        # issue warning
        warnings.warn('All geometry objects expect longitudes in the [-180:+180[ range. ' +
                      'We will now automatically wrap your longitudes into [-180:+180[, and continue. ' +
                      'To avoid this warning next time, use routine utils.wrap_longitudes().')
        # wrap longitudes to [-180;+180[
        self.lons = utils.wrap_longitudes(lons)
    else:
        self.lons = lons

    # Lazily-filled cache used by get_cartesian_coords(cache=True).
    self.cartesian_coords = None
def __eq__(self, other):
    """Test for approximate equality of the two definitions' lon/lats."""
    # Compute coordinates on demand for either side that has none stored.
    if other.lons is None or other.lats is None:
        other_lons, other_lats = other.get_lonlats()
    else:
        other_lons = other.lons
        other_lats = other.lats

    if self.lons is None or self.lats is None:
        self_lons, self_lats = self.get_lonlats()
    else:
        self_lons = self.lons
        self_lats = self.lats

    try:
        # Element-wise closeness; tolerances suit degree-valued coords.
        return (np.allclose(self_lons, other_lons, atol=1e-6,
                            rtol=5e-9) and
                np.allclose(self_lats, other_lats, atol=1e-6,
                            rtol=5e-9))
    except (AttributeError, ValueError):
        # Incompatible operands or mismatched shapes: not equal.
        return False
def __ne__(self, other):
    """Test for approximate inequality (negation of __eq__)."""
    return not self.__eq__(other)
def get_lonlat(self, row, col):
    """Retrieve lon and lat of single pixel

    :Parameters:
    row : int
    col : int

    :Returns:
    (lon, lat) : tuple of floats

    :Raises:
    DimensionError if the geometry is not 2-dimensional;
    ValueError if lon/lat values are not defined.
    """
    if self.ndim != 2:
        raise DimensionError(('operation undefined '
                              'for %sD geometry ') % self.ndim)
    elif self.lons is None or self.lats is None:
        raise ValueError('lon/lat values are not defined')
    return self.lons[row, col], self.lats[row, col]
def get_lonlats(self, data_slice=None, **kwargs):
    """Base method for lon lat retrieval with slicing.

    Returns the full (lons, lats) pair, or the given slice of each.
    Extra keyword arguments are accepted for subclass compatibility
    and ignored here.
    """
    if self.lons is None or self.lats is None:
        raise ValueError('lon/lat values are not defined')
    elif data_slice is None:
        return self.lons, self.lats
    else:
        return self.lons[data_slice], self.lats[data_slice]
def get_boundary_lonlats(self):
    """Returns Boundary objects: one for the lons, one for the lats.

    Sides 3 and 4 are reversed so traversal proceeds clockwise, matching
    the Boundary labelling convention.
    """
    side1 = self.get_lonlats(data_slice=(0, slice(None)))        # top row
    side2 = self.get_lonlats(data_slice=(slice(None), -1))       # right column
    side3 = self.get_lonlats(data_slice=(-1, slice(None)))       # bottom row
    side4 = self.get_lonlats(data_slice=(slice(None), 0))        # left column
    return Boundary(side1[0], side2[0], side3[0][::-1], side4[0][::-1]), Boundary(side1[1], side2[1], side3[1][::-1], side4[1][::-1])
def get_cartesian_coords(self, nprocs=None, data_slice=None, cache=False):
    """Retrieve cartesian coordinates of geometry definition

    :Parameters:
    nprocs : int, optional
        Number of processor cores to be used.
        Defaults to the nprocs set when instantiating object
    data_slice : slice object, optional
        Calculate only cartesian coordnates for the defined slice
    cache : bool, optional
        Store result the result. Requires data_slice to be None

    :Returns:
    cartesian_coords : numpy array
    """
    if self.cartesian_coords is None:
        # Coordinates are not cached
        if nprocs is None:
            nprocs = self.nprocs

        if data_slice is None:
            # Use full slice
            data_slice = slice(None)

        lons, lats = self.get_lonlats(nprocs=nprocs, data_slice=data_slice)

        # Pick the multiprocessing transformer only when it can pay off.
        if nprocs > 1:
            cartesian = _spatial_mp.Cartesian_MP(nprocs)
        else:
            cartesian = _spatial_mp.Cartesian()

        # Transform operates on flat arrays; reshape afterwards.
        cartesian_coords = cartesian.transform_lonlats(np.ravel(lons),
                                                       np.ravel(lats))

        if isinstance(lons, np.ndarray) and lons.ndim > 1:
            # Reshape to correct shape
            cartesian_coords = cartesian_coords.reshape(lons.shape[0],
                                                        lons.shape[1], 3)

        # Only a full-extent result is safe to cache.
        if cache and data_slice is None:
            self.cartesian_coords = cartesian_coords
    else:
        # Coordinates are cached
        if data_slice is None:
            cartesian_coords = self.cartesian_coords
        else:
            cartesian_coords = self.cartesian_coords[data_slice]

    return cartesian_coords
@property
def corners(self):
"""Returns the corners of the current area.
"""
from pyresample.spherical_geometry import Coordinate
return [Coordinate(*self.get_lonlat(0, 0)),
Coordinate(*self.get_lonlat(0, -1)),
Coordinate(*self.get_lonlat(-1, -1)),
Coordinate(*self.get_lonlat(-1, 0))]
def __contains__(self, point):
"""Is a point inside the 4 corners of the current area? This uses
great circle arcs as area boundaries.
"""
from pyresample.spherical_geometry import point_inside, Coordinate
corners = self.corners
if isinstance(point, tuple):
return point_inside(Coordinate(*point), corners)
else:
return point_inside(point, corners)
def overlaps(self, other):
"""Tests if the current area overlaps the *other* area. This is based
solely on the corners of areas, assuming the boundaries to be great
circles.
:Parameters:
other : object
Instance of subclass of BaseDefinition
:Returns:
overlaps : bool
"""
from pyresample.spherical_geometry import Arc
self_corners = self.corners
other_corners = other.corners
for i in self_corners:
if i in other:
return True
for i in other_corners:
if i in self:
return True
self_arc1 = Arc(self_corners[0], self_corners[1])
self_arc2 = Arc(self_corners[1], self_corners[2])
self_arc3 = Arc(self_corners[2], self_corners[3])
self_arc4 = Arc(self_corners[3], self_corners[0])
other_arc1 = Arc(other_corners[0], other_corners[1])
other_arc2 = Arc(other_corners[1], other_corners[2])
other_arc3 = Arc(other_corners[2], other_corners[3])
other_arc4 = Arc(other_corners[3], other_corners[0])
for i in (self_arc1, self_arc2, self_arc3, self_arc4):
for j in (other_arc1, other_arc2, other_arc3, other_arc4):
if i.intersects(j):
return True
return False
def get_area(self):
"""Get the area of the convex area defined by the corners of the current
area.
"""
from pyresample.spherical_geometry import get_polygon_area
return get_polygon_area(self.corners)
def intersection(self, other):
"""Returns the corners of the intersection polygon of the current area
with *other*.
:Parameters:
other : object
Instance of subclass of BaseDefinition
:Returns:
(corner1, corner2, corner3, corner4) : tuple of points
"""
from pyresample.spherical_geometry import intersection_polygon
return intersection_polygon(self.corners, other.corners)
def overlap_rate(self, other):
"""Get how much the current area overlaps an *other* area.
:Parameters:
other : object
Instance of subclass of BaseDefinition
:Returns:
overlap_rate : float
"""
from pyresample.spherical_geometry import get_polygon_area
other_area = other.get_area()
inter_area = get_polygon_area(self.intersection(other))
return inter_area / other_area
class CoordinateDefinition(BaseDefinition):
    """Base class for geometry definitions given purely by lon/lat arrays."""

    def __init__(self, lons, lats, nprocs=1):
        # Guard clause: the two arrays must agree in shape and dtype.
        if lons.shape != lats.shape or lons.dtype != lats.dtype:
            raise ValueError(('%s must be created with either '
                              'lon/lats of the same shape with same dtype') %
                             self.__class__.__name__)
        self.shape = lons.shape
        self.size = lons.size
        self.ndim = lons.ndim
        self.dtype = lons.dtype
        super(CoordinateDefinition, self).__init__(lons, lats, nprocs)

    def concatenate(self, other):
        """Return a new definition holding this one's points followed by
        *other*'s, using the class picked by _get_highest_level_class."""
        if self.ndim != other.ndim:
            raise DimensionError(('Unable to concatenate %sD and %sD '
                                  'geometries') % (self.ndim, other.ndim))
        klass = _get_highest_level_class(self, other)
        joined_lons = np.concatenate((self.lons, other.lons))
        joined_lats = np.concatenate((self.lats, other.lats))
        return klass(joined_lons, joined_lats,
                     nprocs=min(self.nprocs, other.nprocs))

    def append(self, other):
        """Extend this definition in place with the points of *other*."""
        if self.ndim != other.ndim:
            raise DimensionError(('Unable to append %sD and %sD '
                                  'geometries') % (self.ndim, other.ndim))
        self.lons = np.concatenate((self.lons, other.lons))
        self.lats = np.concatenate((self.lats, other.lats))
        self.shape = self.lons.shape
        self.size = self.lons.size

    def __str__(self):
        # Rely on numpy's object printing
        return ('Shape: %s\nLons: %s\nLats: %s') % (str(self.shape),
                                                    str(self.lons),
                                                    str(self.lats))
class GridDefinition(CoordinateDefinition):
    """Geometry definition for a 2D grid of longitudes and latitudes.

    :Parameters:
    lons : numpy array
    lats : numpy array
    nprocs : int, optional
        Number of processor cores to be used for calculations.

    :Attributes:
    shape : tuple
        Grid shape as (rows, cols)
    size : int
        Number of elements in grid

    Properties:
    lons : object
        Grid lons
    lats : object
        Grid lats
    cartesian_coords : object
        Grid cartesian coordinates
    """

    def __init__(self, lons, lats, nprocs=1):
        # Guard clauses: matching shapes, and strictly two dimensions.
        if lons.shape != lats.shape:
            raise ValueError('lon and lat grid must have same shape')
        if lons.ndim != 2:
            raise ValueError('2 dimensional lon lat grid expected')
        super(GridDefinition, self).__init__(lons, lats, nprocs)
class SwathDefinition(CoordinateDefinition):
    """Geometry definition for a 1D or 2D swath of longitudes and latitudes.

    :Parameters:
    lons : numpy array
    lats : numpy array
    nprocs : int, optional
        Number of processor cores to be used for calculations.

    :Attributes:
    shape : tuple
        Swath shape
    size : int
        Number of elements in swath
    ndims : int
        Swath dimensions

    Properties:
    lons : object
        Swath lons
    lats : object
        Swath lats
    cartesian_coords : object
        Swath cartesian coordinates
    """

    def __init__(self, lons, lats, nprocs=1):
        # Guard clauses: matching shapes, and at most two dimensions.
        if lons.shape != lats.shape:
            raise ValueError('lon and lat arrays must have same shape')
        if lons.ndim > 2:
            raise ValueError('Only 1 and 2 dimensional swaths are allowed')
        super(SwathDefinition, self).__init__(lons, lats, nprocs)
class AreaDefinition(BaseDefinition):
    """Holds definition of an area.

    :Parameters:
    area_id : str
        ID of area
    name : str
        Name of area
    proj_id : str
        ID of projection
    proj_dict : dict
        Dictionary with Proj.4 parameters
    x_size : int
        x dimension in number of pixels
    y_size : int
        y dimension in number of pixels
    area_extent : list
        Area extent as a list (LL_x, LL_y, UR_x, UR_y)
    nprocs : int, optional
        Number of processor cores to be used
    lons : numpy array, optional
        Grid lons
    lats : numpy array, optional
        Grid lats

    :Attributes:
    area_id : str
        ID of area
    name : str
        Name of area
    proj_id : str
        ID of projection
    proj_dict : dict
        Dictionary with Proj.4 parameters
    x_size : int
        x dimension in number of pixels
    y_size : int
        y dimension in number of pixels
    shape : tuple
        Corresponding array shape as (rows, cols)
    size : int
        Number of points in grid
    area_extent : tuple
        Area extent as a tuple (LL_x, LL_y, UR_x, UR_y)
    area_extent_ll : tuple
        Area extent in lons lats as a tuple (LL_lon, LL_lat, UR_lon, UR_lat)
    pixel_size_x : float
        Pixel width in projection units
    pixel_size_y : float
        Pixel height in projection units
    pixel_upper_left : list
        Coordinates (x, y) of center of upper left pixel in projection units
    pixel_offset_x : float
        x offset between projection center and upper left corner of upper
        left pixel in units of pixels.
    pixel_offset_y : float
        y offset between projection center and upper left corner of upper
        left pixel in units of pixels.

    Properties:
    proj4_string : str
        Projection defined as Proj.4 string
    lons : object
        Grid lons
    lats : object
        Grid lats
    cartesian_coords : object
        Grid cartesian coordinates
    projection_x_coords : object
        Grid projection x coordinate
    projection_y_coords : object
        Grid projection y coordinate
    """

    def __init__(self, area_id, name, proj_id, proj_dict, x_size, y_size,
                 area_extent, nprocs=1, lons=None, lats=None, dtype=np.float64):
        if not isinstance(proj_dict, dict):
            raise TypeError('Wrong type for proj_dict: %s. Expected dict.'
                            % type(proj_dict))
        super(AreaDefinition, self).__init__(lons, lats, nprocs)
        self.area_id = area_id
        self.name = name
        self.proj_id = proj_id
        self.x_size = x_size
        self.y_size = y_size
        self.shape = (y_size, x_size)
        if lons is not None:
            if lons.shape != self.shape:
                raise ValueError('Shape of lon lat grid must match '
                                 'area definition')
        self.size = y_size * x_size
        self.ndim = 2
        self.pixel_size_x = (area_extent[2] - area_extent[0]) / float(x_size)
        self.pixel_size_y = (area_extent[3] - area_extent[1]) / float(y_size)
        self.proj_dict = proj_dict
        self.area_extent = tuple(area_extent)
        # Calculate area_extent in lon lat
        proj = _spatial_mp.Proj(**proj_dict)
        corner_lons, corner_lats = proj((area_extent[0], area_extent[2]),
                                        (area_extent[1], area_extent[3]),
                                        inverse=True)
        self.area_extent_ll = (corner_lons[0], corner_lats[0],
                               corner_lons[1], corner_lats[1])
        # Calculate projection coordinates of center of upper left pixel
        self.pixel_upper_left = \
            (float(area_extent[0]) +
             float(self.pixel_size_x) / 2,
             float(area_extent[3]) -
             float(self.pixel_size_y) / 2)
        # Pixel_offset defines the distance to projection center from origin
        # (upper left corner) of image in units of pixels.
        self.pixel_offset_x = -self.area_extent[0] / self.pixel_size_x
        self.pixel_offset_y = self.area_extent[3] / self.pixel_size_y
        # Lazily-computed caches (filled by get_proj_coords with cache=True).
        self.projection_x_coords = None
        self.projection_y_coords = None
        self.dtype = dtype

    def __str__(self):
        # We need a sorted dictionary for a unique hash of str(self)
        proj_dict = self.proj_dict
        proj_str = ('{' +
                    ', '.join(["'%s': '%s'" % (str(k), str(proj_dict[k]))
                               for k in sorted(proj_dict.keys())]) +
                    '}')
        return ('Area ID: %s\nName: %s\nProjection ID: %s\n'
                'Projection: %s\nNumber of columns: %s\nNumber of rows: %s\n'
                'Area extent: %s') % (self.area_id, self.name, self.proj_id,
                                      proj_str, self.x_size, self.y_size,
                                      self.area_extent)

    __repr__ = __str__

    def __eq__(self, other):
        """Test for equality"""
        try:
            return ((self.proj_dict == other.proj_dict) and
                    (self.shape == other.shape) and
                    (self.area_extent == other.area_extent))
        except AttributeError:
            return super(AreaDefinition, self).__eq__(other)

    def __ne__(self, other):
        """Test for inequality"""
        return not self.__eq__(other)

    def get_xy_from_lonlat(self, lon, lat):
        """Retrieve closest x and y coordinates (column, row indices) for the
        specified geolocation (lon,lat) if inside area. If lon,lat is a point a
        ValueError is raised if the return point is outside the area domain. If
        lon,lat is a tuple of sequences of longitudes and latitudes, a tuple of
        masked arrays are returned.

        :Input:
        lon : point or sequence (list or array) of longitudes
        lat : point or sequence (list or array) of latitudes

        :Returns:
        (x, y) : tuple of integer points/arrays
        """
        if isinstance(lon, list):
            lon = np.array(lon)
        if isinstance(lat, list):
            lat = np.array(lat)
        if ((isinstance(lon, np.ndarray) and
             not isinstance(lat, np.ndarray)) or
            (not isinstance(lon, np.ndarray) and
             isinstance(lat, np.ndarray))):
            raise ValueError("Both lon and lat needs to be of " +
                             "the same type and have the same dimensions!")
        if isinstance(lon, np.ndarray) and isinstance(lat, np.ndarray):
            if lon.shape != lat.shape:
                raise ValueError("lon and lat is not of the same shape!")
        pobj = _spatial_mp.Proj(self.proj4_string)
        upl_x = self.area_extent[0]
        upl_y = self.area_extent[3]
        xscale = abs(self.area_extent[2] -
                     self.area_extent[0]) / float(self.x_size)
        yscale = abs(self.area_extent[1] -
                     self.area_extent[3]) / float(self.y_size)
        xm_, ym_ = pobj(lon, lat)
        x__ = (xm_ - upl_x) / xscale
        y__ = (upl_y - ym_) / yscale
        if isinstance(x__, np.ndarray) and isinstance(y__, np.ndarray):
            # Bug fix: use >= for the upper bound. Valid column/row indices
            # run from 0 to size - 1, so an index equal to the size would be
            # outside the grid.
            mask = (((x__ < 0) | (x__ >= self.x_size)) |
                    ((y__ < 0) | (y__ >= self.y_size)))
            return (np.ma.masked_array(x__.astype('int'), mask=mask,
                                       fill_value=-1),
                    np.ma.masked_array(y__.astype('int'), mask=mask,
                                       fill_value=-1))
        else:
            # Same off-by-one fix as above for the scalar case.
            if ((x__ < 0 or x__ >= self.x_size) or
                    (y__ < 0 or y__ >= self.y_size)):
                raise ValueError('Point outside area:( %f %f)' % (x__, y__))
            return int(x__), int(y__)

    def get_lonlat(self, row, col):
        """Retrieves lon and lat values of single point in area grid

        :Parameters:
        row : int
        col : int

        :Returns:
        (lon, lat) : tuple of floats
        """
        return self.get_lonlats(nprocs=None, data_slice=(row, col))

    def get_proj_coords(self, data_slice=None, cache=False, dtype=None):
        """Get projection coordinates of grid

        :Parameters:
        data_slice : slice object, optional
            Calculate only coordinates for specified slice
        cache : bool, optional
            Store the result. Requires data_slice to be None

        :Returns:
        (target_x, target_y) : tuple of numpy arrays
            Grids of area x- and y-coordinates in projection units
        """
        def get_val(val, sub_val, max_val):
            # Get value with substitution and wrapping of negative indices.
            if val is None:
                return sub_val
            else:
                if val < 0:
                    # Wrap index
                    return max_val + val
                else:
                    return val
        if self.projection_x_coords is not None and self.projection_y_coords is not None:
            # Projection coords are cached
            if data_slice is None:
                return self.projection_x_coords, self.projection_y_coords
            else:
                return self.projection_x_coords[data_slice], self.projection_y_coords[data_slice]
        is_single_value = False
        is_1d_select = False
        if dtype is None:
            dtype = self.dtype
        # create coordinates of local area as ndarrays
        if data_slice is None or data_slice == slice(None):
            # Full slice
            rows = self.y_size
            cols = self.x_size
            row_start = 0
            col_start = 0
        else:
            if isinstance(data_slice, slice):
                # Row slice
                row_start = get_val(data_slice.start, 0, self.y_size)
                col_start = 0
                rows = get_val(
                    data_slice.stop, self.y_size, self.y_size) - row_start
                cols = self.x_size
            elif isinstance(data_slice[0], slice) and isinstance(data_slice[1], slice):
                # Block slice
                row_start = get_val(data_slice[0].start, 0, self.y_size)
                col_start = get_val(data_slice[1].start, 0, self.x_size)
                rows = get_val(
                    data_slice[0].stop, self.y_size, self.y_size) - row_start
                cols = get_val(
                    data_slice[1].stop, self.x_size, self.x_size) - col_start
            elif isinstance(data_slice[0], slice):
                # Select from col
                is_1d_select = True
                row_start = get_val(data_slice[0].start, 0, self.y_size)
                col_start = get_val(data_slice[1], 0, self.x_size)
                rows = get_val(
                    data_slice[0].stop, self.y_size, self.y_size) - row_start
                cols = 1
            elif isinstance(data_slice[1], slice):
                # Select from row
                is_1d_select = True
                row_start = get_val(data_slice[0], 0, self.y_size)
                col_start = get_val(data_slice[1].start, 0, self.x_size)
                rows = 1
                cols = get_val(
                    data_slice[1].stop, self.x_size, self.x_size) - col_start
            else:
                # Single element select
                is_single_value = True
                row_start = get_val(data_slice[0], 0, self.y_size)
                col_start = get_val(data_slice[1], 0, self.x_size)
                rows = 1
                cols = 1
        # Calculate coordinates of the pixel centers.
        target_x = np.fromfunction(lambda i, j: (j + col_start) *
                                   self.pixel_size_x +
                                   self.pixel_upper_left[0],
                                   (rows,
                                    cols), dtype=dtype)
        target_y = np.fromfunction(lambda i, j:
                                   self.pixel_upper_left[1] -
                                   (i + row_start) * self.pixel_size_y,
                                   (rows,
                                    cols), dtype=dtype)
        if is_single_value:
            # Return single values
            target_x = float(target_x)
            target_y = float(target_y)
        elif is_1d_select:
            # Reshape to 1D array
            target_x = target_x.reshape((target_x.size,))
            target_y = target_y.reshape((target_y.size,))
        if cache and data_slice is None:
            # Cache the result if requested
            self.projection_x_coords = target_x
            self.projection_y_coords = target_y
        return target_x, target_y

    @property
    def proj_x_coords(self):
        """Projection x-coordinates along the first grid row."""
        return self.get_proj_coords(data_slice=(0, slice(None)))[0]

    @property
    def proj_y_coords(self):
        """Projection y-coordinates along the first grid column."""
        return self.get_proj_coords(data_slice=(slice(None), 0))[1]

    @property
    def outer_boundary_corners(self):
        """Returns the lon,lat of the outer edges of the corner points
        """
        from pyresample.spherical_geometry import Coordinate
        proj = _spatial_mp.Proj(**self.proj_dict)
        corner_lons, corner_lats = proj((self.area_extent[0], self.area_extent[2],
                                         self.area_extent[2], self.area_extent[0]),
                                        (self.area_extent[3], self.area_extent[3],
                                         self.area_extent[1], self.area_extent[1]),
                                        inverse=True)
        return [Coordinate(corner_lons[0], corner_lats[0]),
                Coordinate(corner_lons[1], corner_lats[1]),
                Coordinate(corner_lons[2], corner_lats[2]),
                Coordinate(corner_lons[3], corner_lats[3])]

    def get_lonlats(self, nprocs=None, data_slice=None, cache=False, dtype=None):
        """Returns lon and lat arrays of area.

        :Parameters:
        nprocs : int, optional
            Number of processor cores to be used.
            Defaults to the nprocs set when instantiating object
        data_slice : slice object, optional
            Calculate only coordinates for specified slice
        cache : bool, optional
            Store the result. Requires data_slice to be None

        :Returns:
        (lons, lats) : tuple of numpy arrays
            Grids of area lons and and lats
        """
        if dtype is None:
            dtype = self.dtype
        if self.lons is None or self.lats is None:
            # Data is not cached
            if nprocs is None:
                nprocs = self.nprocs
            # Proj.4 definition of target area projection
            if nprocs > 1:
                target_proj = _spatial_mp.Proj_MP(**self.proj_dict)
            else:
                target_proj = _spatial_mp.Proj(**self.proj_dict)
            # Get coordinates of local area as ndarrays
            target_x, target_y = self.get_proj_coords(
                data_slice=data_slice, dtype=dtype)
            # Get corresponding longitude and latitude values
            lons, lats = target_proj(target_x, target_y, inverse=True,
                                     nprocs=nprocs)
            lons = np.asanyarray(lons, dtype=dtype)
            lats = np.asanyarray(lats, dtype=dtype)
            if cache and data_slice is None:
                # Cache the result if requested
                self.lons = lons
                self.lats = lats
            # Free memory
            del target_x
            del target_y
        else:
            # Data is cached
            if data_slice is None:
                # Full slice
                lons = self.lons
                lats = self.lats
            else:
                lons = self.lons[data_slice]
                lats = self.lats[data_slice]
        return lons, lats

    @property
    def proj4_string(self):
        """Returns projection definition as Proj.4 string"""
        items = self.proj_dict.items()
        # Bug fix: str() the values. proj_dict entries are frequently
        # numeric (e.g. lat_0=50.0), which made the plain string
        # concatenation raise a TypeError.
        return '+' + ' +'.join([t[0] + '=' + str(t[1]) for t in items])
def _get_slice(segments, shape):
"""Generator for segmenting a 1D or 2D array"""
if not (1 <= len(shape) <= 2):
raise ValueError('Cannot segment array of shape: %s' % str(shape))
else:
size = shape[0]
slice_length = np.ceil(float(size) / segments)
start_idx = 0
end_idx = slice_length
while start_idx < size:
if len(shape) == 1:
yield slice(start_idx, end_idx)
else:
yield (slice(start_idx, end_idx), slice(None))
start_idx = end_idx
end_idx = min(start_idx + slice_length, size)
def _flatten_cartesian_coords(cartesian_coords):
"""Flatten array to (n, 3) shape"""
shape = cartesian_coords.shape
if len(shape) > 2:
cartesian_coords = cartesian_coords.reshape(shape[0] *
shape[1], 3)
return cartesian_coords
def _get_highest_level_class(obj1, obj2):
if (not issubclass(obj1.__class__, obj2.__class__) or
not issubclass(obj2.__class__, obj1.__class__)):
raise TypeError('No common superclass for %s and %s' %
(obj1.__class__, obj2.__class__))
if obj1.__class__ == obj2.__class__:
klass = obj1.__class__
elif issubclass(obj1.__class__, obj2.__class__):
klass = obj2.__class__
else:
klass = obj1.__class__
return klass
|
jhamman/pyresample
|
pyresample/geometry.py
|
Python
|
lgpl-3.0
| 31,169
|
#!/usr/bin/env python
import argparse
import datetime
import os
import os.path as osp
os.environ['MPLBACKEND'] = 'Agg' # NOQA
import chainer
import fcn
from train_fcn32s import get_data
from train_fcn32s import get_trainer
here = osp.dirname(osp.abspath(__file__))
def main():
    """Train FCN16s on the VOC data, initialised from a pretrained FCN32s.

    CLI takes the GPU id and the FCN32s weight file; all remaining
    hyper-parameters are fixed and attached to ``args`` so the trainer
    can log them together with the CLI options.
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-g', '--gpu', type=int, required=True, help='gpu id')
    parser.add_argument(
        '--fcn32s-file', default=fcn.models.FCN32s.pretrained_model,
        help='pretrained model file of FCN32s')
    args = parser.parse_args()
    # Fixed training configuration (not exposed on the CLI).
    args.model = 'FCN16s'
    args.lr = 1.0e-12
    args.momentum = 0.99
    args.weight_decay = 0.0005
    args.max_iteration = 100000
    args.interval_print = 20
    args.interval_eval = 4000
    now = datetime.datetime.now()
    args.timestamp = now.isoformat()
    # Per-run log directory named after the start time.
    args.out = osp.join(here, 'logs', now.strftime('%Y%m%d_%H%M%S'))
    # data
    class_names, iter_train, iter_valid, iter_valid_raw = get_data()
    n_class = len(class_names)
    # model: load the pretrained FCN32s and copy its weights into FCN16s.
    fcn32s = fcn.models.FCN32s()
    chainer.serializers.load_npz(args.fcn32s_file, fcn32s)
    model = fcn.models.FCN16s(n_class=n_class)
    model.init_from_fcn32s(fcn32s)
    if args.gpu >= 0:
        chainer.cuda.get_device(args.gpu).use()
        model.to_gpu()
    # optimizer
    optimizer = chainer.optimizers.MomentumSGD(
        lr=args.lr, momentum=args.momentum)
    optimizer.setup(model)
    optimizer.add_hook(chainer.optimizer.WeightDecay(rate=args.weight_decay))
    # Biases ('b' params) get double learning rate and no momentum --
    # presumably following the original FCN training recipe; TODO confirm.
    for p in model.params():
        if p.name == 'b':
            p.update_rule = chainer.optimizers.momentum_sgd.MomentumSGDRule(
                lr=optimizer.lr * 2, momentum=0)
    # The upsampling layers are frozen during training.
    model.upscore2.disable_update()
    model.upscore16.disable_update()
    # trainer
    trainer = get_trainer(optimizer, iter_train, iter_valid, iter_valid_raw,
                          class_names, args)
    trainer.run()
if __name__ == '__main__':
    main()
|
wkentaro/fcn
|
examples/voc/train_fcn16s.py
|
Python
|
mit
| 2,034
|
# -*- coding: utf-8 -*-
###############################################################################
#
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
from openerp import models, fields
class Annotation(models.Model):
    """Extension of myo.annotation: default the code field to '/' so new
    records request an automatically generated Annotation Code."""
    _inherit = 'myo.annotation'

    # default='/' (instead of False) triggers automatic code generation.
    code = fields.Char('Code', index=True, required=False, readonly=False, default='/',
                       help='Use "/" to get an automatic new Annotation Code.')
|
MostlyOpen/odoo_addons_jcafb
|
myo_annotation_cst/models/annotation_seq.py
|
Python
|
agpl-3.0
| 1,352
|
import pytest
from robot_server.service.legacy.models import networking
def test_validate_configuration_deduce_security_psk():
    """securityType is deduced as wpa-psk when only a psk is supplied."""
    config = networking.WifiConfiguration(ssid="a", psk="abc")
    assert config.securityType == networking.NetworkingSecurityType.wpa_psk
def test_validate_configuration_deduce_security_eap():
    """securityType is deduced as wpa-eap when only an eapConfig is supplied."""
    eap_config = {
        'eapType': 'peap/eap-mschapv2',
        'identity': 'test@hi.com',
        'password': 'passwd',
    }
    config = networking.WifiConfiguration(ssid="a", eapConfig=eap_config)
    assert config.securityType == networking.NetworkingSecurityType.wpa_eap
def test_validate_configuration_deduce_security_invalid():
    """Test that we can't deduce security when both psk and eapConfig are
    present"""
    eap_config = {
        'eapType': 'peap/eap-mschapv2',
        'identity': 'test@hi.com',
        'password': 'passwd',
    }
    with pytest.raises(ValueError):
        networking.WifiConfiguration(ssid="a", psk="ee", eapConfig=eap_config)
def test_validate_configuration_deduce_security_none():
    """With neither psk nor eapConfig the security type is none."""
    config = networking.WifiConfiguration(ssid="a")
    assert config.securityType == networking.NetworkingSecurityType.none
def test_validate_configuration_psk_invalid():
    """An explicit wpa-psk security type rejects an eapConfig payload."""
    eap_config = {
        'eapType': 'peap/eap-mschapv2',
        'identity': 'test@hi.com',
        'password': 'passwd',
    }
    with pytest.raises(ValueError):
        networking.WifiConfiguration(
            ssid="a", securityType="wpa-psk", eapConfig=eap_config)
def test_validate_configuration_eap_invalid():
    """An explicit wpa-eap security type rejects a psk payload."""
    with pytest.raises(ValueError):
        networking.WifiConfiguration(
            ssid="a", securityType="wpa-eap", psk="hohos")
def test_eap_config_validate_missing_eap_type():
    """An empty eapConfig is rejected because it lacks an eapType."""
    with pytest.raises(ValueError, match="eapType must be defined"):
        networking.WifiConfiguration(ssid="a", eapConfig={})
def test_eap_config_validate_invalid_eap_config():
    """An eapConfig missing fields required by its eapType is rejected."""
    partial_config = {'eapType': 'peap/eap-mschapv2'}
    with pytest.raises(ValueError, match="Required .+ not present"):
        networking.WifiConfiguration(ssid="a", eapConfig=partial_config)
|
Opentrons/labware
|
robot-server/tests/service/legacy/models/test_networking.py
|
Python
|
apache-2.0
| 2,327
|
"""Provide the device conditions for NEW_NAME."""
from typing import Dict, List
import voluptuous as vol
from homeassistant.const import (
ATTR_ENTITY_ID,
CONF_CONDITION,
CONF_DEVICE_ID,
CONF_DOMAIN,
CONF_ENTITY_ID,
CONF_TYPE,
STATE_OFF,
STATE_ON,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import condition, config_validation as cv, entity_registry
from homeassistant.helpers.config_validation import DEVICE_CONDITION_BASE_SCHEMA
from homeassistant.helpers.typing import ConfigType, TemplateVarsType
from . import DOMAIN
# TODO specify your supported condition types.
CONDITION_TYPES = {"is_on", "is_off"}
# Schema for a device-condition config entry: requires an entity id plus a
# condition type drawn from CONDITION_TYPES, on top of the base fields.
CONDITION_SCHEMA = DEVICE_CONDITION_BASE_SCHEMA.extend(
    {
        vol.Required(CONF_ENTITY_ID): cv.entity_id,
        vol.Required(CONF_TYPE): vol.In(CONDITION_TYPES),
    }
)
async def async_get_conditions(
    hass: HomeAssistant, device_id: str
) -> List[Dict[str, str]]:
    """List device conditions for NEW_NAME devices."""
    registry = await entity_registry.async_get_registry(hass)
    conditions = []
    # Get all the integrations entities for this device
    for entry in entity_registry.async_entries_for_device(registry, device_id):
        if entry.domain != DOMAIN:
            continue
        # Add conditions for each entity that belongs to this integration
        # TODO add your own conditions.
        base_condition = {
            CONF_CONDITION: "device",
            CONF_DEVICE_ID: device_id,
            CONF_DOMAIN: DOMAIN,
            CONF_ENTITY_ID: entry.entity_id,
        }
        # One entry per supported type; replaces the duplicated literal
        # dicts of the hand-unrolled version.
        conditions.extend(
            {**base_condition, CONF_TYPE: condition_type}
            for condition_type in ("is_on", "is_off")
        )
    return conditions
@callback
def async_condition_from_config(
    config: ConfigType, config_validation: bool
) -> condition.ConditionCheckerType:
    """Create a function to test a device condition."""
    if config_validation:
        config = CONDITION_SCHEMA(config)
    # "is_on" maps to STATE_ON; everything else (i.e. "is_off") to STATE_OFF.
    expected_state = STATE_ON if config[CONF_TYPE] == "is_on" else STATE_OFF

    @callback
    def test_is_state(hass: HomeAssistant, variables: TemplateVarsType) -> bool:
        """Test if an entity is a certain state."""
        return condition.state(hass, config[ATTR_ENTITY_ID], expected_state)

    return test_is_state
|
tboyce021/home-assistant
|
script/scaffold/templates/device_condition/integration/device_condition.py
|
Python
|
apache-2.0
| 2,573
|
from __future__ import unicode_literals
import datetime
from django.core import signing
from django.test import SimpleTestCase
from django.test.utils import freeze_time
from django.utils import six
from django.utils.encoding import force_str
class TestSigner(SimpleTestCase):

    def test_signature(self):
        "signature() method should generate a signature"
        signer = signing.Signer('predictable-secret')
        signer2 = signing.Signer('predictable-secret2')
        for s in (
            b'hello',
            b'3098247:529:087:',
            '\u2019'.encode('utf-8'),
        ):
            self.assertEqual(
                signer.signature(s),
                signing.base64_hmac(signer.salt + 'signer', s,
                                    'predictable-secret').decode()
            )
            self.assertNotEqual(signer.signature(s), signer2.signature(s))

    def test_signature_with_salt(self):
        "signature(value, salt=...) should work"
        signer = signing.Signer('predictable-secret', salt='extra-salt')
        self.assertEqual(
            signer.signature('hello'),
            signing.base64_hmac('extra-salt' + 'signer',
                                'hello', 'predictable-secret').decode()
        )
        self.assertNotEqual(
            signing.Signer('predictable-secret', salt='one').signature('hello'),
            signing.Signer('predictable-secret', salt='two').signature('hello'))

    def test_sign_unsign(self):
        "sign/unsign should be reversible"
        signer = signing.Signer('predictable-secret')
        examples = [
            'q;wjmbk;wkmb',
            '3098247529087',
            '3098247:529:087:',
            'jkw osanteuh ,rcuh nthu aou oauh ,ud du',
            '\u2019',
        ]
        if six.PY2:
            examples.append(b'a byte string')
        for example in examples:
            signed = signer.sign(example)
            self.assertIsInstance(signed, str)
            self.assertNotEqual(force_str(example), signed)
            self.assertEqual(example, signer.unsign(signed))

    # Bug fix: renamed from unsign_detects_tampering -- without the test_
    # prefix the test runner never discovered or executed this test.
    def test_unsign_detects_tampering(self):
        "unsign should raise an exception if the value has been tampered with"
        signer = signing.Signer('predictable-secret')
        value = 'Another string'
        signed_value = signer.sign(value)
        transforms = (
            lambda s: s.upper(),
            lambda s: s + 'a',
            lambda s: 'a' + s[1:],
            lambda s: s.replace(':', ''),
        )
        self.assertEqual(value, signer.unsign(signed_value))
        for transform in transforms:
            self.assertRaises(
                signing.BadSignature, signer.unsign, transform(signed_value))

    def test_dumps_loads(self):
        "dumps and loads be reversible for any JSON serializable object"
        objects = [
            ['a', 'list'],
            'a unicode string \u2019',
            {'a': 'dictionary'},
        ]
        if six.PY2:
            objects.append(b'a byte string')
        for o in objects:
            self.assertNotEqual(o, signing.dumps(o))
            self.assertEqual(o, signing.loads(signing.dumps(o)))
            self.assertNotEqual(o, signing.dumps(o, compress=True))
            self.assertEqual(o, signing.loads(signing.dumps(o, compress=True)))

    def test_decode_detects_tampering(self):
        "loads should raise exception for tampered objects"
        transforms = (
            lambda s: s.upper(),
            lambda s: s + 'a',
            lambda s: 'a' + s[1:],
            lambda s: s.replace(':', ''),
        )
        value = {
            'foo': 'bar',
            'baz': 1,
        }
        encoded = signing.dumps(value)
        self.assertEqual(value, signing.loads(encoded))
        for transform in transforms:
            self.assertRaises(
                signing.BadSignature, signing.loads, transform(encoded))

    def test_works_with_non_ascii_keys(self):
        binary_key = b'\xe7'  # Set some binary (non-ASCII key)
        s = signing.Signer(binary_key)
        self.assertEqual('foo:6NB0fssLW5RQvZ3Y-MTerq2rX7w', s.sign('foo'))

    def test_valid_sep(self):
        separators = ['/', '*sep*', ',']
        for sep in separators:
            signer = signing.Signer('predictable-secret', sep=sep)
            self.assertEqual('foo%ssH9B01cZcJ9FoT_jEVkRkNULrl8' % sep, signer.sign('foo'))

    def test_invalid_sep(self):
        """should warn on invalid separator"""
        msg = 'Unsafe Signer separator: %r (cannot be empty or consist of only A-z0-9-_=)'
        separators = ['', '-', 'abc']
        for sep in separators:
            with self.assertRaisesMessage(ValueError, msg % sep):
                signing.Signer(sep=sep)
class TestTimestampSigner(SimpleTestCase):

    def test_timestamp_signer(self):
        payload = 'hello'
        with freeze_time(123456789):
            ts_signer = signing.TimestampSigner('predictable-key')
            signed = ts_signer.sign(payload)
            # The embedded timestamp makes the signature differ from a
            # plain Signer's output for the same key and value.
            self.assertNotEqual(
                signed, signing.Signer('predictable-key').sign(payload))
            self.assertEqual(ts_signer.unsign(signed), payload)
        with freeze_time(123456800):
            # 11 seconds have elapsed, so max_age=12 still accepts.
            self.assertEqual(ts_signer.unsign(signed, max_age=12), payload)
            # max_age parameter can also accept a datetime.timedelta object
            self.assertEqual(
                ts_signer.unsign(signed,
                                 max_age=datetime.timedelta(seconds=11)),
                payload)
            self.assertRaises(signing.SignatureExpired,
                              ts_signer.unsign, signed, max_age=10)
|
WillGuan105/django
|
tests/signing/tests.py
|
Python
|
bsd-3-clause
| 5,473
|
from .downloader import Downloader
async def setup(bot):
    """Load the Downloader cog onto *bot* and start its init task."""
    downloader = Downloader(bot)
    bot.add_cog(downloader)
    downloader.create_init_task()
|
palmtree5/Red-DiscordBot
|
redbot/cogs/downloader/__init__.py
|
Python
|
gpl-3.0
| 133
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Alex
# @Date: 2015-11-16 19:21:37
# @Last Modified by: Alex
# @Last Modified time: 2015-12-21 22:10:00
from django.conf.urls import url
from .views import (PurchOrderListView,
PurchOrderRecentListView,
PurchOrderOpenListView,
PurchOrderReceivedListView,
PurchOrderPaidListView,
createPurchOrderView,
updatePurchOrderView,
DeletePurchOrderView,
export_csv, export_pdf)
# URL routes for purchase orders: filtered list views, create/update/delete,
# and CSV/PDF export endpoints.
urlpatterns = [
    url(r'^purch_orders/$', PurchOrderListView.as_view(), name='list'),
    url(r'^purch_orders/recent/$',
        PurchOrderRecentListView.as_view(), name='recent'),
    url(r'^purch_orders/open/$',
        PurchOrderOpenListView.as_view(), name='open'),
    url(r'^purch_orders/received/$',
        PurchOrderReceivedListView.as_view(), name='received'),
    url(r'^purch_orders/paid/$',
        PurchOrderPaidListView.as_view(), name='paid'),
    url(r'^purch_orders/new/$', createPurchOrderView, name='new'),
    url(r'^purch_orders/update/(?P<PurchId>[-\w]+)/$',
        updatePurchOrderView, name='update'),
    url(r'^purch_orders/delete/(?P<pk>[-\w]+)/$',
        DeletePurchOrderView.as_view(), name='delete'),
    # NOTE(review): the path segment reads 'cvs' while the view and name say
    # CSV -- looks like a typo, but changing the URL would break existing
    # links/bookmarks; confirm before renaming.
    url(r'^purch_orders/export/cvs/$', export_csv, name='csv'),
    url(r'^purch_orders/export/pdf/$', export_pdf, name='pdf'),
]
|
alexharmenta/Inventationery
|
Inventationery/apps/PurchOrder/urls.py
|
Python
|
bsd-3-clause
| 1,457
|
# -*- coding: utf-8 -*-
#
# exercise 5: more variables and printing
#
# string formating
name = 'Zed A. Shaw'
ages = 35 # not a lie
height = 74 # inched
weight = 180 # lbs
eyes = 'Blue'
teeth = 'White'
hair = 'Brown'
print "Let's talk about %s." % name
print "He's %d inched tall." % height
print "He's %d pounds heavy." % weight
print "Actually that's not too heavy."
print "He's got %s eyes and %s hair." % (eyes, hair)
print "His teeth are usually %s depending on the coffee." % teeth
# this line is tricky, try to get it exactly right
print "If I add %d, %d, and %d I get %d." %(
ages, height, weight, ages + height + weight)
|
zstang/learning-python-the-hard-way
|
ex5.py
|
Python
|
mit
| 635
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.