| code (stringlengths 3–1.05M) | repo_name (stringlengths 5–104) | path (stringlengths 4–251) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 3–1.05M) |
|---|---|---|---|---|---|
# System imports
import os
from os.path import join
import pytest
from PyGitUp.git_wrapper import GitError
from PyGitUp.tests import basepath
test_name = 'git-not-in-path'
repo_path = join(basepath, test_name + os.sep)
def setup():
os.makedirs(repo_path, 0o700)
def test_not_a_git_repo():
""" Run 'git up' with git no being in PATH """
os.chdir(repo_path)
environ = os.environ.copy()
os.environ['PATH'] = ''
try:
with pytest.raises(GitError, match="The git executable could not be "
"found"):
from PyGitUp.gitup import GitUp
GitUp(testing=True)
finally:
os.environ.update(environ)
| msiemens/PyGitUp | PyGitUp/tests/test_git_not_in_path.py | Python | mit | 701 |
"""
This script allows you to delete articles in bulk from a query or from a csv.
If you provide a csv, you may also provide a second column containing the delete action, which may be one of:
* delete - actually delete the article
* remove_doi - keep the article, but remove its DOI
* remove_fulltext - keep the article, but remove its Fulltext URLs
"""
from portality import models
import json, csv
from portality.core import app
from portality import constants
def remove_doi(article_id):
article = models.Article.pull(article_id)
try:
# first ensure that it has a fulltext
fts = article.bibjson().get_urls(urltype=constants.LINK_TYPE_FULLTEXT)
if len(fts) > 0:
article.bibjson().remove_identifiers(idtype=constants.IDENT_TYPE_DOI)
article.save()
else:
print("WARN: could not remove DOI from {0} as it has no fulltext URL".format(article_id))
except AttributeError as e:
print("ERROR: could not remove DOI from {0}: {1}".format(article_id, str(e)))
def remove_fulltext(article_id):
article = models.Article.pull(article_id)
try:
# first ensure it has a DOI
dois = article.bibjson().get_identifiers(idtype=constants.IDENT_TYPE_DOI)
if len(dois) > 0:
article.bibjson().remove_urls(urltype=constants.LINK_TYPE_FULLTEXT)
article.save()
else:
print("WARN: could not remove Fulltext from {0} as it has no DOI".format(article_id))
except AttributeError as e:
print("ERROR: could not remove fulltext from {0}: {1}".format(article_id, str(e)))
if __name__ == "__main__":
if app.config.get("SCRIPTS_READ_ONLY_MODE", False):
print("System is in READ-ONLY mode, script cannot run")
exit(1)
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-u", "--username", help="username of user whose articles to remove.")
parser.add_argument("-g", "--ghost", help="specify if you want the articles being deleted not to be snapshot", action="store_true")
parser.add_argument("-q", "--query", help="file page of json document containing delete-by query")
parser.add_argument("-ip", "--ignore-paging", help="ignore the from: and size: parameters in a query object", action="store_true")
parser.add_argument("-c", "--csv", help="csv containing article ids to remove")
args = parser.parse_args()
if not args.username and not args.query and not args.csv:
print("Please specify a username with the -u option, or a query file with the -q option, or a csv with the -c option")
exit(1)
exclusives = 0
if args.username: exclusives += 1
if args.query: exclusives += 1
if args.csv: exclusives += 1
if exclusives > 1:
print("You may only specify a username, a query or a csv alone, not combinations.")
exit(1)
snapshot = not args.ghost
if args.username is not None:
models.Article.delete_selected(owner=args.username, snapshot=snapshot)
print("Articles deleted")
elif args.query is not None:
        with open(args.query) as f:
            query = json.loads(f.read())
if args.ignore_paging:
try:
del query['from']
del query['size']
except KeyError:
pass
if 'sort' in query:
print('You can\'t have "sort" in the query, it breaks ES delete by query. Removing your sort.')
del query['sort']
res = models.Article.query(q=query)
total = res.get("hits", {}).get("total", {}).get('value', 0)
# NOTE: if you have paging, like from: and size: in a query, the
# hits['total'] will show you all results that match the query,
# not just the articles that will actually be deleted (which
# will be just the page of results specified by from: and size:).
go_on = input("This will delete " + str(total) + " articles. Are you sure? [Y/N]:")
if go_on.lower() == "y":
models.Article.delete_selected(query=query, snapshot=snapshot)
print("Articles deleted")
else:
print("Aborted")
elif args.csv is not None:
with open(args.csv) as f:
reader = csv.reader(f)
for row in reader:
article_id = row[0]
action = "delete"
if len(row) > 1:
action = row[1]
if action == "delete":
models.Article.remove_by_id(article_id)
elif action == "remove_doi":
remove_doi(article_id)
elif action == "remove_fulltext":
remove_fulltext(article_id)
| DOAJ/doaj | portality/scripts/articlerm.py | Python | apache-2.0 | 4,752 |
#!/usr/bin/python
# Targets: create a screen, load an image onto a surface, and move the surface around the screen until SPACE is pressed
import pygame
import time
pygame.init()
screen_size=(640,480)
disp=pygame.display.set_mode(screen_size)
face=pygame.image.load('1.png').convert()
default_font=pygame.font.get_default_font()
font=pygame.font.SysFont(default_font,32)
msg=font.render("Press SPACE to exit",True,(250,230,210,127))
noface=face.__copy__()
noface.fill((0,0,0,0))
disp.fill((0,0,0,0))
x=0
y=0
alpha=0
while(True):
begin=time.time()
#remove previous face
disp.blit(noface,(x,y))
#calc new position and alpha
x=(x+(screen_size[0]/100))%screen_size[0]
y=(y+(screen_size[1]/100))%screen_size[1]
alpha=(alpha+1)%256
face.set_alpha(alpha)
disp.blit(face,(x,y))
#message here
disp.blit(msg,(0,0))
pygame.display.update()
pygame.event.pump()
if pygame.key.get_pressed()[pygame.K_SPACE]:
break
    time.sleep(max(0.001, begin + 0.02 - time.time()))  # sleep for the rest of the ~20 ms frame
| amarao/fun_came | learn/pygame_learn/pygame_1st.py | Python | gpl-3.0 | 1,000 |
from datetime import datetime
import mock
import pytz
import time
import urllib2
import zeit.cms.checkout.interfaces
import zeit.cms.testing
import zope.app.locking.lockinfo
class TimeFreezeLockInfo(zope.app.locking.lockinfo.LockInfo):
def __init__(self, *args, **kw):
super(TimeFreezeLockInfo, self).__init__(*args, **kw)
self.created = time.mktime(
datetime(2019, 4, 15, 18, 20, tzinfo=pytz.UTC).timetuple())
class LockAPI(zeit.cms.testing.ZeitCmsBrowserTestCase):
def setUp(self):
super(LockAPI, self).setUp()
# API is available without authentication
self.browser = zeit.cms.testing.Browser()
def test_status_200_for_unlocked(self):
b = self.browser
b.open('http://localhost/@@lock_status'
'?uniqueId=http://xml.zeit.de/testcontent')
self.assertEqual('200 Ok', b.headers['Status'])
self.assert_json({'locked': False, 'owner': None, 'until': None})
def test_status_409_for_locked(self):
with mock.patch('zope.app.locking.adapter.LockInfo',
new=TimeFreezeLockInfo):
zeit.cms.checkout.interfaces.ICheckoutManager(
self.repository['testcontent']).checkout()
b = self.browser
with self.assertRaises(urllib2.HTTPError) as info:
b.open('http://localhost/@@lock_status'
'?uniqueId=http://xml.zeit.de/testcontent')
self.assertEqual(409, info.exception.status)
self.assert_json({'locked': True, 'owner': 'zope.user',
'until': '2019-04-15T18:20:00+00:00'})
def test_resolves_uuid(self):
b = self.browser
# mock connector search() always returns
# http://xml.zeit.de/online/2007/01/Somalia
b.open('http://localhost/@@lock_status?uuid=dummy')
self.assertEqual('200 Ok', b.headers['Status'])
def test_status_404_for_nonexistent(self):
b = self.browser
with self.assertRaises(urllib2.HTTPError) as info:
b.open('http://localhost/@@lock_status'
'?uniqueId=http://xml.zeit.de/nonexistent')
self.assertEqual(404, info.exception.status)
with self.assertRaises(urllib2.HTTPError) as info:
with mock.patch('zeit.connector.mock.Connector.search') as search:
search.return_value = None
b.open('http://localhost/@@lock_status?uuid=dummy')
self.assertEqual(404, info.exception.status)
| ZeitOnline/zeit.cms | src/zeit/cms/locking/browser/tests/test_lock.py | Python | bsd-3-clause | 2,514 |
import hildon
import gtk
import pge_window
import cPickle
import os
class Prefs():
def __init__(self):
self.prefs_dict = {}
def load(self):
try:
f = open(os.path.expanduser("~")+"/.pygtkeditor",'r')
self.prefs_dict = cPickle.load(f)
if not self.prefs_dict.has_key('auto_rotate'):
self.prefs_dict['auto_rotate']=True
if not self.prefs_dict.has_key('show_lines'):
self.prefs_dict['show_lines']=False
if not self.prefs_dict.has_key('indent'):
self.prefs_dict['indent']=' '
if not self.prefs_dict.has_key('auto_clean_line_end'):
self.prefs_dict['auto_clean_line_end']=False
if not self.prefs_dict.has_key('theme'):
self.prefs_dict['theme']='default'
except:
self.default()
def default(self):
self.prefs_dict['hildon_text_completion']=True
self.prefs_dict['default_language']='python'
self.prefs_dict['font_name']='Monospace'
self.prefs_dict['font_size']='12'
self.prefs_dict['auto_rotate']=True
self.prefs_dict['show_lines']=False
self.prefs_dict['indent']=' '
self.prefs_dict['auto_clean_line_end']=False
    def store(self):
        f = open(os.path.expanduser("~")+"/.pygtkeditor",'w')
        cPickle.dump(self.prefs_dict,f)
        f.close()
def edit(self,parent_window):
dialog = gtk.Dialog('PyGTKEditor - Settings',parent_window,gtk.DIALOG_DESTROY_WITH_PARENT,(gtk.STOCK_OK,gtk.RESPONSE_ACCEPT))
#hildon_text_completion
w_hildon_text_completion = hildon.CheckButton(gtk.HILDON_SIZE_AUTO)
w_hildon_text_completion.set_label('Hildon Text Completion')
if self.prefs_dict.has_key('hildon_text_completion'):
w_hildon_text_completion.set_active((self.prefs_dict['hildon_text_completion']==True))
#show lines numbers
w_show_lines = hildon.CheckButton(gtk.HILDON_SIZE_AUTO)
w_show_lines.set_label('Show lines numbers')
if self.prefs_dict.has_key('show_lines'):
w_show_lines.set_active((self.prefs_dict['show_lines']==True))
#auto clean line end
w_auto_clean_line_end = hildon.CheckButton(gtk.HILDON_SIZE_AUTO)
w_auto_clean_line_end.set_label('Auto clean line end (on save)')
if self.prefs_dict.has_key('auto_clean_line_end'):
w_auto_clean_line_end.set_active((self.prefs_dict['auto_clean_line_end']==True))
#auto_rotate
w_auto_rotate = hildon.CheckButton(gtk.HILDON_SIZE_AUTO)
w_auto_rotate.set_label('Auto Portrait Mode')
if self.prefs_dict.has_key('auto_rotate'):
w_auto_rotate.set_active((self.prefs_dict['auto_rotate']==True))
#default_language
w_default_language = hildon.PickerButton(gtk.HILDON_SIZE_AUTO,
hildon.BUTTON_ARRANGEMENT_VERTICAL)
w_default_language.set_title("Default Language")
w_default_language_selector = hildon.TouchSelectorEntry(text=True)
languages = ['None']
print self.prefs_dict['default_language']
for ext,language in pge_window.LANGUAGES:
languages.append(language)
for language in languages:
w_default_language_selector.append_text(language)
w_default_language.set_selector(w_default_language_selector)
if self.prefs_dict.has_key('default_language'):
w_default_language.set_active(languages.index(self.prefs_dict['default_language']))
#Font Button
w_font = hildon.PickerButton(gtk.HILDON_SIZE_AUTO,
hildon.BUTTON_ARRANGEMENT_VERTICAL)
w_font.set_title("Font")
w_font_selector = hildon.TouchSelectorEntry(text=True)
c = parent_window.create_pango_context()
families = c.list_families()
font_names = []
for f in families:
font_names.append(f.get_name())
for f in font_names:
w_font_selector.append_text(f)
w_font.set_selector(w_font_selector)
if self.prefs_dict.has_key('font_name'):
w_font.set_active(font_names.index(self.prefs_dict['font_name']))
w_font.set_value(self.prefs_dict['font_name'])
#Font Size Button
w_font_size = hildon.PickerButton(gtk.HILDON_SIZE_AUTO,
hildon.BUTTON_ARRANGEMENT_VERTICAL)
w_font_size.set_title("Size")
w_font_size_selector = hildon.TouchSelectorEntry(text=True)
font_sizes = []
for f in range(7,49):
font_sizes.append(str(f))
for f in font_sizes:
w_font_size_selector.append_text(f)
w_font_size.set_selector(w_font_size_selector)
if self.prefs_dict.has_key('font_size'):
w_font_size.set_active(font_sizes.index(self.prefs_dict['font_size']))
w_font_size.set_value(self.prefs_dict['font_size'])
#Indent Button
w_indent = hildon.PickerButton(gtk.HILDON_SIZE_AUTO,
hildon.BUTTON_ARRANGEMENT_VERTICAL)
w_indent.set_title("Indent Style")
w_indent_selector = hildon.TouchSelectorEntry(text=True)
indent_style = ['2 spaces','4 spaces','Tabulation']
indent_value = [' ',' ','\t']
for i in range(3):
w_indent_selector.append_text(indent_style[i])
w_indent.set_selector(w_indent_selector)
if self.prefs_dict.has_key('indent'):
print self.prefs_dict['indent']
w_indent.set_active(indent_value.index(self.prefs_dict['indent']))
w_indent.set_value(indent_style[indent_value.index(self.prefs_dict['indent'])])
else:
w_indent.set_active(0)
w_indent.set_value(indent_style[0])
#Theme Button
w_theme = hildon.PickerButton(gtk.HILDON_SIZE_AUTO,
hildon.BUTTON_ARRANGEMENT_VERTICAL)
w_theme.set_title("Syntax Hilight Theme")
w_theme_selector = hildon.TouchSelectorEntry(text=True)
theme_list = ['default','dark',]
for i in theme_list :
w_theme_selector.append_text(i)
w_theme.set_selector(w_theme_selector)
if self.prefs_dict.has_key('theme'):
w_theme.set_active(theme_list.index(self.prefs_dict['theme']))
w_theme.set_value(self.prefs_dict['theme'])
else:
w_theme.set_active(0)
w_theme.set_value(theme_list[0])
p2 = hildon.PannableArea()
p = gtk.VBox()
# dialog.vbox.add(w_hildon_text_completion)
# dialog.vbox.add(w_show_lines)
# dialog.vbox.add(w_auto_rotate)
# dialog.vbox.add(w_default_language)
# hbox = gtk.HBox()
# hbox.add(w_font)
# hbox.add(w_font_size)
# dialog.vbox.add(hbox)
# dialog.vbox.add(w_indent)
# dialog.vbox.add(w_auto_clean_line_end)
p.add(w_hildon_text_completion)
p.add(w_show_lines)
p.add(w_auto_rotate)
p.add(w_default_language)
hbox = gtk.HBox()
hbox.add(w_font)
hbox.add(w_font_size)
p.add(hbox)
p.add(w_indent)
p.add(w_auto_clean_line_end)
p.add(w_theme)
p2.set_size_request(-1,300)
p2.add_with_viewport(p)
dialog.vbox.add(p2)
# p1 = hildon.PannableArea()
#p1.set_size_request(600,400)
# p1.add(vbox)
# dialog.get_child().get_child().add(p1)
# dialog.vbox.add(p1)
dialog.show_all()
if(dialog.run()==gtk.RESPONSE_ACCEPT):
self.prefs_dict['hildon_text_completion']=w_hildon_text_completion.get_active()
self.prefs_dict['auto_rotate']=w_auto_rotate.get_active()
self.prefs_dict['show_lines']=w_show_lines.get_active()
self.prefs_dict['default_language']= w_default_language_selector.get_current_text()
self.prefs_dict['font_name']= w_font_selector.get_current_text()
self.prefs_dict['font_size']= w_font_size_selector.get_current_text()
self.prefs_dict['indent']= indent_value[w_indent_selector.get_active(0)]
self.prefs_dict['auto_clean_line_end']=w_auto_clean_line_end.get_active()
self.prefs_dict['theme']= w_theme_selector.get_current_text()
self.store()
parent_window._parent.apply_prefs()
dialog.destroy()
if __name__ == "__main__":
prefs = Prefs()
prefs.load()
prefs.edit(hildon.Window())
| khertan/PyGTKEditor | pge_preferences.py | Python | gpl-3.0 | 7,831 |
# pylint: skip-file
# flake8: noqa
class Repoquery(RepoqueryCLI):
''' Class to wrap the repoquery
'''
# pylint: disable=too-many-arguments,too-many-instance-attributes
def __init__(self, name, query_type, show_duplicates,
match_version, ignore_excluders, verbose):
        ''' Constructor for Repoquery '''
super(Repoquery, self).__init__(None)
self.name = name
self.query_type = query_type
self.show_duplicates = show_duplicates
self.match_version = match_version
self.ignore_excluders = ignore_excluders
self.verbose = verbose
if self.match_version:
self.show_duplicates = True
self.query_format = "%{version}|%{release}|%{arch}|%{repo}|%{version}-%{release}"
self.tmp_file = None
def build_cmd(self):
''' build the repoquery cmd options '''
repo_cmd = []
repo_cmd.append("--pkgnarrow=" + self.query_type)
repo_cmd.append("--queryformat=" + self.query_format)
if self.show_duplicates:
repo_cmd.append('--show-duplicates')
if self.ignore_excluders:
repo_cmd.append('--config=' + self.tmp_file.name)
repo_cmd.append(self.name)
return repo_cmd
@staticmethod
def process_versions(query_output):
''' format the package data into something that can be presented '''
version_dict = defaultdict(dict)
for version in query_output.decode().split('\n'):
pkg_info = version.split("|")
pkg_version = {}
pkg_version['version'] = pkg_info[0]
pkg_version['release'] = pkg_info[1]
pkg_version['arch'] = pkg_info[2]
pkg_version['repo'] = pkg_info[3]
pkg_version['version_release'] = pkg_info[4]
version_dict[pkg_info[4]] = pkg_version
return version_dict
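    # Illustrative sketch (an assumption, not part of the upstream module): with
    # the query_format defined above, repoquery emits lines such as
    #   3.9.0|1.el7|x86_64|rhel-7-server-rpms|3.9.0-1.el7
    # and process_versions() keys each package dict by that final
    # version-release field.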
def format_versions(self, formatted_versions):
''' Gather and present the versions of each package '''
versions_dict = {}
versions_dict['available_versions_full'] = list(formatted_versions.keys())
# set the match version, if called
if self.match_version:
versions_dict['matched_versions_full'] = []
versions_dict['requested_match_version'] = self.match_version
versions_dict['matched_versions'] = []
# get the "full version (version - release)
versions_dict['available_versions_full'].sort(key=LooseVersion)
versions_dict['latest_full'] = versions_dict['available_versions_full'][-1]
# get the "short version (version)
versions_dict['available_versions'] = []
for version in versions_dict['available_versions_full']:
versions_dict['available_versions'].append(formatted_versions[version]['version'])
if self.match_version:
if version.startswith(self.match_version):
versions_dict['matched_versions_full'].append(version)
versions_dict['matched_versions'].append(formatted_versions[version]['version'])
versions_dict['available_versions'].sort(key=LooseVersion)
versions_dict['latest'] = versions_dict['available_versions'][-1]
# finish up the matched version
if self.match_version:
if versions_dict['matched_versions_full']:
versions_dict['matched_version_found'] = True
versions_dict['matched_versions'].sort(key=LooseVersion)
versions_dict['matched_version_latest'] = versions_dict['matched_versions'][-1]
versions_dict['matched_version_full_latest'] = versions_dict['matched_versions_full'][-1]
else:
versions_dict['matched_version_found'] = False
versions_dict['matched_versions'] = []
versions_dict['matched_version_latest'] = ""
versions_dict['matched_version_full_latest'] = ""
return versions_dict
def repoquery(self):
'''perform a repoquery '''
if self.ignore_excluders:
# Duplicate yum.conf and reset exclude= line to an empty string
# to clear a list of all excluded packages
self.tmp_file = tempfile.NamedTemporaryFile()
with open("/etc/yum.conf", "r") as file_handler:
yum_conf_lines = file_handler.readlines()
yum_conf_lines = [l for l in yum_conf_lines if not l.startswith("exclude=")]
with open(self.tmp_file.name, "w") as file_handler:
file_handler.writelines(yum_conf_lines)
file_handler.flush()
repoquery_cmd = self.build_cmd()
rval = self._repoquery_cmd(repoquery_cmd, True, 'raw')
# check to see if there are actual results
rval['package_name'] = self.name
if rval['results']:
processed_versions = Repoquery.process_versions(rval['results'].strip())
formatted_versions = self.format_versions(processed_versions)
rval['package_found'] = True
rval['versions'] = formatted_versions
if self.verbose:
rval['raw_versions'] = processed_versions
else:
del rval['results']
# No packages found
else:
rval['package_found'] = False
if self.ignore_excluders:
self.tmp_file.close()
return rval
@staticmethod
def run_ansible(params, check_mode):
'''run the ansible idempotent code'''
repoquery = Repoquery(
params['name'],
params['query_type'],
params['show_duplicates'],
params['match_version'],
params['ignore_excluders'],
params['verbose'],
)
state = params['state']
if state == 'list':
results = repoquery.repoquery()
if results['returncode'] != 0:
return {'failed': True,
'msg': results}
return {'changed': False, 'results': results, 'state': 'list', 'check_mode': check_mode}
return {'failed': True,
'changed': False,
'msg': 'Unknown state passed. %s' % state,
'state': 'unknown'}
| ewolinetz/openshift-ansible | roles/lib_utils/src/class/repoquery.py | Python | apache-2.0 | 6,322 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import json
from frappe.utils import cstr
from frappe import _
from frappe.model.document import Document
class CustomField(Document):
def autoname(self):
self.set_fieldname()
self.name = self.dt + "-" + self.fieldname
def set_fieldname(self):
if not self.fieldname:
if not self.label:
frappe.throw(_("Label is mandatory"))
# remove special characters from fieldname
			self.fieldname = filter(lambda x: x.isdigit() or x.isalpha() or x == '_',
				cstr(self.label).lower().replace(' ','_'))
# fieldnames should be lowercase
self.fieldname = self.fieldname.lower()
def validate(self):
meta = frappe.get_meta(self.dt)
fieldnames = [df.fieldname for df in meta.get("fields")]
if self.insert_after and self.insert_after in fieldnames:
self.idx = fieldnames.index(self.insert_after) + 1
if not self.idx:
self.idx = len(fieldnames) + 1
self._old_fieldtype = self.db_get('fieldtype')
if not self.fieldname:
frappe.throw(_("Fieldname not set for Custom Field"))
def on_update(self):
frappe.clear_cache(doctype=self.dt)
if not self.flags.ignore_validate:
# validate field
from frappe.core.doctype.doctype.doctype import validate_fields_for_doctype
validate_fields_for_doctype(self.dt)
# update the schema
if not frappe.db.get_value('DocType', self.dt, 'issingle'):
if (self.fieldname not in frappe.db.get_table_columns(self.dt)
or getattr(self, "_old_fieldtype", None) != self.fieldtype):
from frappe.model.db_schema import updatedb
updatedb(self.dt)
def on_trash(self):
# delete property setter entries
frappe.db.sql("""\
DELETE FROM `tabProperty Setter`
WHERE doc_type = %s
AND field_name = %s""",
(self.dt, self.fieldname))
frappe.clear_cache(doctype=self.dt)
def validate_insert_after(self, meta):
if not meta.get_field(self.insert_after):
frappe.throw(_("Insert After field '{0}' mentioned in Custom Field '{1}', with label '{2}', does not exist")
.format(self.insert_after, self.name, self.label), frappe.DoesNotExistError)
if self.fieldname == self.insert_after:
frappe.throw(_("Insert After cannot be set as {0}").format(meta.get_label(self.insert_after)))
@frappe.whitelist()
def get_fields_label(doctype=None):
return [{"value": df.fieldname or "", "label": _(df.label or "")} for df in frappe.get_meta(doctype).get("fields")]
def create_custom_field_if_values_exist(doctype, df):
df = frappe._dict(df)
if df.fieldname in frappe.db.get_table_columns(doctype) and \
frappe.db.sql("""select count(*) from `tab{doctype}`
where ifnull({fieldname},'')!=''""".format(doctype=doctype, fieldname=df.fieldname))[0][0]:
create_custom_field(doctype, df)
def create_custom_field(doctype, df):
df = frappe._dict(df)
if not frappe.db.get_value("Custom Field", {"dt": doctype, "fieldname": df.fieldname}):
frappe.get_doc({
"doctype":"Custom Field",
"dt": doctype,
"permlevel": df.permlevel or 0,
"label": df.label,
"fieldname": df.fieldname or df.label.lower().replace(' ', '_'),
"fieldtype": df.fieldtype,
"options": df.options,
"insert_after": df.insert_after,
"print_hide": df.print_hide,
"hidden": df.hidden or 0
}).insert()
@frappe.whitelist()
def add_custom_field(doctype, df):
df = json.loads(df)
return create_custom_field(doctype, df)
| rohitwaghchaure/frappe | frappe/custom/doctype/custom_field/custom_field.py | Python | mit | 3,459 |
tabby_cat = "\tI'm tabbed in."
persian_cat = "I'm split\non a line."
backslash_cat = "I'm \\ a \\ cat."
fat_cat = '''
I'll do a list:
\t* Cat food
\t* Fishes
\t* Catnip\n\t* Grass'''
print tabby_cat + "\n" + persian_cat
print backslash_cat
print fat_cat
| srinivasanmit/all-in-all | 1/ex10.py | Python | gpl-3.0 | 255 |
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import urlparse
from tempest_lib.common.utils import data_utils
from tempest_lib import decorators
from tempest.api.messaging import base
from tempest import config
from tempest import test
LOG = logging.getLogger(__name__)
CONF = config.CONF
class TestClaims(base.BaseMessagingTest):
@classmethod
def resource_setup(cls):
super(TestClaims, cls).resource_setup()
cls.queue_name = data_utils.rand_name('Queues-Test')
# Create Queue
cls.create_queue(cls.queue_name)
def _post_and_claim_messages(self, queue_name, repeat=1):
# Post Messages
message_body = self.generate_message_body(repeat=repeat)
        self.client.post_messages(queue_name=queue_name,
                                  rbody=message_body)
# Post Claim
claim_ttl = data_utils.rand_int_id(start=60,
end=CONF.messaging.max_claim_ttl)
claim_grace = data_utils.\
rand_int_id(start=60, end=CONF.messaging.max_claim_grace)
claim_body = {"ttl": claim_ttl, "grace": claim_grace}
        resp, body = self.client.post_claims(queue_name=queue_name,
                                             rbody=claim_body)
return resp, body
@test.attr(type='smoke')
@test.idempotent_id('936cb1ca-b7af-44dd-a752-805e8c98156f')
def test_post_claim(self):
_, body = self._post_and_claim_messages(queue_name=self.queue_name)
claimed_message_uri = body[0]['href']
# Skipping this step till bug-1331517 is fixed
# Get posted claim
# self.client.query_claim(claimed_message_uri)
# Delete Claimed message
self.client.delete_messages(claimed_message_uri)
@decorators.skip_because(bug="1331517")
@test.attr(type='smoke')
@test.idempotent_id('84e491f4-68c6-451f-9846-b8f868eb27c5')
def test_query_claim(self):
# Post a Claim
resp, body = self._post_and_claim_messages(queue_name=self.queue_name)
# Query Claim
claim_uri = resp['location']
self.client.query_claim(claim_uri)
# Delete Claimed message
claimed_message_uri = body[0]['href']
self.delete_messages(claimed_message_uri)
@decorators.skip_because(bug="1328111")
@test.attr(type='smoke')
@test.idempotent_id('420ef0c5-9bd6-4b82-b06d-d9da330fefd3')
def test_update_claim(self):
# Post a Claim
resp, body = self._post_and_claim_messages(queue_name=self.queue_name)
claim_uri = resp['location']
claimed_message_uri = body[0]['href']
# Update Claim
claim_ttl = data_utils.rand_int_id(start=60,
end=CONF.messaging.max_claim_ttl)
update_rbody = {"ttl": claim_ttl}
self.client.update_claim(claim_uri, rbody=update_rbody)
        # Verify updated claim ttl >= requested ttl value
_, body = self.client.query_claim(claim_uri)
updated_claim_ttl = body["ttl"]
self.assertTrue(updated_claim_ttl >= claim_ttl)
# Delete Claimed message
self.client.delete_messages(claimed_message_uri)
@test.attr(type='smoke')
@test.idempotent_id('fd4c7921-cb3f-4ed8-9ac8-e8f1e74c44aa')
def test_release_claim(self):
# Post a Claim
resp, body = self._post_and_claim_messages(queue_name=self.queue_name)
claim_uri = resp['location']
# Release Claim
self.client.release_claim(claim_uri)
# Delete Claimed message
# This will implicitly verify that the claim is deleted.
message_uri = urlparse.urlparse(claim_uri).path
self.client.delete_messages(message_uri)
@classmethod
def resource_cleanup(cls):
cls.delete_queue(cls.queue_name)
super(TestClaims, cls).resource_cleanup()
| fengbeihong/tempest_automate_ironic | tempest/api/messaging/test_claims.py | Python | apache-2.0 | 4,431 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Convert PASCAL-like xml dataset to TFRecord for object_detection.
Example usage:
python object_detection/dataset_tools/create_pascal_tf_record.py \
--data_dir=/home/user/VOCdevkit \
--year=VOC2012 \
--output_path=/home/user/pascal.record
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import hashlib
import io
import logging
import os
import glob
from lxml import etree
import PIL.Image
import tensorflow as tf
from object_detection.utils import dataset_util
from object_detection.utils import label_map_util
flags = tf.app.flags
flags.DEFINE_string('annotations_dir', 'annotations', 'path to annotations xml directory.')
flags.DEFINE_string('label_map_path', 'data/pascal_label_map.pbtxt', 'Path to label map proto')
flags.DEFINE_boolean('ignore_difficult_instances', False, 'Whether to ignore difficult instances')
flags.DEFINE_string('output_path', 'out.tfrecord', 'Path to output TFRecord')
FLAGS = flags.FLAGS
def dict_to_tf_example(data, label_map_dict, ignore_difficult_instances=False):
"""Convert XML derived dict to tf.Example proto.
Notice that this function normalizes the bounding box coordinates provided
by the raw data.
Args:
data: dict holding PASCAL XML fields for a single image (obtained by
running dataset_util.recursive_parse_xml_to_dict)
label_map_dict: A map from string label names to integers ids.
ignore_difficult_instances: Whether to skip difficult instances in the
dataset (default: False).
Returns:
example: The converted tf.Example.
Raises:
ValueError: if the image pointed to by data['filename'] is not a valid JPEG
"""
full_path = data['path']
with tf.gfile.GFile(full_path, 'rb') as fid:
encoded_jpg = fid.read()
encoded_jpg_io = io.BytesIO(encoded_jpg)
image = PIL.Image.open(encoded_jpg_io)
if image.format != 'JPEG' and image.format != 'PNG':
print('image format:', image.format)
raise ValueError('Image format not JPEG or PNG')
key = hashlib.sha256(encoded_jpg).hexdigest()
width = int(data['size']['width'])
height = int(data['size']['height'])
xmin = []
ymin = []
xmax = []
ymax = []
classes = []
classes_text = []
truncated = []
poses = []
difficult_obj = []
if 'object' in data:
for obj in data['object']:
difficult = bool(int(obj['difficult']))
if ignore_difficult_instances and difficult:
continue
difficult_obj.append(int(difficult))
area = (float(obj['bndbox']['xmax']) - float(obj['bndbox']['xmin'])) * (float(obj['bndbox']['ymax']) - float(obj['bndbox']['ymin']))
if area < (width*height) / (16*16):
print('bndbox too small, skipping..', full_path)
continue
xmin.append(float(obj['bndbox']['xmin']) / width)
ymin.append(float(obj['bndbox']['ymin']) / height)
xmax.append(float(obj['bndbox']['xmax']) / width)
ymax.append(float(obj['bndbox']['ymax']) / height)
classes_text.append(obj['name'].encode('utf8'))
classes.append(label_map_dict[obj['name']])
truncated.append(int(obj['truncated']))
poses.append(obj['pose'].encode('utf8'))
example = tf.train.Example(features=tf.train.Features(feature={
'image/height': dataset_util.int64_feature(height),
'image/width': dataset_util.int64_feature(width),
'image/filename': dataset_util.bytes_feature(
data['filename'].encode('utf8')),
'image/source_id': dataset_util.bytes_feature(
data['filename'].encode('utf8')),
'image/key/sha256': dataset_util.bytes_feature(key.encode('utf8')),
'image/encoded': dataset_util.bytes_feature(encoded_jpg),
'image/format': dataset_util.bytes_feature('jpeg'.encode('utf8')),
'image/object/bbox/xmin': dataset_util.float_list_feature(xmin),
'image/object/bbox/xmax': dataset_util.float_list_feature(xmax),
'image/object/bbox/ymin': dataset_util.float_list_feature(ymin),
'image/object/bbox/ymax': dataset_util.float_list_feature(ymax),
'image/object/class/text': dataset_util.bytes_list_feature(classes_text),
'image/object/class/label': dataset_util.int64_list_feature(classes),
'image/object/difficult': dataset_util.int64_list_feature(difficult_obj),
'image/object/truncated': dataset_util.int64_list_feature(truncated),
'image/object/view': dataset_util.bytes_list_feature(poses),
}))
return example
def main(_):
writer = tf.python_io.TFRecordWriter(FLAGS.output_path)
label_map_dict = label_map_util.get_label_map_dict(FLAGS.label_map_path)
xmls = glob.glob(FLAGS.annotations_dir + '/*xml')
for idx, xml_file in enumerate(xmls):
if idx % 100 == 0:
logging.info('On image %d of %d', idx, len(xmls))
with tf.gfile.GFile(xml_file, 'r') as fid:
xml_str = fid.read()
xml = etree.fromstring(xml_str)
data = dataset_util.recursive_parse_xml_to_dict(xml)['annotation']
tf_example = dict_to_tf_example(data, label_map_dict, FLAGS.ignore_difficult_instances)
writer.write(tf_example.SerializeToString())
writer.close()
if __name__ == '__main__':
tf.app.run()
| grehujt/SmallPythonProjects | object_detection/create_my_pascal_tf_record.py | Python | mit | 5,864 |
#! /usr/bin/env python
"""Mimification and unmimification of mail messages.
Decode quoted-printable parts of a mail message or encode using
quoted-printable.
Usage:
mimify(input, output)
unmimify(input, output, decode_base64 = 0)
to encode and decode respectively. Input and output may be the name
of a file or an open file object. Only a readline() method is used
on the input file, only a write() method is used on the output file.
When using file names, the input and output file names may be the
same.
Interactive usage:
mimify.py -e [infile [outfile]]
mimify.py -d [infile [outfile]]
to encode and decode respectively. Infile defaults to standard
input and outfile to standard output.
"""
# Configure
MAXLEN = 200 # if lines longer than this, encode as quoted-printable
CHARSET = 'ISO-8859-1' # default charset for non-US-ASCII mail
QUOTE = '> ' # string replies are quoted with
# End configure
import re
import warnings
warnings.warn("the mimify module is deprecated; use the email package instead",
DeprecationWarning, 2)
__all__ = ["mimify","unmimify","mime_encode_header","mime_decode_header"]
qp = re.compile('^content-transfer-encoding:\\s*quoted-printable', re.I)
base64_re = re.compile('^content-transfer-encoding:\\s*base64', re.I)
mp = re.compile('^content-type:.*multipart/.*boundary="?([^;"\n]*)', re.I|re.S)
chrset = re.compile('^(content-type:.*charset=")(us-ascii|iso-8859-[0-9]+)(".*)', re.I|re.S)
he = re.compile('^-*\n')
mime_code = re.compile('=([0-9a-f][0-9a-f])', re.I)
mime_head = re.compile('=\\?iso-8859-1\\?q\\?([^? \t\n]+)\\?=', re.I)
repl = re.compile('^subject:\\s+re: ', re.I)
class File:
"""A simple fake file object that knows about limited read-ahead and
boundaries. The only supported method is readline()."""
def __init__(self, file, boundary):
self.file = file
self.boundary = boundary
self.peek = None
def readline(self):
if self.peek is not None:
return ''
line = self.file.readline()
if not line:
return line
if self.boundary:
if line == self.boundary + '\n':
self.peek = line
return ''
if line == self.boundary + '--\n':
self.peek = line
return ''
return line
class HeaderFile:
def __init__(self, file):
self.file = file
self.peek = None
def readline(self):
if self.peek is not None:
line = self.peek
self.peek = None
else:
line = self.file.readline()
if not line:
return line
if he.match(line):
return line
while 1:
self.peek = self.file.readline()
if len(self.peek) == 0 or \
(self.peek[0] != ' ' and self.peek[0] != '\t'):
return line
line = line + self.peek
self.peek = None
def mime_decode(line):
"""Decode a single line of quoted-printable text to 8bit."""
newline = ''
pos = 0
while 1:
res = mime_code.search(line, pos)
if res is None:
break
newline = newline + line[pos:res.start(0)] + \
chr(int(res.group(1), 16))
pos = res.end(0)
return newline + line[pos:]
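# For orientation (hypothetical input, not part of the original module):
#   mime_decode('caf=E9 cr=E8me') returns 'caf\xe9 cr\xe8me'
# i.e. each '=XX' escape is replaced by the byte with hex value XX.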
def mime_decode_header(line):
"""Decode a header line to 8bit."""
newline = ''
pos = 0
while 1:
res = mime_head.search(line, pos)
if res is None:
break
match = res.group(1)
# convert underscores to spaces (before =XX conversion!)
match = ' '.join(match.split('_'))
newline = newline + line[pos:res.start(0)] + mime_decode(match)
pos = res.end(0)
return newline + line[pos:]
def unmimify_part(ifile, ofile, decode_base64 = 0):
"""Convert a quoted-printable part of a MIME mail message to 8bit."""
multipart = None
quoted_printable = 0
is_base64 = 0
is_repl = 0
if ifile.boundary and ifile.boundary[:2] == QUOTE:
prefix = QUOTE
else:
prefix = ''
# read header
hfile = HeaderFile(ifile)
while 1:
line = hfile.readline()
if not line:
return
if prefix and line[:len(prefix)] == prefix:
line = line[len(prefix):]
pref = prefix
else:
pref = ''
line = mime_decode_header(line)
if qp.match(line):
quoted_printable = 1
continue # skip this header
if decode_base64 and base64_re.match(line):
is_base64 = 1
continue
ofile.write(pref + line)
if not prefix and repl.match(line):
# we're dealing with a reply message
is_repl = 1
mp_res = mp.match(line)
if mp_res:
multipart = '--' + mp_res.group(1)
if he.match(line):
break
if is_repl and (quoted_printable or multipart):
is_repl = 0
# read body
while 1:
line = ifile.readline()
if not line:
return
line = re.sub(mime_head, '\\1', line)
if prefix and line[:len(prefix)] == prefix:
line = line[len(prefix):]
pref = prefix
else:
pref = ''
## if is_repl and len(line) >= 4 and line[:4] == QUOTE+'--' and line[-3:] != '--\n':
## multipart = line[:-1]
while multipart:
if line == multipart + '--\n':
ofile.write(pref + line)
multipart = None
line = None
break
if line == multipart + '\n':
ofile.write(pref + line)
nifile = File(ifile, multipart)
unmimify_part(nifile, ofile, decode_base64)
line = nifile.peek
if not line:
# premature end of file
break
continue
# not a boundary between parts
break
if line and quoted_printable:
while line[-2:] == '=\n':
line = line[:-2]
newline = ifile.readline()
if newline[:len(QUOTE)] == QUOTE:
newline = newline[len(QUOTE):]
line = line + newline
line = mime_decode(line)
if line and is_base64 and not pref:
import base64
line = base64.decodestring(line)
if line:
ofile.write(pref + line)
def unmimify(infile, outfile, decode_base64 = 0):
"""Convert quoted-printable parts of a MIME mail message to 8bit."""
if type(infile) == type(''):
ifile = open(infile)
if type(outfile) == type('') and infile == outfile:
import os
d, f = os.path.split(infile)
os.rename(infile, os.path.join(d, ',' + f))
else:
ifile = infile
if type(outfile) == type(''):
ofile = open(outfile, 'w')
else:
ofile = outfile
nifile = File(ifile, None)
unmimify_part(nifile, ofile, decode_base64)
ofile.flush()
mime_char = re.compile('[=\177-\377]') # quote these chars in body
mime_header_char = re.compile('[=?\177-\377]') # quote these in header
def mime_encode(line, header):
"""Code a single line as quoted-printable.
If header is set, quote some extra characters."""
if header:
reg = mime_header_char
else:
reg = mime_char
newline = ''
pos = 0
if len(line) >= 5 and line[:5] == 'From ':
# quote 'From ' at the start of a line for stupid mailers
newline = ('=%02x' % ord('F')).upper()
pos = 1
while 1:
res = reg.search(line, pos)
if res is None:
break
newline = newline + line[pos:res.start(0)] + \
('=%02x' % ord(res.group(0))).upper()
pos = res.end(0)
line = newline + line[pos:]
newline = ''
while len(line) >= 75:
i = 73
while line[i] == '=' or line[i-1] == '=':
i = i - 1
i = i + 1
newline = newline + line[:i] + '=\n'
line = line[i:]
return newline + line
mime_header = re.compile('([ \t(]|^)([-a-zA-Z0-9_+]*[\177-\377][-a-zA-Z0-9_+\177-\377]*)(?=[ \t)]|\n)')
def mime_encode_header(line):
"""Code a single header line as quoted-printable."""
newline = ''
pos = 0
while 1:
res = mime_header.search(line, pos)
if res is None:
break
newline = '%s%s%s=?%s?Q?%s?=' % \
(newline, line[pos:res.start(0)], res.group(1),
CHARSET, mime_encode(res.group(2), 1))
pos = res.end(0)
return newline + line[pos:]
mv = re.compile('^mime-version:', re.I)
cte = re.compile('^content-transfer-encoding:', re.I)
iso_char = re.compile('[\177-\377]')
def mimify_part(ifile, ofile, is_mime):
"""Convert an 8bit part of a MIME mail message to quoted-printable."""
has_cte = is_qp = is_base64 = 0
multipart = None
must_quote_body = must_quote_header = has_iso_chars = 0
header = []
header_end = ''
message = []
message_end = ''
# read header
hfile = HeaderFile(ifile)
while 1:
line = hfile.readline()
if not line:
break
if not must_quote_header and iso_char.search(line):
must_quote_header = 1
if mv.match(line):
is_mime = 1
if cte.match(line):
has_cte = 1
if qp.match(line):
is_qp = 1
elif base64_re.match(line):
is_base64 = 1
mp_res = mp.match(line)
if mp_res:
multipart = '--' + mp_res.group(1)
if he.match(line):
header_end = line
break
header.append(line)
# read body
while 1:
line = ifile.readline()
if not line:
break
if multipart:
if line == multipart + '--\n':
message_end = line
break
if line == multipart + '\n':
message_end = line
break
if is_base64:
message.append(line)
continue
if is_qp:
while line[-2:] == '=\n':
line = line[:-2]
newline = ifile.readline()
if newline[:len(QUOTE)] == QUOTE:
newline = newline[len(QUOTE):]
line = line + newline
line = mime_decode(line)
message.append(line)
if not has_iso_chars:
if iso_char.search(line):
has_iso_chars = must_quote_body = 1
if not must_quote_body:
if len(line) > MAXLEN:
must_quote_body = 1
# convert and output header and body
for line in header:
if must_quote_header:
line = mime_encode_header(line)
chrset_res = chrset.match(line)
if chrset_res:
if has_iso_chars:
# change us-ascii into iso-8859-1
if chrset_res.group(2).lower() == 'us-ascii':
line = '%s%s%s' % (chrset_res.group(1),
CHARSET,
chrset_res.group(3))
else:
# change iso-8859-* into us-ascii
line = '%sus-ascii%s' % chrset_res.group(1, 3)
if has_cte and cte.match(line):
line = 'Content-Transfer-Encoding: '
if is_base64:
line = line + 'base64\n'
elif must_quote_body:
line = line + 'quoted-printable\n'
else:
line = line + '7bit\n'
ofile.write(line)
if (must_quote_header or must_quote_body) and not is_mime:
ofile.write('Mime-Version: 1.0\n')
ofile.write('Content-Type: text/plain; ')
if has_iso_chars:
ofile.write('charset="%s"\n' % CHARSET)
else:
ofile.write('charset="us-ascii"\n')
if must_quote_body and not has_cte:
ofile.write('Content-Transfer-Encoding: quoted-printable\n')
ofile.write(header_end)
for line in message:
if must_quote_body:
line = mime_encode(line, 0)
ofile.write(line)
ofile.write(message_end)
line = message_end
while multipart:
if line == multipart + '--\n':
# read bit after the end of the last part
while 1:
line = ifile.readline()
if not line:
return
if must_quote_body:
line = mime_encode(line, 0)
ofile.write(line)
if line == multipart + '\n':
nifile = File(ifile, multipart)
mimify_part(nifile, ofile, 1)
line = nifile.peek
if not line:
# premature end of file
break
ofile.write(line)
continue
# unexpectedly no multipart separator--copy rest of file
while 1:
line = ifile.readline()
if not line:
return
if must_quote_body:
line = mime_encode(line, 0)
ofile.write(line)
def mimify(infile, outfile):
"""Convert 8bit parts of a MIME mail message to quoted-printable."""
if type(infile) == type(''):
ifile = open(infile)
if type(outfile) == type('') and infile == outfile:
import os
d, f = os.path.split(infile)
os.rename(infile, os.path.join(d, ',' + f))
else:
ifile = infile
if type(outfile) == type(''):
ofile = open(outfile, 'w')
else:
ofile = outfile
nifile = File(ifile, None)
mimify_part(nifile, ofile, 0)
ofile.flush()
import sys
if __name__ == '__main__' or (len(sys.argv) > 0 and sys.argv[0] == 'mimify'):
import getopt
usage = 'Usage: mimify [-l len] -[ed] [infile [outfile]]'
decode_base64 = 0
opts, args = getopt.getopt(sys.argv[1:], 'l:edb')
if len(args) not in (0, 1, 2):
print usage
sys.exit(1)
if (('-e', '') in opts) == (('-d', '') in opts) or \
((('-b', '') in opts) and (('-d', '') not in opts)):
print usage
sys.exit(1)
for o, a in opts:
if o == '-e':
encode = mimify
elif o == '-d':
encode = unmimify
elif o == '-l':
try:
MAXLEN = int(a)
except (ValueError, OverflowError):
print usage
sys.exit(1)
elif o == '-b':
decode_base64 = 1
if len(args) == 0:
encode_args = (sys.stdin, sys.stdout)
elif len(args) == 1:
encode_args = (args[0], sys.stdout)
else:
encode_args = (args[0], args[1])
if decode_base64:
encode_args = encode_args + (decode_base64,)
encode(*encode_args)
| huran2014/huran.github.io | wot_gateway/usr/lib/python2.7/mimify.py | Python | gpl-2.0 | 15,021 |
# Copyright (c) 2016-2021 Adobe Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
__version__ = "2.19"
| adobe-apiplatform/umapi-client.py | umapi_client/version.py | Python | mit | 1,138 |
#!/usr/bin/python
#
# Created on Aug 25, 2016
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
# Avi Version: 17.1.1
#
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_gslbhealthmonitor
author: Gaurav Rastogi (grastogi@avinetworks.com)
short_description: Module for setup of GslbHealthMonitor Avi RESTful Object
description:
- This module is used to configure GslbHealthMonitor object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.4"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent","present"]
description:
description:
- User defined description for the object.
dns_monitor:
description:
- Healthmonitordns settings for gslbhealthmonitor.
external_monitor:
description:
- Healthmonitorexternal settings for gslbhealthmonitor.
failed_checks:
description:
- Number of continuous failed health checks before the server is marked down.
- Allowed values are 1-50.
- Default value when not specified in API or module is interpreted by Avi Controller as 2.
http_monitor:
description:
- Healthmonitorhttp settings for gslbhealthmonitor.
https_monitor:
description:
- Healthmonitorhttp settings for gslbhealthmonitor.
monitor_port:
description:
- Use this port instead of the port defined for the server in the pool.
- If the monitor succeeds to this port, the load balanced traffic will still be sent to the port of the server defined within the pool.
- Allowed values are 1-65535.
- Special values are 0 - 'use server port'.
name:
description:
- A user friendly name for this health monitor.
required: true
receive_timeout:
description:
- A valid response from the server is expected within the receive timeout window.
- This timeout must be less than the send interval.
- If server status is regularly flapping up and down, consider increasing this value.
- Allowed values are 1-300.
- Default value when not specified in API or module is interpreted by Avi Controller as 4.
send_interval:
description:
- Frequency, in seconds, that monitors are sent to a server.
- Allowed values are 1-3600.
- Default value when not specified in API or module is interpreted by Avi Controller as 5.
successful_checks:
description:
- Number of continuous successful health checks before server is marked up.
- Allowed values are 1-50.
- Default value when not specified in API or module is interpreted by Avi Controller as 2.
tcp_monitor:
description:
- Healthmonitortcp settings for gslbhealthmonitor.
tenant_ref:
description:
- It is a reference to an object of type tenant.
type:
description:
- Type of the health monitor.
- Enum options - HEALTH_MONITOR_PING, HEALTH_MONITOR_TCP, HEALTH_MONITOR_HTTP, HEALTH_MONITOR_HTTPS, HEALTH_MONITOR_EXTERNAL, HEALTH_MONITOR_UDP,
- HEALTH_MONITOR_DNS, HEALTH_MONITOR_GSLB.
required: true
udp_monitor:
description:
- Healthmonitorudp settings for gslbhealthmonitor.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Uuid of the health monitor.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create GslbHealthMonitor object
avi_gslbhealthmonitor:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_gslbhealthmonitor
"""
RETURN = '''
obj:
description: GslbHealthMonitor (api/gslbhealthmonitor) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, HAS_AVI, avi_ansible_api)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
description=dict(type='str',),
dns_monitor=dict(type='dict',),
external_monitor=dict(type='dict',),
failed_checks=dict(type='int',),
http_monitor=dict(type='dict',),
https_monitor=dict(type='dict',),
monitor_port=dict(type='int',),
name=dict(type='str', required=True),
receive_timeout=dict(type='int',),
send_interval=dict(type='int',),
successful_checks=dict(type='int',),
tcp_monitor=dict(type='dict',),
tenant_ref=dict(type='str',),
type=dict(type='str', required=True),
udp_monitor=dict(type='dict',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'gslbhealthmonitor',
set([]))
if __name__ == '__main__':
main()
| alexlo03/ansible | lib/ansible/modules/network/avi/avi_gslbhealthmonitor.py | Python | gpl-3.0 | 6,453 |
# ===============================================================================
# Copyright 2016 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from __future__ import absolute_import
import json
from pyface.message_dialog import warning
from traits.api import HasTraits, Str, Int, Button
from traitsui.api import View, UItem, Item, HGroup, VGroup, TextEditor
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.hardware.core.communicators.ethernet_communicator import (
EthernetCommunicator,
)
class FirmwareClient(HasTraits):
command = Str # (enter_set=True, auto_set=False)
responses = Str
send_button = Button("Send")
host = Str
port = Int
test_button = Button("Test")
_cnt = 0
def __init__(self, *args, **kw):
super(FirmwareClient, self).__init__(*args, **kw)
c = EthernetCommunicator(
host=self.host, port=self.port, use_end=True, kind="TCP"
)
self._comm = c
def test_connection(self):
if not self._comm.open():
warning(None, "Could not connect to {}:{}".format(self.host, self.port))
else:
return True
def _send(self, cmd):
resp = self._comm.ask(cmd)
resp = "{} ==> {}".format(cmd, resp)
self.responses = "{}\n{}".format(self.responses, resp)
# handlers
def _test_button_fired(self):
# if self._cnt % 2 == 0:
# self._send('Open FF')
# action = 'open'
# else:
# self._send('Close FF')
# action = 'close'
#
# time.sleep(0.5)
# self._send('GetIndicatorState FF,{}'.format(action))
# self._cnt += 1
# d = json.dumps({'command': 'GetPosition', 'drive': 'feeder', 'position': 1, 'units': 'turns'})
# pos = self.command if self._cnt % 2 == 0 else 0
pos = float(self.command)
d = json.dumps(
{
"command": "MoveAbsolute",
"drive": "funnel",
"position": pos,
"units": "turns",
}
)
self._send(d)
self._cnt += 1
# v, a, d = self.command.split(',')
# d = {'command': 'StartJitter', 'drive': 'feeder', 'turns': 0.125, 'p1': 0.1, 'p2': 0.25,
# 'velocity': int(v), 'acceleration': int(a), 'deceleration': int(d)}
# d = json.dumps(d)
# self._send(d)
# time.sleep(5)
#
# d = {'command': 'StopJitter', 'drive': 'feeder'}
# d = json.dumps(d)
# self._send(d)
# mf = MessageFrame()
# mf.nmessage_len = 8
# mf.message_len = True
# imgstr = self._comm.ask('GetImageArray', message_frame=mf, timeout=5)
# print len(imgstr)
# c = NMGRLCamera()
# print c.get_image_data()
# resp = self._comm.ask('GetImageArray')
# print resp
# print self._send('GetImageArray')
# for i in range(5):
# if i % 2 == 0:
# self._send('Open A')
# else:
# self._send('Close A')
# time.sleep(1)
def _send_button_fired(self):
self._send(self.command)
# def _command_changed(self):
# self._send(self.command)
def traits_view(self):
v = View(
VGroup(
HGroup(Item("command"), UItem("send_button"), UItem("test_button")),
UItem("responses", style="custom", editor=TextEditor(read_only=True)),
),
title="Furnace Firmware Client",
resizable=True,
)
return v
if __name__ == "__main__":
c = FirmwareClient(host="192.168.2.2", port=4567)
if c.test_connection():
c.configure_traits()
# ============= EOF =============================================
| USGSDenverPychron/pychron | pychron/furnace/firmware/client.py | Python | apache-2.0 | 4,555 |
import sys
from threading import Lock
import time
import types
from . import values # retain this import style for testability
from .context_managers import ExceptionCounter, InprogressTracker, Timer
from .metrics_core import (
Metric, METRIC_LABEL_NAME_RE, METRIC_NAME_RE,
RESERVED_METRIC_LABEL_NAME_RE,
)
from .registry import REGISTRY
from .utils import floatToGoString, INF
if sys.version_info > (3,):
unicode = str
create_bound_method = types.MethodType
else:
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def _build_full_name(metric_type, name, namespace, subsystem, unit):
full_name = ''
if namespace:
full_name += namespace + '_'
if subsystem:
full_name += subsystem + '_'
full_name += name
if unit and not full_name.endswith("_" + unit):
full_name += "_" + unit
if unit and metric_type in ('info', 'stateset'):
raise ValueError('Metric name is of a type that cannot have a unit: ' + full_name)
if metric_type == 'counter' and full_name.endswith('_total'):
full_name = full_name[:-6] # Munge to OpenMetrics.
return full_name
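# Illustrative example (an assumption, not from the upstream source):
#   _build_full_name('counter', 'requests_total', 'app', 'http', '')
# returns 'app_http_requests'; the trailing '_total' is stripped by the
# "Munge to OpenMetrics" step above.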
def _validate_labelnames(cls, labelnames):
labelnames = tuple(labelnames)
for l in labelnames:
if not METRIC_LABEL_NAME_RE.match(l):
raise ValueError('Invalid label metric name: ' + l)
if RESERVED_METRIC_LABEL_NAME_RE.match(l):
raise ValueError('Reserved label metric name: ' + l)
if l in cls._reserved_labelnames:
raise ValueError('Reserved label metric name: ' + l)
return labelnames
class MetricWrapperBase(object):
_type = None
_reserved_labelnames = ()
def _is_observable(self):
# Whether this metric is observable, i.e.
# * a metric without label names and values, or
# * the child of a labelled metric.
return not self._labelnames or (self._labelnames and self._labelvalues)
def _is_parent(self):
return self._labelnames and not self._labelvalues
def _get_metric(self):
return Metric(self._name, self._documentation, self._type, self._unit)
def describe(self):
return [self._get_metric()]
def collect(self):
metric = self._get_metric()
for suffix, labels, value in self._samples():
metric.add_sample(self._name + suffix, labels, value)
return [metric]
def __init__(self,
name,
documentation,
labelnames=(),
namespace='',
subsystem='',
unit='',
registry=REGISTRY,
labelvalues=None,
):
self._name = _build_full_name(self._type, name, namespace, subsystem, unit)
self._labelnames = _validate_labelnames(self, labelnames)
self._labelvalues = tuple(labelvalues or ())
self._kwargs = {}
self._documentation = documentation
self._unit = unit
if not METRIC_NAME_RE.match(self._name):
raise ValueError('Invalid metric name: ' + self._name)
if self._is_parent():
# Prepare the fields needed for child metrics.
self._lock = Lock()
self._metrics = {}
if self._is_observable():
self._metric_init()
if not self._labelvalues:
# Register the multi-wrapper parent metric, or if a label-less metric, the whole shebang.
if registry:
registry.register(self)
def labels(self, *labelvalues, **labelkwargs):
"""Return the child for the given labelset.
All metrics can have labels, allowing grouping of related time series.
Taking a counter as an example:
from prometheus_client import Counter
c = Counter('my_requests_total', 'HTTP Failures', ['method', 'endpoint'])
c.labels('get', '/').inc()
c.labels('post', '/submit').inc()
Labels can also be provided as keyword arguments:
from prometheus_client import Counter
c = Counter('my_requests_total', 'HTTP Failures', ['method', 'endpoint'])
c.labels(method='get', endpoint='/').inc()
c.labels(method='post', endpoint='/submit').inc()
See the best practices on [naming](http://prometheus.io/docs/practices/naming/)
and [labels](http://prometheus.io/docs/practices/instrumentation/#use-labels).
"""
if not self._labelnames:
raise ValueError('No label names were set when constructing %s' % self)
if self._labelvalues:
raise ValueError('%s already has labels set (%s); can not chain calls to .labels()' % (
self,
dict(zip(self._labelnames, self._labelvalues))
))
if labelvalues and labelkwargs:
raise ValueError("Can't pass both *args and **kwargs")
if labelkwargs:
if sorted(labelkwargs) != sorted(self._labelnames):
raise ValueError('Incorrect label names')
labelvalues = tuple(unicode(labelkwargs[l]) for l in self._labelnames)
else:
if len(labelvalues) != len(self._labelnames):
raise ValueError('Incorrect label count')
labelvalues = tuple(unicode(l) for l in labelvalues)
with self._lock:
if labelvalues not in self._metrics:
self._metrics[labelvalues] = self.__class__(
self._name,
documentation=self._documentation,
labelnames=self._labelnames,
unit=self._unit,
labelvalues=labelvalues,
**self._kwargs
)
return self._metrics[labelvalues]
    def remove(self, *labelvalues):
        """Remove the given labelset from the metric."""
        if not self._labelnames:
            raise ValueError('No label names were set when constructing %s' % self)
        if len(labelvalues) != len(self._labelnames):
            raise ValueError('Incorrect label count (expected %d, got %s)' % (len(self._labelnames), labelvalues))
        labelvalues = tuple(unicode(l) for l in labelvalues)
        with self._lock:
            del self._metrics[labelvalues]
def _samples(self):
if self._is_parent():
return self._multi_samples()
else:
return self._child_samples()
def _multi_samples(self):
with self._lock:
metrics = self._metrics.copy()
for labels, metric in metrics.items():
series_labels = list(zip(self._labelnames, labels))
for suffix, sample_labels, value in metric._samples():
yield (suffix, dict(series_labels + list(sample_labels.items())), value)
def _child_samples(self): # pragma: no cover
raise NotImplementedError('_child_samples() must be implemented by %r' % self)
def _metric_init(self): # pragma: no cover
"""
Initialize the metric object as a child, i.e. when it has labels (if any) set.
This is factored as a separate function to allow for deferred initialization.
"""
raise NotImplementedError('_metric_init() must be implemented by %r' % self)
class Counter(MetricWrapperBase):
"""A Counter tracks counts of events or running totals.
Example use cases for Counters:
- Number of requests processed
- Number of items that were inserted into a queue
- Total amount of data that a system has processed
Counters can only go up (and be reset when the process restarts). If your use case can go down,
you should use a Gauge instead.
An example for a Counter:
from prometheus_client import Counter
c = Counter('my_failures_total', 'Description of counter')
c.inc() # Increment by 1
c.inc(1.6) # Increment by given value
There are utilities to count exceptions raised:
@c.count_exceptions()
def f():
pass
with c.count_exceptions():
pass
# Count only one type of exception
with c.count_exceptions(ValueError):
pass
"""
_type = 'counter'
def _metric_init(self):
self._value = values.ValueClass(self._type, self._name, self._name + '_total', self._labelnames,
self._labelvalues)
self._created = time.time()
def inc(self, amount=1):
"""Increment counter by the given amount."""
if amount < 0:
raise ValueError('Counters can only be incremented by non-negative amounts.')
self._value.inc(amount)
def count_exceptions(self, exception=Exception):
"""Count exceptions in a block of code or function.
Can be used as a function decorator or context manager.
Increments the counter when an exception of the given
type is raised up out of the code.
"""
return ExceptionCounter(self, exception)
def _child_samples(self):
return (
('_total', {}, self._value.get()),
('_created', {}, self._created),
)
class Gauge(MetricWrapperBase):
"""Gauge metric, to report instantaneous values.
Examples of Gauges include:
- Inprogress requests
- Number of items in a queue
- Free memory
- Total memory
- Temperature
Gauges can go both up and down.
from prometheus_client import Gauge
g = Gauge('my_inprogress_requests', 'Description of gauge')
g.inc() # Increment by 1
g.dec(10) # Decrement by given value
g.set(4.2) # Set to a given value
There are utilities for common use cases:
g.set_to_current_time() # Set to current unixtime
# Increment when entered, decrement when exited.
@g.track_inprogress()
def f():
pass
with g.track_inprogress():
pass
A Gauge can also take its value from a callback:
d = Gauge('data_objects', 'Number of objects')
my_dict = {}
d.set_function(lambda: len(my_dict))
"""
_type = 'gauge'
_MULTIPROC_MODES = frozenset(('min', 'max', 'livesum', 'liveall', 'all'))
def __init__(self,
name,
documentation,
labelnames=(),
namespace='',
subsystem='',
unit='',
registry=REGISTRY,
labelvalues=None,
multiprocess_mode='all',
):
self._multiprocess_mode = multiprocess_mode
if multiprocess_mode not in self._MULTIPROC_MODES:
raise ValueError('Invalid multiprocess mode: ' + multiprocess_mode)
super(Gauge, self).__init__(
name=name,
documentation=documentation,
labelnames=labelnames,
namespace=namespace,
subsystem=subsystem,
unit=unit,
registry=registry,
labelvalues=labelvalues,
)
self._kwargs['multiprocess_mode'] = self._multiprocess_mode
def _metric_init(self):
self._value = values.ValueClass(
self._type, self._name, self._name, self._labelnames, self._labelvalues,
multiprocess_mode=self._multiprocess_mode
)
def inc(self, amount=1):
"""Increment gauge by the given amount."""
self._value.inc(amount)
def dec(self, amount=1):
"""Decrement gauge by the given amount."""
self._value.inc(-amount)
def set(self, value):
"""Set gauge to the given value."""
self._value.set(float(value))
def set_to_current_time(self):
"""Set gauge to the current unixtime."""
self.set(time.time())
def track_inprogress(self):
"""Track inprogress blocks of code or functions.
Can be used as a function decorator or context manager.
Increments the gauge when the code is entered,
and decrements when it is exited.
"""
return InprogressTracker(self)
def time(self):
"""Time a block of code or function, and set the duration in seconds.
Can be used as a function decorator or context manager.
"""
return Timer(self.set)
def set_function(self, f):
"""Call the provided function to return the Gauge value.
The function must return a float, and may be called from
multiple threads. All other methods of the Gauge become NOOPs.
"""
def samples(self):
return (('', {}, float(f())),)
self._child_samples = create_bound_method(samples, self)
def _child_samples(self):
return (('', {}, self._value.get()),)
class Summary(MetricWrapperBase):
"""A Summary tracks the size and number of events.
Example use cases for Summaries:
- Response latency
- Request size
Example for a Summary:
from prometheus_client import Summary
s = Summary('request_size_bytes', 'Request size (bytes)')
s.observe(512) # Observe 512 (bytes)
Example for a Summary using time:
from prometheus_client import Summary
REQUEST_TIME = Summary('response_latency_seconds', 'Response latency (seconds)')
@REQUEST_TIME.time()
def create_response(request):
'''A dummy function'''
time.sleep(1)
Example for using the same Summary object as a context manager:
with REQUEST_TIME.time():
pass # Logic to be timed
"""
_type = 'summary'
_reserved_labelnames = ['quantile']
def _metric_init(self):
self._count = values.ValueClass(self._type, self._name, self._name + '_count', self._labelnames,
self._labelvalues)
self._sum = values.ValueClass(self._type, self._name, self._name + '_sum', self._labelnames, self._labelvalues)
self._created = time.time()
def observe(self, amount):
"""Observe the given amount."""
self._count.inc(1)
self._sum.inc(amount)
def time(self):
"""Time a block of code or function, and observe the duration in seconds.
Can be used as a function decorator or context manager.
"""
return Timer(self.observe)
def _child_samples(self):
return (
('_count', {}, self._count.get()),
('_sum', {}, self._sum.get()),
('_created', {}, self._created))
class Histogram(MetricWrapperBase):
"""A Histogram tracks the size and number of events in buckets.
You can use Histograms for aggregatable calculation of quantiles.
Example use cases:
- Response latency
- Request size
Example for a Histogram:
from prometheus_client import Histogram
h = Histogram('request_size_bytes', 'Request size (bytes)')
h.observe(512) # Observe 512 (bytes)
Example for a Histogram using time:
from prometheus_client import Histogram
REQUEST_TIME = Histogram('response_latency_seconds', 'Response latency (seconds)')
@REQUEST_TIME.time()
def create_response(request):
'''A dummy function'''
time.sleep(1)
Example of using the same Histogram object as a context manager:
with REQUEST_TIME.time():
pass # Logic to be timed
The default buckets are intended to cover a typical web/rpc request from milliseconds to seconds.
They can be overridden by passing `buckets` keyword argument to `Histogram`.
"""
_type = 'histogram'
_reserved_labelnames = ['le']
DEFAULT_BUCKETS = (.005, .01, .025, .05, .075, .1, .25, .5, .75, 1.0, 2.5, 5.0, 7.5, 10.0, INF)
def __init__(self,
name,
documentation,
labelnames=(),
namespace='',
subsystem='',
unit='',
registry=REGISTRY,
labelvalues=None,
buckets=DEFAULT_BUCKETS,
):
self._prepare_buckets(buckets)
super(Histogram, self).__init__(
name=name,
documentation=documentation,
labelnames=labelnames,
namespace=namespace,
subsystem=subsystem,
unit=unit,
registry=registry,
labelvalues=labelvalues,
)
self._kwargs['buckets'] = buckets
def _prepare_buckets(self, buckets):
buckets = [float(b) for b in buckets]
if buckets != sorted(buckets):
# This is probably an error on the part of the user,
# so raise rather than sorting for them.
raise ValueError('Buckets not in sorted order')
if buckets and buckets[-1] != INF:
buckets.append(INF)
if len(buckets) < 2:
raise ValueError('Must have at least two buckets')
self._upper_bounds = buckets
def _metric_init(self):
self._buckets = []
self._created = time.time()
bucket_labelnames = self._labelnames + ('le',)
self._sum = values.ValueClass(self._type, self._name, self._name + '_sum', self._labelnames, self._labelvalues)
for b in self._upper_bounds:
self._buckets.append(values.ValueClass(
self._type,
self._name,
self._name + '_bucket',
bucket_labelnames,
self._labelvalues + (floatToGoString(b),))
)
def observe(self, amount):
"""Observe the given amount."""
self._sum.inc(amount)
for i, bound in enumerate(self._upper_bounds):
if amount <= bound:
self._buckets[i].inc(1)
break
def time(self):
"""Time a block of code or function, and observe the duration in seconds.
Can be used as a function decorator or context manager.
"""
return Timer(self.observe)
def _child_samples(self):
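        # Each bucket Value stores only its own count; cumulative "le" counts
        # (and the overall _count) are accumulated at sample time below.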
samples = []
acc = 0
for i, bound in enumerate(self._upper_bounds):
acc += self._buckets[i].get()
samples.append(('_bucket', {'le': floatToGoString(bound)}, acc))
samples.append(('_count', {}, acc))
samples.append(('_sum', {}, self._sum.get()))
samples.append(('_created', {}, self._created))
return tuple(samples)
class Info(MetricWrapperBase):
"""Info metric, key-value pairs.
Examples of Info include:
- Build information
- Version information
- Potential target metadata
Example usage:
from prometheus_client import Info
i = Info('my_build', 'Description of info')
i.info({'version': '1.2.3', 'buildhost': 'foo@bar'})
Info metrics do not work in multiprocess mode.
"""
_type = 'info'
def _metric_init(self):
self._labelname_set = set(self._labelnames)
self._lock = Lock()
self._value = {}
def info(self, val):
"""Set info metric."""
if self._labelname_set.intersection(val.keys()):
raise ValueError('Overlapping labels for Info metric, metric: %s child: %s' % (
self._labelnames, val))
with self._lock:
self._value = dict(val)
def _child_samples(self):
with self._lock:
return (('_info', self._value, 1.0,),)
class Enum(MetricWrapperBase):
"""Enum metric, which of a set of states is true.
Example usage:
from prometheus_client import Enum
e = Enum('task_state', 'Description of enum',
states=['starting', 'running', 'stopped'])
e.state('running')
The first listed state will be the default.
Enum metrics do not work in multiprocess mode.
"""
_type = 'stateset'
def __init__(self,
name,
documentation,
labelnames=(),
namespace='',
subsystem='',
unit='',
registry=REGISTRY,
labelvalues=None,
states=None,
):
super(Enum, self).__init__(
name=name,
documentation=documentation,
labelnames=labelnames,
namespace=namespace,
subsystem=subsystem,
unit=unit,
registry=registry,
labelvalues=labelvalues,
)
if name in labelnames:
raise ValueError('Overlapping labels for Enum metric: %s' % (name,))
if not states:
raise ValueError('No states provided for Enum metric: %s' % (name,))
self._kwargs['states'] = self._states = states
def _metric_init(self):
self._value = 0
self._lock = Lock()
def state(self, state):
"""Set enum metric state."""
with self._lock:
self._value = self._states.index(state)
def _child_samples(self):
with self._lock:
return [
('', {self._name: s}, 1 if i == self._value else 0,)
for i, s
in enumerate(self._states)
]
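if __name__ == '__main__':
    # Minimal usage sketch of the wrappers defined above, assuming this module is
    # importable as part of its package (the relative imports require it).
    # Metric names and label values below are placeholders, not part of the library.
    requests = Counter('demo_requests', 'Demo requests served', ['method'])
    requests.labels(method='get').inc()
    inflight = Gauge('demo_inflight_requests', 'Demo requests in flight')
    inflight.set(3)
    latency = Histogram('demo_latency_seconds', 'Demo request latency')
    latency.observe(0.25)
    for wrapper in (requests, inflight, latency):
        for metric in wrapper.collect():
            for sample in metric.samples:
                print(sample)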
|
kawamon/hue
|
desktop/core/ext-py/prometheus_client-0.7.1/prometheus_client/metrics.py
|
Python
|
apache-2.0
| 21,319
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'valerio cosentino'
import mysql.connector
class DbUtil():
"""
This class provides database utilities
"""
def get_connection(self, config):
"""
gets DB connection
:type config: dict
        :param config: the DB connection parameters
"""
return mysql.connector.connect(**config)
def close_connection(self, cnx):
"""
closes DB connection
:type cnx: Object
:param cnx: DB connection to close
"""
cnx.close()
def lowercase(self, _str):
"""
        convert str to lowercase
:type _str: str
:param _str: str to convert
"""
if _str:
_str = _str.lower()
return _str
def select_project_id(self, cnx, project_name, logger):
"""
gets project id
:type cnx: Object
:param cnx: DB connection
:type project_name: str
:param project_name: name of the project
:type logger: Object
:param logger: logger
"""
found = None
cursor = cnx.cursor()
query = "SELECT p.id " \
"FROM project p " \
"WHERE p.name = %s"
arguments = [project_name]
cursor.execute(query, arguments)
row = cursor.fetchone()
if row:
found = row[0]
else:
logger.error("the project " + str(project_name) + " does not exist")
cursor.close()
return found
def insert_project(self, cnx, db_name, project_name):
"""
inserts a project in the DB
:type cnx: Object
:param cnx: DB connection
:type db_name: str
:param db_name: the name of an existing DB
:type project_name: str
:param project_name: the name of the project to create
"""
self.set_database(cnx, db_name)
cursor = cnx.cursor()
query = "INSERT IGNORE INTO project " \
"VALUES (%s, %s)"
arguments = [None, project_name]
cursor.execute(query, arguments)
cnx.commit()
cursor.close()
def insert_repo(self, cnx, project_id, repo_name, logger):
"""
inserts repository
:type cnx: Object
:param cnx: DB connection
:type project_id: int
:param project_id: id of the project
:type repo_name: str
:param repo_name: name of the repository
:type logger: Object
:param logger: logger
"""
cursor = cnx.cursor()
query = "INSERT IGNORE INTO repository " \
"VALUES (%s, %s, %s)"
arguments = [None, project_id, repo_name]
cursor.execute(query, arguments)
cnx.commit()
cursor.close()
def insert_issue_tracker(self, cnx, repo_id, issue_tracker_name, issue_type, logger):
"""
inserts issue tracker
:type cnx: Object
:param cnx: DB connection
:type repo_id: int
:param repo_id: id of the repository
:type issue_tracker_name: str
:param issue_tracker_name: name of the issue tracker
:type issue_type: str
:param issue_type: type of the issue tracker
:type logger: Object
:param logger: logger
"""
cursor = cnx.cursor()
query = "INSERT IGNORE INTO issue_tracker " \
"VALUES (%s, %s, %s, %s)"
arguments = [None, repo_id, issue_tracker_name, issue_type]
cursor.execute(query, arguments)
cnx.commit()
query = "SELECT id " \
"FROM issue_tracker " \
"WHERE name = %s"
arguments = [issue_tracker_name]
cursor.execute(query, arguments)
        row = cursor.fetchone()
        if row:
            found = row[0]
        else:
            found = None
            logger.warning("no issue tracker with name " + str(issue_tracker_name))
cursor.close()
return found
def select_label_id(self, cnx, name, logger):
"""
selects the label id by its name
:type cnx: Object
:param cnx: DB connection
:type name: str
:param name: the name of the label
:type logger: Object
:param logger: logger
"""
cursor = cnx.cursor()
query = "SELECT id FROM label WHERE name = %s"
arguments = [name]
cursor.execute(query, arguments)
row = cursor.fetchone()
found = None
if row:
found = row[0]
else:
logger.warning("no label with name " + str(name))
cursor.close()
return found
def insert_label(self, cnx, name, logger):
"""
inserts a label
:type cnx: Object
:param cnx: DB connection
:type name: str
:param name: the name of the label
:type logger: Object
:param logger: logger
"""
cursor = cnx.cursor()
query = "INSERT IGNORE INTO label " \
"VALUES (%s, %s)"
arguments = [None, name]
cursor.execute(query, arguments)
cnx.commit()
cursor.close()
def select_repo_id(self, cnx, repo_name, logger):
"""
selects repository id
:type cnx: Object
:param cnx: DB connection
:type repo_name: str
:param repo_name: name of the repository
:type logger: Object
:param logger: logger
"""
found = None
cursor = cnx.cursor()
query = "SELECT id " \
"FROM repository " \
"WHERE name = %s"
arguments = [repo_name]
cursor.execute(query, arguments)
row = cursor.fetchone()
if row:
found = row[0]
else:
logger.error("the repository " + repo_name + " does not exist")
cursor.close()
return found
def select_instant_messaging_id(self, cnx, im_name, logger):
"""
selects instant messaging id
:type cnx: Object
:param cnx: DB connection
:type im_name: str
:param im_name: name of the instant messaging
:type logger: Object
:param logger: logger
"""
found = None
cursor = cnx.cursor()
query = "SELECT id " \
"FROM instant_messaging " \
"WHERE name = %s"
arguments = [im_name]
cursor.execute(query, arguments)
row = cursor.fetchone()
if row:
found = row[0]
else:
logger.error("the instant messaging " + im_name + " does not exist")
cursor.close()
return found
def insert_user(self, cnx, name, email, logger):
"""
inserts user
:type cnx: Object
:param cnx: DB connection
:type name: str
:param name: name of the user
:type email: str
:param email: email of the user
:type logger: Object
:param logger: logger
"""
cursor = cnx.cursor()
query = "INSERT IGNORE INTO user " \
"VALUES (%s, %s, %s)"
arguments = [None, name, email]
cursor.execute(query, arguments)
cnx.commit()
cursor.close()
def select_user_id_by_email(self, cnx, email, logger):
"""
selects user id by email
:type cnx: Object
:param cnx: DB connection
:type email: str
:param email: email of the user
:type logger: Object
:param logger: logger
"""
found = None
if email:
cursor = cnx.cursor()
query = "SELECT id " \
"FROM user " \
"WHERE email = %s"
arguments = [email]
cursor.execute(query, arguments)
row = cursor.fetchone()
if row:
found = row[0]
else:
logger.debug("there is not user with this email " + email)
cursor.close()
return found
def select_user_id_by_name(self, cnx, name, logger):
"""
selects user id by name
:type cnx: Object
:param cnx: DB connection
:type name: str
:param name: name of the user
:type logger: Object
:param logger: logger
"""
found = None
if name:
cursor = cnx.cursor()
query = "SELECT id " \
"FROM user " \
"WHERE name = %s"
arguments = [name]
cursor.execute(query, arguments)
row = cursor.fetchone()
if row:
found = row[0]
else:
logger.debug("there is not user with this name " + name)
cursor.close()
return found
def select_forum_id(self, cnx, forum_name, logger):
"""
selects forum id
:type cnx: Object
:param cnx: DB connection
:type forum_name: str
:param forum_name: name of the forum
:type logger: Object
:param logger: logger
"""
found = None
cursor = cnx.cursor()
query = "SELECT id " \
"FROM forum " \
"WHERE name = %s"
arguments = [forum_name]
cursor.execute(query, arguments)
row = cursor.fetchone()
if row:
found = row[0]
else:
logger.error("the forum " + forum_name + " does not exist")
cursor.close()
return found
def select_issue_tracker_id(self, cnx, issue_tracker_name, logger):
"""
selects issue tracker id
:type cnx: Object
:param cnx: DB connection
:type issue_tracker_name: str
:param issue_tracker_name: name of the issue tracker
:type logger: Object
:param logger: logger
"""
found = None
cursor = cnx.cursor()
query = "SELECT id " \
"FROM issue_tracker " \
"WHERE name = %s"
arguments = [issue_tracker_name]
cursor.execute(query, arguments)
row = cursor.fetchone()
if row:
found = row[0]
else:
logger.error("the issue tracker " + issue_tracker_name + " does not exist")
cursor.close()
return found
def get_issue_dependency_type_id(self, cnx, name):
"""
selects issue dependency type id
:type cnx: Object
:param cnx: DB connection
:type name: str
:param name: dependency type name
"""
found = None
cursor = cnx.cursor()
query = "SELECT id FROM issue_dependency_type WHERE name = %s"
arguments = [name]
cursor.execute(query, arguments)
row = cursor.fetchone()
cursor.close()
if row:
found = row[0]
return found
def get_message_type_id(self, cnx, name):
"""
selects message type id
:type cnx: Object
:param cnx: DB connection
:type name: str
:param name: message type name
"""
found = None
cursor = cnx.cursor()
query = "SELECT id FROM message_type WHERE name = %s"
arguments = [name]
cursor.execute(query, arguments)
row = cursor.fetchone()
if row:
found = row[0]
cursor.close()
return found
def set_database(self, cnx, db_name):
"""
set database
:type cnx: Object
:param cnx: DB connection
:type db_name: str
:param db_name: name of the database
"""
cursor = cnx.cursor()
use_database = "USE " + db_name
cursor.execute(use_database)
cursor.close()
def set_settings(self, cnx):
"""
set database settings
:type cnx: Object
:param cnx: DB connection
"""
cursor = cnx.cursor()
cursor.execute("set global innodb_file_format = BARRACUDA")
cursor.execute("set global innodb_file_format_max = BARRACUDA")
cursor.execute("set global innodb_large_prefix = ON")
cursor.execute("set global character_set_server = utf8")
cursor.execute("set global max_connections = 500")
cursor.close()
def restart_connection(self, config, logger):
"""
restart DB connection
:type config: dict
        :param config: the DB connection parameters
:type logger: Object
:param logger: logger
"""
logger.info("restarting connection...")
return mysql.connector.connect(**config)
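if __name__ == '__main__':
    # Illustrative sketch only: the connection parameters and project name below
    # are placeholders and assume a reachable MySQL instance with a Gitana schema.
    import logging
    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("db_util_demo")
    config = {'host': 'localhost', 'user': 'root', 'password': 'secret', 'database': 'gitana_db'}
    db_util = DbUtil()
    cnx = db_util.get_connection(config)
    project_id = db_util.select_project_id(cnx, "my_project", logger)
    logger.info("project id: %s", project_id)
    db_util.close_connection(cnx)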
|
SOM-Research/Gitana
|
util/db_util.py
|
Python
|
mit
| 12,774
|
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Glm(CMakePackage):
"""OpenGL Mathematics (GLM) is a header only C++ mathematics library for
graphics software based on the OpenGL Shading Language (GLSL) specification
"""
homepage = "https://github.com/g-truc/glm"
url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz"
version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8')
depends_on('cmake@2.6:', type='build')
|
wscullin/spack
|
var/spack/repos/builtin/packages/glm/package.py
|
Python
|
lgpl-2.1
| 1,663
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README_PYPI.rst') as file:
long_description = file.read()
setup(name='poirot',
version='1.0.1',
author='Emanuel Feld',
author_email='elefbet@gmail.com',
description="Search a Git repository's full revision history (commit messages and diffs) for text patterns.",
long_description=long_description,
url='https://github.com/emanuelfeld/poirot',
license='https://raw.githubusercontent.com/emanuelfeld/poirot/master/LICENSE.md',
packages=['poirot'],
install_requires=[
'tqdm>=3.4.0',
'Jinja2>=2.8',
'regex>=2015.11.22',
'requests>=2.9.1'
],
test_suite='nose.collector',
tests_require=['nose-progressive'],
classifiers=[
'Environment :: Console',
'Intended Audience :: Developers',
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5'
],
include_package_data=True,
entry_points={
'console_scripts': [
'poirot=poirot.poirot:main',
]
},
zip_safe=False)
|
emanuelfeld/poirot
|
setup.py
|
Python
|
mit
| 1,442
|
from django.db import models
from django.contrib.auth import models as auth
import datetime
from application import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
typeChoices = (
('task', 'Task'),
('userStory', 'User Story'),
)
statusChoices = (
('toDo', 'To do'),
('inProgress', 'in progress'),
('done', 'Done'),
)
categoryChoices = (
('frontend', 'Frontend'),
('backend', 'Backend'),
('design', 'Design'),
)
purposeChoices = (
('bugfix', 'Bugfix'),
('feature', 'Feature'),
)
class WorkGroup(models.Model):
name = models.CharField(
max_length=200,
unique = True,
)
def __unicode__(self):
return u'%s' % (self.name)
class TaskCard(models.Model):
creator = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='createdTasks',
on_delete=models.PROTECT,
)
processor = models.ForeignKey(
settings.AUTH_USER_MODEL,
related_name='processingTasks',
blank=True,
null=True,
)
createTime = models.DateTimeField(
auto_now_add=True,
)
startTime = models.DateField(
null=True,
blank=True,
)
#endTime = models.DateTimeField()
#sprint = models.ForeignKey(Sprint)
title = models.CharField(
max_length=200,
)
taskType = models.CharField(
max_length=15,
choices=typeChoices,
default='task',
)
taskPurpose = models.CharField(
max_length=15,
choices=purposeChoices,
blank=True,
null=True,
)
taskCategory = models.CharField(
max_length=15,
choices=categoryChoices,
blank=True,
null=True,
)
description = models.TextField()
status = models.CharField(
max_length=15,
choices=statusChoices,
blank=True,
null=True,
)
group = models.ForeignKey(
WorkGroup,
null=True,
blank=True,
)
def __unicode__(self):
return self.title
def save(self, *args, **kwargs):
if self.startTime is None and self.processor is not None:
self.startTime = datetime.date.today()
            self.status = 'inProgress'
        if self.status is None:
            self.status = statusChoices[0][0]
if self.group is None:
self.group = self.creator.taskCardUser.workGroup
super(TaskCard, self).save(*args, **kwargs)
def commentsDescending(self, *args, **kwargs):
return self.comments.order_by('-published',)
class TaskCardUser(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL,
related_name='taskCardUser'
)
workGroup = models.ForeignKey(
WorkGroup,
related_name='taskCardUser'
)
def __unicode__(self):
return u'%s' % (self.user)
#@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def connectTaskCardUser(sender, instance, created, **kwargs):
if created:
TaskCardUser.objects.create(user=instance, workGroup=WorkGroup.objects.get(id=1))
post_save.connect(connectTaskCardUser, sender=settings.AUTH_USER_MODEL)
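# Note: connectTaskCardUser assigns every newly created user to the WorkGroup with
# id=1, so that group must already exist before users are created.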
class Comment(models.Model):
taskCard = models.ForeignKey(
TaskCard,
related_name = 'comments',
)
author = models.ForeignKey(
settings.AUTH_USER_MODEL
)
published = models.DateTimeField(
null=True,
blank=True,
)
text = models.CharField(
max_length=255,
)
def save(self, *args, **kwargs):
self.published = datetime.datetime.now()
super(Comment, self).save(*args, **kwargs)
class Meta:
unique_together = ('taskCard', 'published')
#class Sprint(models.Model):
# startTime = models.DateTimeField()
# endTime = models.DateTimeField()
|
Die-Turtles/application
|
taskCards/models.py
|
Python
|
gpl-2.0
| 3,381
|
# Copyright (c) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
import unittest
from optparse import make_option
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.tool.multicommandtool import MultiCommandTool, Command, TryAgain
class TrivialCommand(Command):
name = "trivial"
show_in_main_help = True
def __init__(self, **kwargs):
Command.__init__(self, "help text", **kwargs)
def execute(self, options, args, tool):
pass
class UncommonCommand(TrivialCommand):
name = "uncommon"
show_in_main_help = False
class LikesToRetry(Command):
name = "likes-to-retry"
show_in_main_help = True
def __init__(self, **kwargs):
Command.__init__(self, "help text", **kwargs)
self.execute_count = 0
def execute(self, options, args, tool):
self.execute_count += 1
if self.execute_count < 2:
raise TryAgain()
class CommandTest(unittest.TestCase):
def test_name_with_arguments(self):
command_with_args = TrivialCommand(argument_names="ARG1 ARG2")
self.assertEqual(command_with_args.name_with_arguments(), "trivial ARG1 ARG2")
command_with_args = TrivialCommand(options=[make_option("--my_option")])
self.assertEqual(command_with_args.name_with_arguments(), "trivial [options]")
def test_parse_required_arguments(self):
self.assertEqual(Command._parse_required_arguments("ARG1 ARG2"), ["ARG1", "ARG2"])
self.assertEqual(Command._parse_required_arguments("[ARG1] [ARG2]"), [])
self.assertEqual(Command._parse_required_arguments("[ARG1] ARG2"), ["ARG2"])
# Note: We might make our arg parsing smarter in the future and allow this type of arguments string.
self.assertRaises(Exception, Command._parse_required_arguments, "[ARG1 ARG2]")
def test_required_arguments(self):
two_required_arguments = TrivialCommand(argument_names="ARG1 ARG2 [ARG3]")
expected_missing_args_error = "2 arguments required, 1 argument provided. Provided: 'foo' Required: ARG1 ARG2\nSee 'trivial-tool help trivial' for usage.\n"
exit_code = OutputCapture().assert_outputs(self, two_required_arguments.check_arguments_and_execute, [None, ["foo"], TrivialTool()], expected_stderr=expected_missing_args_error)
self.assertEqual(exit_code, 1)
class TrivialTool(MultiCommandTool):
def __init__(self, commands=None):
MultiCommandTool.__init__(self, name="trivial-tool", commands=commands)
def path(self):
return __file__
def should_execute_command(self, command):
return (True, None)
class MultiCommandToolTest(unittest.TestCase):
def _assert_split(self, args, expected_split):
self.assertEqual(MultiCommandTool._split_command_name_from_args(args), expected_split)
def test_split_args(self):
# MultiCommandToolTest._split_command_name_from_args returns: (command, args)
full_args = ["--global-option", "command", "--option", "arg"]
full_args_expected = ("command", ["--global-option", "--option", "arg"])
self._assert_split(full_args, full_args_expected)
full_args = []
full_args_expected = (None, [])
self._assert_split(full_args, full_args_expected)
full_args = ["command", "arg"]
full_args_expected = ("command", ["arg"])
self._assert_split(full_args, full_args_expected)
def test_command_by_name(self):
# This also tests Command auto-discovery.
tool = TrivialTool()
self.assertEqual(tool.command_by_name("trivial").name, "trivial")
self.assertEqual(tool.command_by_name("bar"), None)
def _assert_tool_main_outputs(self, tool, main_args, expected_stdout, expected_stderr = "", expected_exit_code=0):
exit_code = OutputCapture().assert_outputs(self, tool.main, [main_args], expected_stdout=expected_stdout, expected_stderr=expected_stderr)
self.assertEqual(exit_code, expected_exit_code)
def test_retry(self):
likes_to_retry = LikesToRetry()
tool = TrivialTool(commands=[likes_to_retry])
tool.main(["tool", "likes-to-retry"])
self.assertEqual(likes_to_retry.execute_count, 2)
def test_global_help(self):
tool = TrivialTool(commands=[TrivialCommand(), UncommonCommand()])
expected_common_commands_help = """Usage: trivial-tool [options] COMMAND [ARGS]
Options:
-h, --help show this help message and exit
Common trivial-tool commands:
trivial help text
See 'trivial-tool help --all-commands' to list all commands.
See 'trivial-tool help COMMAND' for more information on a specific command.
"""
self._assert_tool_main_outputs(tool, ["tool"], expected_common_commands_help)
self._assert_tool_main_outputs(tool, ["tool", "help"], expected_common_commands_help)
expected_all_commands_help = """Usage: trivial-tool [options] COMMAND [ARGS]
Options:
-h, --help show this help message and exit
All trivial-tool commands:
help Display information about this program or its subcommands
trivial help text
uncommon help text
See 'trivial-tool help --all-commands' to list all commands.
See 'trivial-tool help COMMAND' for more information on a specific command.
"""
self._assert_tool_main_outputs(tool, ["tool", "help", "--all-commands"], expected_all_commands_help)
# Test that arguments can be passed before commands as well
self._assert_tool_main_outputs(tool, ["tool", "--all-commands", "help"], expected_all_commands_help)
def test_command_help(self):
command_with_options = TrivialCommand(options=[make_option("--my_option")], long_help="LONG HELP")
tool = TrivialTool(commands=[command_with_options])
expected_subcommand_help = "trivial [options] help text\n\nLONG HELP\n\nOptions:\n --my_option=MY_OPTION\n\n"
self._assert_tool_main_outputs(tool, ["tool", "help", "trivial"], expected_subcommand_help)
if __name__ == "__main__":
unittest.main()
|
mogoweb/webkit_for_android5.1
|
webkit/Tools/Scripts/webkitpy/tool/multicommandtool_unittest.py
|
Python
|
apache-2.0
| 7,516
|
import json
from collections import OrderedDict, namedtuple
from contextlib import contextmanager
from celery import states
from celery.exceptions import Ignore
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from memoized import memoized
from corehq.apps.case_importer.const import LookupErrors
from corehq.apps.case_importer.exceptions import (
ImporterRawError,
ImporterExcelError,
ImporterExcelFileEncrypted,
ImporterFileNotFound,
ImporterRefError,
)
from corehq.form_processor.exceptions import CaseNotFound
from corehq.form_processor.models import CommCareCase
from corehq.util.workbook_reading import (
SpreadsheetFileEncrypted,
SpreadsheetFileInvalidError,
SpreadsheetFileNotFound,
Workbook,
open_any_workbook,
)
from soil.progress import update_task_state
# Don't allow users to change the case type by accident using a custom field. But do allow users to change
# owner_id, external_id, etc. (See also custom_data_fields.models.RESERVED_WORDS)
RESERVED_FIELDS = ('type', 'closed', 'parent_ref')
EXTERNAL_ID = 'external_id'
class ImporterConfig(namedtuple('ImporterConfig', [
'couch_user_id',
'excel_fields',
'case_fields',
'custom_fields',
'search_column',
'case_type',
'search_field',
'create_new_cases',
])):
"""
Class for storing config values from the POST in a format that can
be pickled and passed to celery tasks.
"""
def __new__(cls, *args, **kwargs):
args, kwargs = cls.__detect_schema_change(args, kwargs)
return super(cls, ImporterConfig).__new__(cls, *args, **kwargs)
@staticmethod
def __detect_schema_change(args, kwargs):
# before we removed key_column, value_column, named_columns
# from positions 5-7
if len(args) == 11 and not kwargs:
return args[:5] + args[8:], {}
else:
return args, kwargs
def to_dict(self):
return self._asdict()
def to_json(self):
return json.dumps(self.to_dict())
@classmethod
def from_dict(cls, json_dict):
return cls(**json_dict)
@classmethod
def from_json(cls, json_rep):
return cls.from_dict(json.loads(json_rep))
@classmethod
def from_request(cls, request):
return cls(
couch_user_id=request.couch_user._id,
excel_fields=request.POST.getlist('excel_field[]'),
case_fields=request.POST.getlist('case_field[]'),
custom_fields=request.POST.getlist('custom_field[]'),
search_column=request.POST['search_column'],
case_type=request.POST['case_type'],
search_field=request.POST['search_field'],
create_new_cases=request.POST['create_new_cases'] == 'True',
)
class WorksheetWrapper(object):
def __init__(self, worksheet):
self._worksheet = worksheet
@classmethod
def from_workbook(cls, workbook):
if not isinstance(workbook, Workbook):
raise AssertionError(
"WorksheetWrapper.from_workbook called without Workbook object")
elif not workbook.worksheets:
raise SpreadsheetFileInvalidError(
_("It seems as though your spreadsheet contains no sheets. Please resave it and try again."))
else:
return cls(workbook.worksheets[0])
@cached_property
def _headers_by_index(self):
try:
header_row = next(self.iter_rows())
except StopIteration:
header_row = []
return OrderedDict(
(i, header) for i, header in enumerate(header_row)
if header # remove None columns the library sometimes returns
)
def get_header_columns(self):
return list(self._headers_by_index.values())
@property
def max_row(self):
return self._worksheet.max_row
def iter_rows(self):
for row in self._worksheet.iter_rows():
yield [cell.value for cell in row]
def iter_row_dicts(self):
for row in self.iter_rows():
yield {
self._headers_by_index[i]: value
for i, value in enumerate(row)
if i in self._headers_by_index
}
def lookup_case(search_field, search_id, domain, case_type):
"""
Attempt to find the case by the provided search_field and search_id.
Returns a tuple with case (if found) and an
error code (if there was an error in lookup).
"""
if search_field == 'case_id':
try:
case = CommCareCase.objects.get_case(search_id, domain)
if case.type == case_type:
return (case, None)
except CaseNotFound:
pass
elif search_field == EXTERNAL_ID:
try:
case = CommCareCase.objects.get_case_by_external_id(
domain, search_id, case_type=case_type, raise_multiple=True)
except CommCareCase.MultipleObjectsReturned:
return (None, LookupErrors.MultipleResults)
if case is not None:
return (case, None)
return (None, LookupErrors.NotFound)
def open_spreadsheet_download_ref(filename):
"""
open a spreadsheet download ref just to test there are no errors opening it
"""
with get_spreadsheet(filename):
pass
@contextmanager
def get_spreadsheet(filename):
try:
with open_any_workbook(filename) as workbook:
yield WorksheetWrapper.from_workbook(workbook)
except SpreadsheetFileEncrypted as e:
raise ImporterExcelFileEncrypted(str(e))
except SpreadsheetFileNotFound as e:
raise ImporterFileNotFound(str(e))
except SpreadsheetFileInvalidError as e:
raise ImporterExcelError(str(e))
def get_importer_error_message(e):
if isinstance(e, ImporterRefError):
# I'm not totally sure this is the right error, but it's what was being
# used before. (I think people were just calling _spreadsheet_expired
# or otherwise blaming expired sessions whenever anything unexpected
# happened though...)
return _('Sorry, your session has expired. Please start over and try again.')
elif isinstance(e, ImporterFileNotFound):
return _('The session containing the file you uploaded has expired. '
'Please upload a new one.')
elif isinstance(e, ImporterExcelFileEncrypted):
return _('The file you want to import is password protected. '
'Please choose a file that is not password protected.')
elif isinstance(e, ImporterExcelError):
return _("The file uploaded has the following error: {}").format(str(e))
elif isinstance(e, ImporterRawError):
return str(e)
else:
return _("Error: {}").format(str(e))
def exit_celery_with_error_message(task, error_message):
"""
Call this function and return the value from within a celery task to abort
with an error message that gets passed on in a way that case importer
will pick up and display.
Currently it doesn't return anything and does all its magic by manually
setting task metadata and raising Ignore,
but the internals could change to do this through a return value instead.
"""
update_task_state(task, states.FAILURE, get_interned_exception(error_message))
raise Ignore()
@memoized
def get_interned_exception(message):
"""
In tests, it's important that the error message is exactly the same object.
"""
return Exception(message)
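if __name__ == '__main__':
    # Illustrative round trip of ImporterConfig through JSON, mirroring how the
    # config is serialized before being handed to a celery task. Field values
    # below are placeholders, not real domain data.
    config = ImporterConfig(
        couch_user_id='demo-user-id',
        excel_fields=['name', 'age'],
        case_fields=['name', ''],
        custom_fields=['', 'age'],
        search_column='name',
        case_type='person',
        search_field=EXTERNAL_ID,
        create_new_cases=True,
    )
    restored = ImporterConfig.from_json(config.to_json())
    assert restored == config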
|
dimagi/commcare-hq
|
corehq/apps/case_importer/util.py
|
Python
|
bsd-3-clause
| 7,577
|
# Copyright 2017 Carlos Dauden <carlos.dauden@tecnativa.com>
# Copyright 2017 Thorsten Vocks <thorsten.vocks@openbig.org>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.tests.common import SavepointCase
from odoo.exceptions import ValidationError
class TestProductCatalogPrint(SavepointCase):
@classmethod
def setUpClass(cls):
super(TestProductCatalogPrint, cls).setUpClass()
cls.pricelist = cls.env.ref("product.list0")
cls.product = cls.env["product.product"].create(
{"name": "Product for test", "default_code": "TESTPROD01"}
)
cls.partner = cls.env["res.partner"].create(
{
"name": "Partner for test",
"property_product_pricelist": cls.pricelist.id,
}
)
cls.wiz_obj = cls.env["product.catalog.print"]
def test_defaults(self):
wiz = self.wiz_obj.new()
res = wiz.with_context(
active_model="product.pricelist", active_id=self.pricelist.id
).default_get([])
self.assertEqual(res["pricelist_id"], self.pricelist.id)
res = wiz.with_context(
active_model="res.partner", active_id=self.partner.id
).default_get([])
self.assertEqual(
res["pricelist_id"], self.partner.property_product_pricelist.id
)
res = wiz.with_context(
active_model="product.template",
active_ids=self.product.product_tmpl_id.ids,
).default_get([])
self.assertEqual(
res["product_tmpl_ids"][0][2], self.product.product_tmpl_id.ids
)
res = wiz.with_context(
active_model="product.product", active_ids=self.product.ids
).default_get([])
self.assertEqual(res["product_ids"][0][2], self.product.ids)
self.assertTrue(res["show_variants"])
with self.assertRaises(ValidationError):
wiz.print_report()
wiz.show_sale_price = True
res = wiz.print_report()
self.assertIn("report_name", res)
|
openbig/saleorderdetails
|
product_catalog_print/tests/test_product_catalog_print.py
|
Python
|
agpl-3.0
| 2,074
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='wrabbit',
version='0.1',
description='warren rabbitmq wrapper',
author='Jeremiah Campbell',
author_email='jeremy@meantheory.com',
url='https://github.com/warrenprotocol/wrabbit',
download_url='https://github.com/warrenprotocol/wrabbit/tarball/0.1',
license='MIT',
packages=['wrabbit',],
classifiers=[
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: MIT License',
'Development Status :: 4 - Beta'
],
)
|
meantheory/wrabbit
|
setup.py
|
Python
|
mit
| 572
|
"""Implements a Deep Belief Network."""
from dbm import *
class DBN(DBM):
def __init__(self, net, t_op=None, e_op=None):
rbm, upward_net, downward_net, junction_layers = DBN.SplitDBN(net)
self.rbm = DBM(rbm, t_op, e_op)
self.upward_net = NeuralNet(upward_net, t_op, e_op)
self.downward_net = NeuralNet(downward_net, t_op, e_op)
self.junction_layers = junction_layers
self.net = self.rbm.net
self.t_op = self.rbm.t_op
self.e_op = self.rbm.e_op
self.verbose = self.rbm.verbose
self.batchsize = self.t_op.batchsize
def CopyModelToCPU(self):
self.rbm.CopyModelToCPU()
def DeepCopy(self):
return CopyModel(self.rbm.net)
def Show(self):
"""Visualize the state of the layers and edges in the network."""
self.rbm.Show()
self.upward_net.Show()
self.downward_net.Show()
def PrintNetwork(self):
print 'RBM:'
self.rbm.PrintNetwork()
print 'Up:'
self.upward_net.PrintNetwork()
print 'Down:'
self.downward_net.PrintNetwork()
def ExchangeGlobalInfo(self):
for layer in self.rbm.layer:
layer.GetGlobalInfo(self)
for edge in self.rbm.edge:
edge.GetGlobalInfo(self)
@staticmethod
def SplitDBN(net):
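    # Split the DBN proto into an undirected RBM (layers touched by undirected
    # edges), plus upward (recognition) and downward (generative) feed-forward
    # nets over the directed edges, joined at the shared "junction" layers.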
#net = ReadModel(dbn_file)
rbm = deepnet_pb2.Model()
rbm.CopyFrom(net)
rbm.name = '%s_rbm' % net.name
rbm.model_type = deepnet_pb2.Model.DBM
directed_edges = []
undirected_edges = []
layer1 = set() # Layers that are touched by directed edges.
layer2 = set() # Layers that are touched by undirected edges.
for e in net.edge:
if e.directed:
directed_edges.append(e)
layer1.add(e.node1)
layer1.add(e.node2)
else:
undirected_edges.append(e)
layer2.add(e.node1)
layer2.add(e.node2)
junction_layers = list(layer1.intersection(layer2))
    # CONSTRUCT RBM.
del rbm.edge[:]
for e in undirected_edges:
rbm.edge.extend([e])
del rbm.layer[:]
for node in list(layer2):
l = next(l for l in net.layer if l.name == node)
layer = rbm.layer.add()
layer.CopyFrom(l)
if node in junction_layers:
layer.is_input = True
del layer.param[:]
for p in l.param:
if p.name == 'bias':
continue
elif p.name == 'bias_generative':
p_copy = layer.param.add()
p_copy.CopyFrom(p)
p_copy.name = 'bias'
else:
layer.param.extend([p])
    # CONSTRUCT DOWNWARD NET.
down_net = deepnet_pb2.Model()
down_net.CopyFrom(net)
down_net.name = '%s_downward_net' % net.name
down_net.model_type = deepnet_pb2.Model.FEED_FORWARD_NET
del down_net.edge[:]
for e in directed_edges:
down_net.edge.extend([e])
del down_net.layer[:]
for node in list(layer1):
l = next(l for l in net.layer if l.name == node)
layer_down = down_net.layer.add()
layer_down.CopyFrom(l)
if l.is_input:
layer_down.is_input = False
if node in junction_layers:
layer_down.is_input = True
del layer_down.param[:]
for p in l.param:
if p.name == 'bias':
continue
elif p.name == 'bias_generative':
p_copy = layer_down.param.add()
p_copy.CopyFrom(p)
p_copy.name = 'bias'
else:
layer_down.param.extend([p])
# CONSTRUCT UPWARD NET.
up_net = deepnet_pb2.Model()
up_net.CopyFrom(net)
up_net.name = '%s_upward_net' % net.name
up_net.model_type = deepnet_pb2.Model.FEED_FORWARD_NET
del up_net.edge[:]
for e in directed_edges:
e_up = DBN.ReverseEdge(e)
up_net.edge.extend([e_up])
del up_net.layer[:]
for node in list(layer1):
l = next(l for l in net.layer if l.name == node)
layer_up = up_net.layer.add()
layer_up.CopyFrom(l)
del layer_up.param[:]
for p in l.param:
if p.name == 'bias_generative':
continue
else:
layer_up.param.extend([p])
return rbm, up_net, down_net, junction_layers
@staticmethod
def ReverseEdge(e):
rev_e = deepnet_pb2.Edge()
rev_e.CopyFrom(e)
rev_e.node1 = e.node2
rev_e.node2 = e.node1
rev_e.up_factor = e.down_factor
rev_e.down_factor = e.up_factor
for p in rev_e.param:
if p.name == 'weight':
if p.initialization == deepnet_pb2.Parameter.PRETRAINED:
p.transpose_pretrained = not p.transpose_pretrained
elif p.mat:
mat = ParameterAsNumpy(p).T
p.mat = NumpyAsParameter(mat)
del p.dimensions
for dim in mat.shape:
p.dimensions.add(dim)
return rev_e
def LoadModelOnGPU(self, *args, **kwargs):
self.rbm.LoadModelOnGPU(*args, **kwargs)
self.upward_net.LoadModelOnGPU(*args, **kwargs)
self.downward_net.LoadModelOnGPU(*args, **kwargs)
self.TieUpNets()
def TieUpNets(self):
# Tie up nets.
for layer_name in self.junction_layers:
rbm_layer = next(l for l in self.rbm.layer if l.name == layer_name)
up_layer = next(l for l in self.upward_net.layer if l.name == layer_name)
down_layer = next(l for l in self.downward_net.layer if l.name == layer_name)
rbm_layer.data = up_layer.state
down_layer.data = rbm_layer.state
def ResetBatchsize(self, batchsize):
self.batchsize = batchsize
self.rbm.ResetBatchsize(batchsize)
self.upward_net.ResetBatchsize(batchsize)
self.downward_net.ResetBatchsize(batchsize)
self.TieUpNets()
def SetUpData(self, *args, **kwargs):
self.upward_net.SetUpData(*args, **kwargs)
self.train_data_handler = self.upward_net.train_data_handler
self.validation_data_handler = self.upward_net.validation_data_handler
self.test_data_handler = self.upward_net.test_data_handler
def GetBatch(self, handler=None):
if handler:
data_list = handler.Get()
if data_list[0].shape[1] != self.batchsize:
self.ResetBatchsize(data_list[0].shape[1])
for i, layer in enumerate(self.upward_net.datalayer):
layer.SetData(data_list[i])
for layer in self.upward_net.tied_datalayer:
layer.SetData(layer.tied_to.data)
def TrainOneBatch(self, step):
self.upward_net.ForwardPropagate(train=True, step=step)
return self.rbm.TrainOneBatch(step)
def PositivePhase(self, train=False, evaluate=False, step=0):
self.upward_net.ForwardPropagate(train=train, step=step)
return self.rbm.PositivePhase(train=train, evaluate=evaluate, step=step)
#self.downward_net.ForwardPropagate(train=train, step=step)
def NegativePhase(self, *args, **kwargs):
return self.rbm.NegativePhase(*args, **kwargs)
def Inference(self, steps, layernames, unclamped_layers, output_dir, memory='1G', dataset='test', method='gibbs'):
layers_to_infer = [self.GetLayerByName(l, down=True) for l in layernames]
layers_to_unclamp = [self.GetLayerByName(l) for l in unclamped_layers]
numdim_list = [layer.state.shape[0] for layer in layers_to_infer]
upward_net_unclamped_inputs = []
for l in layers_to_unclamp:
l.is_input = False
l.is_initialized = True
if l in self.rbm.layer:
self.rbm.pos_phase_order.append(l)
else:
upward_net_unclamped_inputs.append(l)
if dataset == 'train':
datagetter = self.GetTrainBatch
if self.train_data_handler is None:
return
numbatches = self.train_data_handler.num_batches
size = numbatches * self.train_data_handler.batchsize
elif dataset == 'validation':
datagetter = self.GetValidationBatch
if self.validation_data_handler is None:
return
numbatches = self.validation_data_handler.num_batches
size = numbatches * self.validation_data_handler.batchsize
elif dataset == 'test':
datagetter = self.GetTestBatch
if self.test_data_handler is None:
return
numbatches = self.test_data_handler.num_batches
size = numbatches * self.test_data_handler.batchsize
dw = DataWriter(layernames, output_dir, memory, numdim_list, size)
gibbs = method == 'gibbs'
mf = method == 'mf'
for batch in range(numbatches):
sys.stdout.write('\r%d' % (batch+1))
sys.stdout.flush()
datagetter()
for l in upward_net_unclamped_inputs:
l.data.assign(0)
self.upward_net.ForwardPropagate()
for node in self.rbm.node_list:
if node.is_input or node.is_initialized:
node.GetData()
if gibbs:
node.sample.assign(node.state)
else:
node.ResetState(rand=False)
for i in range(steps):
for node in self.rbm.pos_phase_order:
self.ComputeUp(node, use_samples=gibbs)
if gibbs:
node.Sample()
self.downward_net.ForwardPropagate()
output = [l.state.asarray().T for l in layers_to_infer]
dw.Submit(output)
sys.stdout.write('\n')
size = dw.Commit()
return size[0]
def GetLayerByName(self, layername, down=False):
layer = self.rbm.GetLayerByName(layername)
if layer is None:
if down:
layer = self.downward_net.GetLayerByName(layername)
else:
layer = self.upward_net.GetLayerByName(layername)
return layer
|
abdulqayyum/deepnet
|
deepnet/dbn.py
|
Python
|
bsd-3-clause
| 9,260
|
"""Users and groups.
"""
__docformat__ = 'restructuredtext en'
from .utils import *
from .enums import *
class User(Cached):
"""Represents a Skype user.
"""
_ValidateHandle = str
def __repr__(self):
return Cached.__repr__(self, 'Handle')
def _Property(self, PropName, Set=None, Cache=True):
return self._Owner._Property('USER', self.Handle, PropName, Set, Cache)
def SaveAvatarToFile(self, Filename, AvatarId=1):
"""Saves user avatar to a file.
:Parameters:
Filename : str
Destination path.
AvatarId : int
Avatar Id.
"""
s = 'USER %s AVATAR %s %s' % (self.Handle, AvatarId, path2unicode(Filename))
self._Owner._DoCommand('GET %s' % s, s)
def SetBuddyStatusPendingAuthorization(self, Text=''):
"""Sets the BuddyStaus property to `enums.budPendingAuthorization`
additionally specifying the authorization text.
:Parameters:
Text : unicode
The authorization text.
:see: `BuddyStatus`
"""
self._Property('BUDDYSTATUS', '%d %s' % (budPendingAuthorization, tounicode(Text)), Cache=False)
def _GetAbout(self):
return self._Property('ABOUT')
About = property(_GetAbout,
doc="""About text of the user.
:type: unicode
""")
def _GetAliases(self):
return split(self._Property('ALIASES'))
Aliases = property(_GetAliases,
doc="""Aliases of the user.
:type: list of str
""")
def _GetBirthday(self):
value = self._Property('BIRTHDAY')
if len(value) == 8:
from datetime import date
from time import strptime
return date(*strptime(value, '%Y%m%d')[:3])
Birthday = property(_GetBirthday,
doc="""Birthday of the user. None if not set.
:type: datetime.date or None
""")
def _GetBuddyStatus(self):
return int(self._Property('BUDDYSTATUS'))
def _SetBuddyStatus(self, Value):
self._Property('BUDDYSTATUS', int(Value), Cache=False)
BuddyStatus = property(_GetBuddyStatus, _SetBuddyStatus,
doc="""Buddy status of the user.
:type: `enums`.bud*
""")
def _GetCanLeaveVoicemail(self):
return (self._Property('CAN_LEAVE_VM') == 'TRUE')
CanLeaveVoicemail = property(_GetCanLeaveVoicemail,
doc="""Tells if it is possible to send voicemail to the user.
:type: bool
""")
def _GetCity(self):
return self._Property('CITY')
City = property(_GetCity,
doc="""City of the user.
:type: unicode
""")
def _GetCountry(self):
value = self._Property('COUNTRY')
if value:
if self._Owner.Protocol >= 4:
value = chop(value)[-1]
return value
Country = property(_GetCountry,
doc="""Country of the user.
:type: unicode
""")
def _GetCountryCode(self):
if self._Owner.Protocol < 4:
return ''
value = self._Property('COUNTRY')
if value:
value = chop(value)[0]
return str(value)
CountryCode = property(_GetCountryCode,
doc="""ISO country code of the user.
:type: str
""")
def _GetDisplayName(self):
return self._Property('DISPLAYNAME')
def _SetDisplayName(self, Value):
self._Property('DISPLAYNAME', Value)
DisplayName = property(_GetDisplayName, _SetDisplayName,
doc="""Display name of the user.
:type: unicode
""")
def _GetHandle(self):
return self._Handle
Handle = property(_GetHandle,
doc="""Skypename of the user.
:type: str
""")
def _GetFullName(self):
return self._Property('FULLNAME')
FullName = property(_GetFullName,
doc="""Full name of the user.
:type: unicode
""")
def _GetHasCallEquipment(self):
return self._Property('HASCALLEQUIPMENT') == 'TRUE'
HasCallEquipment = property(_GetHasCallEquipment,
doc="""Tells if the user has call equipment.
:type: bool
""")
def _GetHomepage(self):
return self._Property('HOMEPAGE')
Homepage = property(_GetHomepage,
doc="""Homepage URL of the user.
:type: unicode
""")
def _GetIsAuthorized(self):
return (self._Property('ISAUTHORIZED') == 'TRUE')
def _SetIsAuthorized(self, Value):
self._Property('ISAUTHORIZED', cndexp(Value, 'TRUE', 'FALSE'))
IsAuthorized = property(_GetIsAuthorized, _SetIsAuthorized,
doc="""Tells if the user is authorized to contact us.
:type: bool
""")
def _GetIsBlocked(self):
return (self._Property('ISBLOCKED') == 'TRUE')
def _SetIsBlocked(self, Value):
self._Property('ISBLOCKED', cndexp(Value, 'TRUE', 'FALSE'))
IsBlocked = property(_GetIsBlocked, _SetIsBlocked,
doc="""Tells whether this user is blocked or not.
:type: bool
""")
def _GetIsCallForwardActive(self):
return (self._Property('IS_CF_ACTIVE') == 'TRUE')
IsCallForwardActive = property(_GetIsCallForwardActive,
doc="""Tells whether the user has Call Forwarding activated or not.
:type: bool
""")
def _GetIsSkypeOutContact(self):
return (self.OnlineStatus == olsSkypeOut)
IsSkypeOutContact = property(_GetIsSkypeOutContact,
doc="""Tells whether a user is a SkypeOut contact.
:type: bool
""")
def _GetIsVideoCapable(self):
return (self._Property('IS_VIDEO_CAPABLE') == 'TRUE')
IsVideoCapable = property(_GetIsVideoCapable,
doc="""Tells if the user has video capability.
:type: bool
""")
def _GetIsVoicemailCapable(self):
return (self._Property('IS_VOICEMAIL_CAPABLE') == 'TRUE')
IsVoicemailCapable = property(_GetIsVoicemailCapable,
doc="""Tells if the user has voicemail capability.
:type: bool
""")
def _GetLanguage(self):
value = self._Property('LANGUAGE')
if value:
if self._Owner.Protocol >= 4:
value = chop(value)[-1]
return value
Language = property(_GetLanguage,
doc="""The language of the user.
:type: unicode
""")
def _GetLanguageCode(self):
if self._Owner.Protocol < 4:
return ''
value = self._Property('LANGUAGE')
if value:
value = chop(value)[0]
return str(value)
LanguageCode = property(_GetLanguageCode,
doc="""The ISO language code of the user.
:type: str
""")
def _GetLastOnline(self):
return float(self._Property('LASTONLINETIMESTAMP'))
LastOnline = property(_GetLastOnline,
doc="""The time when a user was last online as a timestamp.
:type: float
:see: `LastOnlineDatetime`
""")
def _GetLastOnlineDatetime(self):
from datetime import datetime
return datetime.fromtimestamp(self.LastOnline)
LastOnlineDatetime = property(_GetLastOnlineDatetime,
doc="""The time when a user was last online as a datetime.
:type: datetime.datetime
:see: `LastOnline`
""")
def _GetMoodText(self):
return self._Property('MOOD_TEXT')
MoodText = property(_GetMoodText,
doc="""Mood text of the user.
:type: unicode
""")
def _GetNumberOfAuthBuddies(self):
return int(self._Property('NROF_AUTHED_BUDDIES'))
NumberOfAuthBuddies = property(_GetNumberOfAuthBuddies,
doc="""Number of authenticated buddies in user's contact list.
:type: int
""")
def _GetOnlineStatus(self):
return str(self._Property('ONLINESTATUS'))
OnlineStatus = property(_GetOnlineStatus,
doc="""Online status of the user.
:type: `enums`.ols*
""")
def _GetPhoneHome(self):
return self._Property('PHONE_HOME')
PhoneHome = property(_GetPhoneHome,
doc="""Home telephone number of the user.
:type: unicode
""")
def _GetPhoneMobile(self):
return self._Property('PHONE_MOBILE')
PhoneMobile = property(_GetPhoneMobile,
doc="""Mobile telephone number of the user.
:type: unicode
""")
def _GetPhoneOffice(self):
return self._Property('PHONE_OFFICE')
PhoneOffice = property(_GetPhoneOffice,
doc="""Office telephone number of the user.
:type: unicode
""")
def _GetProvince(self):
return self._Property('PROVINCE')
Province = property(_GetProvince,
doc="""Province of the user.
:type: unicode
""")
def _GetReceivedAuthRequest(self):
return self._Property('RECEIVEDAUTHREQUEST')
ReceivedAuthRequest = property(_GetReceivedAuthRequest,
doc="""Text message for authorization request. Available only when user asks for authorization.
:type: unicode
""")
def _GetRichMoodText(self):
return self._Property('RICH_MOOD_TEXT')
RichMoodText = property(_GetRichMoodText,
doc="""Advanced version of `MoodText`.
:type: unicode
:see: https://developer.skype.com/Docs/ApiDoc/SET_PROFILE_RICH_MOOD_TEXT
""")
def _GetSex(self):
return str(self._Property('SEX'))
Sex = property(_GetSex,
doc="""Sex of the user.
:type: `enums`.usex*
""")
def _GetSpeedDial(self):
return self._Property('SPEEDDIAL')
def _SetSpeedDial(self, Value):
self._Property('SPEEDDIAL', Value)
SpeedDial = property(_GetSpeedDial, _SetSpeedDial,
doc="""Speed-dial code assigned to the user.
:type: unicode
""")
def _GetTimezone(self):
return int(self._Property('TIMEZONE'))
Timezone = property(_GetTimezone,
doc="""Timezone of the user in minutes from GMT.
:type: int
""")
class UserCollection(CachedCollection):
_CachedType = User
class Group(Cached):
"""Represents a group of Skype users.
"""
_ValidateHandle = int
def __repr__(self):
return Cached.__repr__(self, 'Id')
def _Alter(self, AlterName, Args=None):
return self._Owner._Alter('GROUP', self.Id, AlterName, Args)
def _Property(self, PropName, Value=None, Cache=True):
return self._Owner._Property('GROUP', self.Id, PropName, Value, Cache)
def Accept(self):
"""Accepts an invitation to join a shared contact group.
"""
self._Alter('ACCEPT')
def AddUser(self, Username):
"""Adds new a user to the group.
:Parameters:
Username : str
Skypename of the new user.
"""
self._Alter('ADDUSER', Username)
def Decline(self):
"""Declines an invitation to join a shared contact group.
"""
self._Alter('DECLINE')
def RemoveUser(self, Username):
"""Removes a user from the group.
:Parameters:
Username : str
Skypename of the user.
"""
self._Alter('REMOVEUSER', Username)
def Share(self, MessageText=''):
"""Shares a contact group.
:Parameters:
MessageText : unicode
Message text for group members.
"""
self._Alter('SHARE', MessageText)
def _GetCustomGroupId(self):
return str(self._Property('CUSTOM_GROUP_ID'))
CustomGroupId = property(_GetCustomGroupId,
doc="""Persistent group ID. The custom group ID is a persistent value that does not change.
:type: str
""")
def _GetDisplayName(self):
return self._Property('DISPLAYNAME')
def _SetDisplayName(self, Value):
self._Property('DISPLAYNAME', Value)
DisplayName = property(_GetDisplayName, _SetDisplayName,
doc="""Display name of the group.
:type: unicode
""")
def _GetId(self):
return self._Handle
Id = property(_GetId,
doc="""Group Id.
:type: int
""")
def _GetIsExpanded(self):
return self._Property('EXPANDED') == 'TRUE'
IsExpanded = property(_GetIsExpanded,
doc="""Tells if the group is expanded in the client.
:type: bool
""")
def _GetIsVisible(self):
return self._Property('VISIBLE') == 'TRUE'
IsVisible = property(_GetIsVisible,
doc="""Tells if the group is visible in the client.
:type: bool
""")
def _GetOnlineUsers(self):
return UserCollection(self._Owner, (x.Handle for x in self.Users if x.OnlineStatus == olsOnline))
OnlineUsers = property(_GetOnlineUsers,
doc="""Users of the group that are online
:type: `UserCollection`
""")
def _GetType(self):
return str(self._Property('TYPE'))
Type = property(_GetType,
doc="""Group type.
:type: `enums`.grp*
""")
def _GetUsers(self):
return UserCollection(self._Owner, split(self._Property('USERS', Cache=False), ', '))
Users = property(_GetUsers,
doc="""Users in this group.
:type: `UserCollection`
""")
class GroupCollection(CachedCollection):
_CachedType = Group
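# Illustrative usage sketch (not part of the original module). The User and
# Group wrappers above are normally reached through the top-level Skype object
# rather than instantiated directly; this assumes Skype4Py is installed and a
# Skype client is running. 'echo123' is Skype's public test account.
if __name__ == '__main__':
    import Skype4Py
    skype = Skype4Py.Skype()
    skype.Attach()                        # connect to the running Skype client
    echo = skype.User('echo123')          # a User instance, as defined above
    print('%s is %s' % (echo.FullName, echo.OnlineStatus))
    for group in skype.Groups:            # each item is a Group instance
        print('%s: %s' % (group.Id, group.DisplayName))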
|
FloatingGhost/skype4py
|
Skype4Py/user.py
|
Python
|
bsd-3-clause
| 12,859
|
# Copyright 2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime, timedelta
import time
from c7n.config import Bag
from c7n.output import metrics_outputs
from c7n_gcp.output import StackDriverMetrics
from gcp_common import BaseTest
class MetricsOutputTest(BaseTest):
def test_metrics_selector(self):
self.assertEqual(
metrics_outputs.get('gcp'),
StackDriverMetrics)
def test_metrics_output(self):
project_id = 'cloud-custodian'
factory = self.replay_flight_data('output-metrics', project_id=project_id)
ctx = Bag(session_factory=factory,
policy=Bag(name='custodian-works', resource_type='gcp.function'))
conf = Bag()
metrics = StackDriverMetrics(ctx, conf)
metrics.put_metric('ResourceCount', 43, 'Count', Scope='Policy')
metrics.flush()
if self.recording:
time.sleep(42)
session = factory()
client = session.client('monitoring', 'v3', 'projects.timeSeries')
results = client.execute_command(
'list', {
'name': 'projects/{}'.format(project_id),
'filter': 'metric.type="custom.googleapis.com/custodian/policy/resourcecount"',
'pageSize': 3,
'interval_startTime': (
datetime.utcnow() - timedelta(minutes=5)).isoformat('T') + 'Z',
'interval_endTime': datetime.utcnow().isoformat('T') + 'Z'
})
self.assertEqual(
results['timeSeries'],
[{u'metric': {
u'labels': {
u'policy': u'custodian-works',
u'project_id': u'cloud-custodian'},
u'type': u'custom.googleapis.com/custodian/policy/resourcecount'},
u'metricKind': u'GAUGE',
u'points': [{
u'interval': {
u'endTime': u'2018-08-12T22:30:53.524505Z',
u'startTime': u'2018-08-12T22:30:53.524505Z'},
u'value': {u'int64Value': u'43'}}],
u'resource': {
u'labels': {u'project_id': u'cloud-custodian'},
u'type': u'global'},
u'valueType': u'INT64'}])
def test_metrics_output_set_write_project_id(self):
project_id = 'cloud-custodian-sub'
write_project_id = 'cloud-custodian'
factory = self.replay_flight_data('output-metrics', project_id=project_id)
ctx = Bag(session_factory=factory,
policy=Bag(name='custodian-works', resource_type='gcp.function'))
conf = Bag(project_id=write_project_id)
metrics = StackDriverMetrics(ctx, conf)
metrics.put_metric('ResourceCount', 43, 'Count', Scope='Policy')
metrics.flush()
|
Sutto/cloud-custodian
|
tools/c7n_gcp/tests/test_output_gcp.py
|
Python
|
apache-2.0
| 3,349
|
"""
=============================================
Effect of varying threshold for self-training
=============================================
This example illustrates the effect of a varying threshold on self-training.
The `breast_cancer` dataset is loaded, and labels are deleted such that only 50
out of 569 samples have labels. A `SelfTrainingClassifier` is fitted on this
dataset, with varying thresholds.
The upper graph shows the number of labeled samples that the classifier has
available by the end of fitting, and the accuracy of the classifier. The lower
graph shows the last iteration in which a sample was labeled. All values are
cross validated with 3 folds.
At low thresholds (in [0.4, 0.5]), the classifier learns from samples that were
labeled with a low confidence. These low-confidence samples are likely to have
incorrectly predicted labels, and as a result, fitting on these incorrect labels
produces poor accuracy. Note that the classifier labels almost all of the
samples, and only takes one iteration.
For very high thresholds (in [0.9, 1)) we observe that the classifier does not
augment its dataset (the number of self-labeled samples is 0). As a result, the
accuracy achieved with a threshold of 0.9999 is the same as a normal supervised
classifier would achieve.
The optimal accuracy lies between these two extremes, at a threshold of
around 0.7.
"""
print(__doc__)
# Authors: Oliver Rausch <rauscho@ethz.ch>
# License: BSD
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.svm import SVC
from sklearn.model_selection import StratifiedKFold
from sklearn.semi_supervised import SelfTrainingClassifier
from sklearn.metrics import accuracy_score
from sklearn.utils import shuffle
n_splits = 3
X, y = datasets.load_breast_cancer(return_X_y=True)
X, y = shuffle(X, y, random_state=42)
y_true = y.copy()
y[50:] = -1
total_samples = y.shape[0]
base_classifier = SVC(probability=True, gamma=0.001, random_state=42)
x_values = np.arange(0.4, 1.05, 0.05)
x_values = np.append(x_values, 0.99999)
scores = np.empty((x_values.shape[0], n_splits))
amount_labeled = np.empty((x_values.shape[0], n_splits))
amount_iterations = np.empty((x_values.shape[0], n_splits))
for (i, threshold) in enumerate(x_values):
self_training_clf = SelfTrainingClassifier(base_classifier,
threshold=threshold)
# We need manual cross validation so that we don't treat -1 as a separate
# class when computing accuracy
skfolds = StratifiedKFold(n_splits=n_splits)
for fold, (train_index, test_index) in enumerate(skfolds.split(X, y)):
X_train = X[train_index]
y_train = y[train_index]
X_test = X[test_index]
y_test = y[test_index]
y_test_true = y_true[test_index]
self_training_clf.fit(X_train, y_train)
        # The number of labeled samples at the end of fitting
amount_labeled[i, fold] = total_samples - np.unique(
self_training_clf.labeled_iter_, return_counts=True)[1][0]
# The last iteration the classifier labeled a sample in
amount_iterations[i, fold] = np.max(self_training_clf.labeled_iter_)
y_pred = self_training_clf.predict(X_test)
scores[i, fold] = accuracy_score(y_test_true, y_pred)
ax1 = plt.subplot(211)
ax1.errorbar(x_values, scores.mean(axis=1),
yerr=scores.std(axis=1),
capsize=2, color='b')
ax1.set_ylabel('Accuracy', color='b')
ax1.tick_params('y', colors='b')
ax2 = ax1.twinx()
ax2.errorbar(x_values, amount_labeled.mean(axis=1),
yerr=amount_labeled.std(axis=1),
capsize=2, color='g')
ax2.set_ylim(bottom=0)
ax2.set_ylabel('Amount of labeled samples', color='g')
ax2.tick_params('y', colors='g')
ax3 = plt.subplot(212, sharex=ax1)
ax3.errorbar(x_values, amount_iterations.mean(axis=1),
yerr=amount_iterations.std(axis=1),
capsize=2, color='b')
ax3.set_ylim(bottom=0)
ax3.set_ylabel('Amount of iterations')
ax3.set_xlabel('Threshold')
plt.show()
|
glemaitre/scikit-learn
|
examples/semi_supervised/plot_self_training_varying_threshold.py
|
Python
|
bsd-3-clause
| 4,072
|
# -*- encoding: utf-8 -*-
import pytest
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.urlresolvers import reverse
from block.models import (
BlockError,
Document,
Link,
Url,
)
from block.tests.factories import (
DocumentFactory,
LinkCategory,
LinkCategoryFactory,
LinkFactory,
PageFactory,
UrlFactory,
)
from login.tests.factories import (
TEST_PASSWORD,
UserFactory,
)
@pytest.mark.django_db
def test_category_create(client):
user = UserFactory(is_staff=True)
assert client.login(username=user.username, password=TEST_PASSWORD) is True
url = reverse('block.link.category.create')
data = {
'name': 'Tennis',
}
response = client.post(url, data)
# check
assert 302 == response.status_code
expect = reverse('block.link.category.list')
assert expect in response['Location']
categories = LinkCategory.objects.all()
assert 1 == categories.count()
assert 'Tennis' == categories[0].name
@pytest.mark.django_db
def test_category_delete(client):
user = UserFactory(is_staff=True)
assert client.login(username=user.username, password=TEST_PASSWORD) is True
category = LinkCategoryFactory()
assert category.deleted is False
# test
url = reverse('block.link.category.delete', args=[category.pk])
response = client.post(url)
# check
assert 302 == response.status_code
expect = reverse('block.link.category.list')
assert expect in response['Location']
category.refresh_from_db()
assert category.deleted is True
@pytest.mark.django_db
def test_category_delete_exception(client):
"""Should not delete a category which is in use."""
user = UserFactory(is_staff=True)
assert client.login(username=user.username, password=TEST_PASSWORD) is True
category = LinkCategoryFactory()
LinkFactory(category=category)
assert category.deleted is False
# test
url = reverse('block.link.category.delete', args=[category.pk])
with pytest.raises(BlockError) as e:
client.post(url)
assert 'Cannot delete a link category which is in use' in str(e.value)
@pytest.mark.django_db
def test_category_update(client):
user = UserFactory(is_staff=True)
assert client.login(username=user.username, password=TEST_PASSWORD) is True
category = LinkCategoryFactory()
url = reverse('block.link.category.update', args=[category.pk])
data = {
'name': 'Cricket',
}
response = client.post(url, data)
# check
assert 302 == response.status_code
expect = reverse('block.link.category.list')
assert expect in response['Location']
category.refresh_from_db()
assert 'Cricket' == category.name
@pytest.mark.django_db
def test_link_delete(client):
user = UserFactory(is_staff=True)
assert client.login(
username=user.username, password=TEST_PASSWORD
) is True
link_1 = LinkFactory()
link_2 = LinkFactory()
link_3 = LinkFactory()
url = reverse('block.link.delete', args=[link_2.pk])
data = {}
response = client.post(url, data)
# check
assert 302 == response.status_code
expect = reverse('block.link.list')
assert expect in response['Location']
result = [link.pk for link in Link.objects.links()]
assert [link_1.pk, link_3.pk] == result
@pytest.mark.django_db
def test_link_external_update(client):
user = UserFactory(is_staff=True)
assert client.login(username=user.username, password=TEST_PASSWORD) is True
link = LinkFactory(link_type='u', url_external="https://google.com")
url = reverse('block.link.external.update', args=[link.pk])
data = {
'title': 'Football',
'url_external': 'http://www.bbc.co.uk/sport/football'
}
response = client.post(url, data)
# check
assert 302 == response.status_code
expect = reverse('block.link.list')
assert expect in response['Location']
link.refresh_from_db()
assert 'Football' == link.title
assert 'http://www.bbc.co.uk/sport/football' == link.url
@pytest.mark.django_db
def test_link_internal_update(client):
user = UserFactory(is_staff=True)
assert client.login(username=user.username, password=TEST_PASSWORD) is True
link = LinkFactory(link_type='r', url_internal=UrlFactory())
url = reverse('block.link.internal.update', args=[link.pk])
page = PageFactory(slug='football', slug_menu='')
new_url = UrlFactory(url_type=Url.PAGE, page=page)
data = {
'title': 'Football',
'url_internal': new_url.pk
}
response = client.post(url, data)
# check
assert 302 == response.status_code
expect = reverse('block.link.list')
assert expect in response['Location']
link.refresh_from_db()
assert 'Football' == link.title
assert '/football/' == link.url
def test_file():
"""create a file ready to upload."""
return SimpleUploadedFile.from_dict(
dict(filename='test.txt', content=bytes('abc', 'UTF-8'))
)
@pytest.mark.django_db
def test_link_document_create(client):
category = LinkCategoryFactory()
DocumentFactory()
# image = ImageFactory()
user = UserFactory(is_staff=True)
assert client.login(username=user.username, password=TEST_PASSWORD) is True
url = reverse('block.link.document.create')
# create a document ready to upload
data = {
'category': category.pk,
'document': test_file(),
'title': 'Cricket',
}
response = client.post(url, data)
# check
expect = reverse('block.link.list')
assert 302 == response.status_code
assert expect in response['Location']
link = Link.objects.get(title='Cricket')
assert 'Cricket' == link.title
assert category == link.category
assert link.document.deleted is False
# check a document has been added to the database
assert 2 == Document.objects.count()
@pytest.mark.django_db
def test_link_document_update(client):
category = LinkCategoryFactory()
DocumentFactory()
link = LinkFactory(link_type=Link.DOCUMENT, document=DocumentFactory())
user = UserFactory(is_staff=True)
assert client.login(username=user.username, password=TEST_PASSWORD) is True
url = reverse('block.link.document.update', args=[link.pk])
# create a document ready to upload
data = {
'category': category.pk,
'document': test_file(),
'title': 'Cricket',
}
response = client.post(url, data)
# check
expect = reverse('block.link.list')
assert 302 == response.status_code
assert expect in response['Location']
link = Link.objects.get(title='Cricket')
assert 'Cricket' == link.title
assert category == link.category
assert 'test.txt' == link.file_name
assert link.document.deleted is False
# test for partial name only
assert "/media/link/document/test" in link.url
# check a document has been added to the database
assert 2 == Document.objects.count()
@pytest.mark.django_db
def test_link_redirect(client):
ext_url = "http://example.com"
link = LinkFactory(link_type=Link.URL_EXTERNAL, url_external=ext_url)
user = UserFactory()
assert client.login(username=user.username, password=TEST_PASSWORD)
url = reverse('block.link.follow', args=[link.link_type, link.pk])
response = client.get(url)
# check
assert 302 == response.status_code
assert ext_url in response['Location']
@pytest.mark.django_db
def test_link_redirect_invalid(client):
ext_url = "http://example.com"
link = LinkFactory(link_type=Link.URL_EXTERNAL, url_external=ext_url)
user = UserFactory()
assert client.login(username=user.username, password=TEST_PASSWORD)
# type should be 'u' in this case so 'd' will raise a 404
url = reverse('block.link.follow', args=['d', link.pk])
response = client.get(url)
# check
assert 404 == response.status_code
|
pkimber/block
|
block/tests/test_view_link_library.py
|
Python
|
apache-2.0
| 7,936
|
import os.path
import os
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from utils import log, info, warning, error
from qgis.gui import QgsMessageBar
class DialogProvider(QObject):
"""
    A class to handle opening the user form and creating all the required bindings.
@note: There is a little too much work in this class. Needs a bit of a clean up.
"""
accepted = pyqtSignal()
rejected = pyqtSignal()
def __init__(self, canvas, iface):
QObject.__init__(self)
self.canvas = canvas
self.iface = iface
def openDialog(self, feature, layer, mandatory_fields=True):
"""
Opens a form for the given feature
@refactor: This really needs to be cleaned up.
"""
self.dialog = self.iface.getFeatureForm(layer, feature)
self.layer = layer
self.dialog.accepted.connect(self.accepted)
self.dialog.rejected.connect(self.rejected)
self.dialog.setModal(True)
fullscreen = self.dialog.property('fullscreen')
if fullscreen:
self.dialog.setWindowState(Qt.WindowFullScreen)
if self.dialog.exec_():
for value in feature.attributes():
info("New value {}".format(value))
if feature.id() > 0:
self.layer.updateFeature(feature)
else:
self.layer.addFeature(feature)
saved = self.layer.commitChanges()
if not saved:
self.iface.messageBar().pushMessage("Error",
"Error in saving changes. Contact administrator ",
QgsMessageBar.CRITICAL)
for e in self.layer.commitErrors(): error(e)
else:
self.iface.messageBar().pushMessage("Saved","Changes saved", QgsMessageBar.INFO, 2)
self.canvas.refresh()
else:
self.layer.rollBack()
self.layer.startEditing()
def selectingFromMap(self, message):
"""
        Put QMap into select-feature-from-map mode.
        Hides the dialog and shows the user a message.
"""
self.dialog.hide()
label = QLabel()
label.setText(message)
label.setStyleSheet('font: 75 30pt "MS Shell Dlg 2";color: rgb(231, 175, 62);')
self.item = self.canvas.scene().addWidget(label)
self.disableToolbars()
def featureSelected(self):
"""
Called once a feature has been selected. Shows the dialog back to the user.
"""
self.canvas.scene().removeItem(self.item)
self.dialog.show()
self.enableToolbars()
def moveImages(self):
""" Not currently working """
# After we commit we have to move the drawing into the correct path.
# TODO Use a custom field for the id name
# Images are saved under data/{layername}/images/{id}_{fieldname}
raise NotImplementedError
for image in self.binder.images.itervalues():
curdir = os.path.dirname(__file__)
id = self.feature.attributeMap()[self.layer.fieldNameIndex("UniqueID")].toString().toUpper()
log(id)
name = image.replace("drawingFor_", id + "_" )
imagename = os.path.join(curdir, "data", str(self.layer.name()), "images", \
os.path.basename(name))
path = os.path.dirname(imagename)
if not os.path.exists(path):
os.makedirs(path)
log(image)
log(imagename)
try:
os.rename(image, imagename)
except WindowsError, err:
os.remove(imagename)
os.rename(image, imagename)
def disableToolbars(self):
"""
Disable the toolbars in the main interface.
@refactor: Should be moved into qmap.py
"""
toolbars = self.iface.mainWindow().findChildren(QToolBar)
for toolbar in toolbars:
toolbar.setEnabled(False)
def enableToolbars(self):
"""
Enable the toolbars in the main interface.
@refactor: Should be moved into qmap.py
"""
toolbars = self.iface.mainWindow().findChildren(QToolBar)
for toolbar in toolbars:
toolbar.setEnabled(True)
|
NathanW2/qmap
|
src/qmap/dialog_provider.py
|
Python
|
gpl-2.0
| 4,539
|
"""
SpaceHub
Copyright (C) 2013 Ryan Brown <sb@ryansb.com>, Sam Lucidi <mansam@csh.rit.edu>,
Ross Delinger <rossdylan@csh.rit.edu>, Greg Jurman <jurman.greg@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from webob import Response, exc
import json
class _401(exc.HTTPError):
def __init__(self, msg='Unauthorized'):
body = {'status': 401, 'message': msg}
Response.__init__(self, json.dumps(body))
self.status = 401
self.content_type = 'application/json'
|
ryansb/spacehub
|
wsgi/spacehub/spacehub/errors.py
|
Python
|
agpl-3.0
| 1,133
|
# -*-coding:Utf-8 -*
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant l'ordre Virer."""
from secondaires.navigation.equipage.signaux import *
from ..ordre import *
class Virer(Ordre):
"""Ordre virer.
Cet ordre demande au matelot de faire virer le navire sur bâbord
ou tribord en fonction du besoin, jusqu'à ce qu'il soit dans une certaine
direction. Le matelot ciblé doit tenir le gouvernail.
"""
cle = "virer"
etats_autorises = ("tenir_gouvernail", )
def __init__(self, matelot, navire, direction=0):
Ordre.__init__(self, matelot, navire, direction)
self.direction = direction
def executer(self):
"""Exécute l'ordre : vire sur bâbord."""
navire = self.navire
matelot = self.matelot
personnage = matelot.personnage
salle = personnage.salle
direction = self.direction
nav_direction = navire.direction.direction
if not hasattr(salle, "gouvernail") or salle.gouvernail is None:
return
gouvernail = salle.gouvernail
if gouvernail.tenu is not personnage:
yield SignalInutile("je ne tiens pas ce gouvernail")
else:
par_babord = (nav_direction - direction) % 360
par_tribord = (direction - nav_direction) % 360
if par_tribord < par_babord:
cote = 1
else:
cote = -1
            # Adjust the rudder angle if necessary
direction_actuelle = round(nav_direction)
direction_voulue = round(direction)
diff = (direction_voulue - direction_actuelle) % 360
if diff > 180:
diff = 360 - diff
if diff == 0:
gouvernail.centrer(personnage)
yield SignalTermine()
elif diff < 5:
orientation = 1
elif diff < 15:
orientation = 3
else:
orientation = 5
if gouvernail.orientation != cote * orientation:
if cote == -1:
gouvernail.virer_babord(personnage, orientation, True)
else:
gouvernail.virer_tribord(personnage, orientation, True)
yield SignalRepete(1)
|
stormi/tsunami
|
src/secondaires/navigation/equipage/ordres/virer.py
|
Python
|
bsd-3-clause
| 3,807
|
from django.test import TestCase
class CollectionTests(TestCase):
pass
|
takeplace/django-composite
|
composite/tests/urls.py
|
Python
|
bsd-3-clause
| 77
|
# -*- coding: utf-8 -*-
#
# CoAPy documentation build configuration file, created by
# sphinx-quickstart on Sat Jul 17 15:46:19 2010.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'CoAPy'
copyright = u'2010, People Power Co.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0'
# The full version, including alpha/beta/rc tags.
release = '0.0.3-DEV'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = [ '_build' ]
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_use_modindex = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = ''
# Output file base name for HTML help builder.
htmlhelp_basename = 'CoAPydoc'
# -- Options for LaTeX output --------------------------------------------------
# The paper size ('letter' or 'a4').
#latex_paper_size = 'letter'
# The font size ('10pt', '11pt' or '12pt').
#latex_font_size = '10pt'
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'CoAPy.tex', u'CoAPy Documentation',
u'Peter A. Bigot', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# Additional stuff for the LaTeX preamble.
#latex_preamble = ''
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_use_modindex = True
intersphinx_mapping = {'http://docs.python.org/': None}
autoclass_content = 'both'
|
umeckel/FS_coapy
|
doc/conf.py
|
Python
|
bsd-3-clause
| 6,468
|
# Copyright (c) 2010 Franz Allan Valencia See
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import ConfigParser
except:
import configparser as ConfigParser
from robot.api import logger
import sqlalchemy
class ConnectionManager(object):
"""
    Connection Manager handles connecting to and disconnecting from the database.
"""
def __init__(self):
"""
        Initializes _engine and _dbconnection to None.
"""
self._engine = None
self._dbconnection = None
def connect_to_database(self, url, echo=False, **kwargs):
"""
Connect to the given database URL with SQLAlchemy.
See also:
- http://docs.sqlalchemy.org/en/latest/core/engines.html#database-urls
- http://docs.sqlalchemy.org/en/rel_1_0/core/engines.html#sqlalchemy.create_engine
Example usage:
| # Connect to an in-memory SQLite database |
| Create Engine | sqlite:///:memory: |
"""
self._engine = sqlalchemy.create_engine(url, echo=echo, **kwargs)
self._dbconnection = self._engine.connect()
@property
def db_api_module_name(self):
try:
return self._engine.driver
except:
return None
def disconnect_from_database(self):
"""
Disconnects from the database.
For example:
| Disconnect From Database | # disconnects from current connection to the database |
"""
self._dbconnection.close()
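# Illustrative usage sketch (not part of the original library). Inside Robot
# Framework these methods become keywords, but they can also be driven directly
# from Python; an in-memory SQLite URL is assumed here.
if __name__ == '__main__':
    manager = ConnectionManager()
    manager.connect_to_database('sqlite:///:memory:')
    print(manager.db_api_module_name)     # driver name reported by SQLAlchemy
    manager.disconnect_from_database()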
|
edbrannin/Robotframework-SQLAlchemy-Library
|
src/SQLAlchemyLibrary/connection_manager.py
|
Python
|
apache-2.0
| 1,995
|
# -*- coding: utf-8 -*-
"""
@brief test log(time=10s)
"""
import unittest
import numpy
from scipy.linalg.lapack import dgelss as scipy_dgelss # pylint: disable=E0611
from pyquickhelper.pycode import ExtTestCase
from cpyquickhelper.numbers.direct_blas_lapack import dgelss # pylint: disable=E0611
from cpyquickhelper.numbers.direct_blas_lapack import cblas_ddot, cblas_sdot # pylint: disable=E0611
from cpyquickhelper.numbers.direct_blas_lapack import ( # pylint: disable=E0611
cblas_daxpy, cblas_saxpy, cblas_daxpy_void, cblas_saxpy_void)
class TestDirectBlasLapack(ExtTestCase):
def test_dgels0(self):
A = numpy.array([[1., 1.], [2., 1.], [3., 1.]])
C = numpy.array([[-1., 2.]])
B = numpy.matmul(A, C.T)
____, x, ___, __, _, info = scipy_dgelss(A, B)
self.assertEqual(x.ravel()[:2], C.ravel())
A = A.T.copy()
info = dgelss(A, B)
self.assertEqual(info, 0)
self.assertEqual(B.ravel()[:2], x.ravel()[:2])
def test_dgels01(self):
A = numpy.array([[1., 1.], [2., 1.], [3., 1.]])
C = numpy.array([[-1., 2.]])
B = numpy.matmul(A, C.T)
C[0, 0] = -0.9
____, x, ___, __, _, info = scipy_dgelss(A, B)
A = A.T.copy()
info = dgelss(A, B)
self.assertEqual(info, 0)
self.assertEqual(B.ravel()[:2], x.ravel()[:2])
def test_dgels1(self):
A = numpy.array([[10., 1.], [12., 1.], [13., 1]])
B = numpy.array([[20., 22., 23.]]).T
____, x, ___, __, _, info = scipy_dgelss(A, B)
A = A.T.copy()
info = dgelss(A, B)
self.assertEqual(info, 0)
self.assertEqual(B.ravel()[:2], x.ravel()[:2])
def test_ddot(self):
A = numpy.array([1., 2., 3.])
B = numpy.array([-1., -2.2, 3.])
dot1 = A @ B
dot2 = cblas_ddot(A, B)
self.assertAlmostEqual(dot1, dot2, delta=1e-5)
def test_sdot(self):
A = numpy.array([1., 2., 3.], dtype=numpy.float32)
B = numpy.array([-1., -2.2, 3.], dtype=numpy.float32)
dot1 = A @ B
dot2 = cblas_sdot(A, B)
self.assertAlmostEqual(dot1, dot2)
def test_daxpy(self):
A = numpy.array([1., 2., 3.], dtype=numpy.float64)
B = numpy.array([-1., -2.2, 5], dtype=numpy.float64)
C = B + A * 5
cblas_daxpy(A, B, 5)
self.assertEqualArray(C, B)
def test_saxpy(self):
A = numpy.array([1., 2., 3.], dtype=numpy.float32)
B = numpy.array([-1., -2.2, 5], dtype=numpy.float32)
C = B + A * 5
cblas_saxpy(A, B, 5)
self.assertEqualArray(C, B)
def test_daxpy_void(self):
A = numpy.array([1., 2., 3.], dtype=numpy.float64)
B = numpy.array([-1., -2.2, 5], dtype=numpy.float64)
C = B + A * 5
pA, _ = A.__array_interface__['data'] # pylint: disable=E1101
pB, _ = B.__array_interface__['data'] # pylint: disable=E1101
cblas_daxpy_void(3, pA, pB, 5)
self.assertEqualArray(C, B)
def test_saxpy_void(self):
A = numpy.array([1., 2., 3.], dtype=numpy.float32)
B = numpy.array([-1., -2.2, 5], dtype=numpy.float32)
C = B + A * 5
pA, _ = A.__array_interface__['data'] # pylint: disable=E1101
pB, _ = B.__array_interface__['data'] # pylint: disable=E1101
cblas_saxpy_void(3, pA, pB, 5)
self.assertEqualArray(C, B)
if __name__ == "__main__":
unittest.main()
|
sdpython/cpyquickhelper
|
_unittests/ut_numbers/test_direct_blas_lapack.py
|
Python
|
mit
| 3,451
|
from slackrtm.channel import Channel
import pytest
def test_Channel(channel):
assert type(channel) == Channel
@pytest.mark.xfail
def test_Channel_send_message(channel):
channel.send_message('hi')
|
llimllib/slackrtm
|
tests/test_channel.py
|
Python
|
mit
| 206
|
from datetime import timedelta, datetime
import json
import logging
from random import random
import re
import dateutil.parser
from django.apps import apps
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from django.db.models import Q, F
from django.db.models.signals import post_delete, post_save
from django.utils import timezone
from cmj.sigad.models import Documento
from cmj.signals import Manutencao
from cmj.videos.functions import pull_youtube_metadata_video, pull_youtube,\
vincular_sistema_aos_videos, video_documento_na_galeria
from cmj.videos.models import Video, PullYoutube, VideoParte, PullExec
import requests as rq
def _get_registration_key(model):
return '%s_%s' % (model._meta.app_label, model._meta.model_name)
class Command(BaseCommand):
def handle(self, *args, **options):
m = Manutencao()
post_delete.disconnect(dispatch_uid='sapl_post_delete_signal')
post_save.disconnect(dispatch_uid='sapl_post_save_signal')
post_delete.disconnect(dispatch_uid='cmj_post_delete_signal')
post_save.disconnect(dispatch_uid='cmj_post_save_signal')
self.logger = logging.getLogger(__name__)
for v in Video.objects.order_by('-id')[:2]:
print(v.id, v)
pull_youtube_metadata_video(v)
continue
for vp in v.videoparte_set.all():
d = vp.content_object
if not d:
vp.delete()
continue
if vp.content_type_id == 202:
d.delete()
vp.delete()
if not v.videoparte_set.exists():
v.delete()
continue
return
# Video.objects.all().update(created=F('modified'))
# PullYoutube.objects.pull_from_date()
PullExec.objects.timedelta_quota_pull()
# self.corrigir_erro_causado_em_full_metadata()
pull_youtube()
vincular_sistema_aos_videos()
video_documento_na_galeria()
# return
# vincular_sistema_aos_videos()
# video_documento_na_galeria()
# pull_youtube_metadata_video(Video.objects.first())
return
m.desativa_auto_now()
# self.corrigir_erro_causado_em_full_metadata()
# return
"""upcoming_or_live = Video.objects.filter(
json__snippet__liveBroadcastContent__in=('upcoming', 'live')).exists()
if upcoming_or_live:
delay = timezone.now() + timedelta(seconds=10)
task_pull_youtube.apply_async((upcoming_or_live,), eta=delay)
return"""
if not settings.DEBUG:
self.pull_youtube()
self.get_full_metadata_video()
# return
m.ativa_auto_now()
self.vincular_sistema_aos_videos()
self.video_documento_na_galeria()
def get_full_metadata_video(self):
videos = Video.objects.all(
).order_by('execucao', '-created')
# json__snippet__liveBroadcastContent__in=('upcoming', 'live')
videos = videos[:100]
#now = timezone.now()
for v in videos:
print(v.id, v.vid, v)
try:
pull_youtube_metadata_video(v)
except:
pass
"""upcoming_or_live = Video.objects.filter(
json__snippet__liveBroadcastContent__in=('upcoming', 'live'))
if upcoming_or_live.exists():
v = upcoming_or_live.first()
td = now - v.modified
if td.total_seconds() > 600:
delay = timezone.now() + timedelta(seconds=30)
task_pull_youtube.apply_async(eta=delay)"""
def corrigir_erro_causado_em_full_metadata(self):
videos = Video.objects.all()
for v in videos:
for vp in v.videoparte_set.all():
if isinstance(vp.content_object, Documento):
d = vp.content_object
# if d.classe_id == 233:
# continue
for r in d.revisoes.all():
if not r.user:
continue
if r.user.id == 76:
d.titulo = r.obj[0]['fields']['titulo']
d.descricao = r.obj[0]['fields']['descricao']
d.save()
print(r.id, r.user.id, r.user, d.id, d)
break
|
cmjatai/cmj
|
cmj/videos/management/commands/pull_youtube.py
|
Python
|
gpl-3.0
| 4,572
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '/home/yc/code/calibre/calibre/src/calibre/gui2/dialogs/search_item.ui'
#
# Created: Thu Oct 25 16:54:55 2012
# by: PyQt4 UI code generator 4.8.5
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(400, 39)
Form.setWindowTitle(_("Form"))
self.hboxlayout = QtGui.QHBoxLayout(Form)
self.hboxlayout.setObjectName(_fromUtf8("hboxlayout"))
self.field = QtGui.QComboBox(Form)
self.field.setObjectName(_fromUtf8("field"))
self.hboxlayout.addWidget(self.field)
self.label = QtGui.QLabel(Form)
self.label.setText(_("contains"))
self.label.setObjectName(_fromUtf8("label"))
self.hboxlayout.addWidget(self.label)
self.text = QtGui.QLineEdit(Form)
self.text.setToolTip(_("The text to search for. It is interpreted as a regular expression."))
self.text.setObjectName(_fromUtf8("text"))
self.hboxlayout.addWidget(self.text)
self.negate = QtGui.QCheckBox(Form)
self.negate.setToolTip(_("<p>Negate this match. That is, only return results that <b>do not</b> match this query."))
self.negate.setText(_("Negate"))
self.negate.setObjectName(_fromUtf8("negate"))
self.hboxlayout.addWidget(self.negate)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
pass
|
yeyanchao/calibre
|
src/calibre/gui2/dialogs/search_item_ui.py
|
Python
|
gpl-3.0
| 1,707
|
from django.contrib.auth.decorators import user_passes_test
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.urlresolvers import reverse_lazy
def sadmin_prerequisites(function):
actual_decorator = user_passes_test(
lambda u: u.is_authenticated() and u.is_staff and u.is_superuser,
login_url=reverse_lazy('sadmin2:login'),
redirect_field_name=REDIRECT_FIELD_NAME
)
return actual_decorator(function)
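# Illustrative sketch (not part of the original module), assuming Django is set
# up as in the imports above: any view wrapped with the decorator is reachable
# only by authenticated staff superusers. The view below is invented; a real
# one would return an HttpResponse.
@sadmin_prerequisites
def _example_dashboard(request):
    return 'staff superusers only'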
|
animekita/selvbetjening
|
selvbetjening/sadmin2/decorators.py
|
Python
|
mit
| 457
|
class TreeAsBin:
def __init__(self, key, child = None, sibling = None):
self.key = key
self.child = child
self.sibling = sibling
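# Illustrative sketch (not part of the original file). TreeAsBin is the
# left-child / right-sibling encoding of a general tree: `child` points to the
# first child, `sibling` to the next sibling. The tree below (root 1 with
# children 2 and 3, node 4 a child of 2) is invented for the demonstration.
if __name__ == '__main__':
    tree = TreeAsBin(1, child=TreeAsBin(2, child=TreeAsBin(4),
                                        sibling=TreeAsBin(3)))
    def size(node):
        # count the node itself, its first-child subtree and its sibling chain
        if node is None:
            return 0
        return 1 + size(node.child) + size(node.sibling)
    print(size(tree))  # 4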
|
omar94250/Algo-Epita
|
TreeAsBin.py
|
Python
|
gpl-3.0
| 158
|
""" Serial communication with Korad KA3xxxP power supplies.
The intent is to give easy access to the power supply as Python objects, eliminating the need to know
special codes.
The object supports the python `with` statement to release the serial port automatically:
from koradserial import KoradSerial
with KoradSerial('/dev/tty.usbmodemfd121') as device:
print "Model: ", device.model
print "Status: ", device.status
LICENSE: MIT
RESOURCES:
http://www.eevblog.com/forum/testgear/power-supply-ps3005d-ka3005d-rs232-protocol/
http://www.eevblog.com/forum/testgear/korad-ka3005p-io-commands/
http://sigrok.org/wiki/Velleman_PS3005D
https://gist.github.com/k-nowicki/5379272
"""
from __future__ import print_function, unicode_literals
from enum import Enum
from time import sleep
import serial
__all__ = ['KoradSerial', 'ChannelMode', 'OnOffState', 'Tracking']
class ChannelMode(Enum):
""" Represents channel modes.
These values should correspond to the values returned by the ``STATUS?`` command.
"""
constant_current = 0
constant_voltage = 1
class OnOffState(Enum):
""" Represents on/off states.
This could just as easily be done as a Boolean, but is explicit.
"""
off = 0
on = 1
class Tracking(Enum):
""" Tracking state for a multi-channel power supply.
These values should correspond to the values returned by the ``STATUS?`` command.
There seems to be conflicting information about these values.
The other values I've seen are:
* 0 - independent
* 1 - series
* 2 - parallel
* 3 - symmetric
However, I don't have a multi-channel power supply to test these.
"""
independent = 0
series = 1
parallel = 3
class Status(object):
""" Decode the KoradSerial status byte.
It appears that the firmware is a little wonky here.
SOURCE:
Taken from http://www.eevblog.com/forum/testgear/korad-ka3005p-io-commands/
Contents 8 bits in the following format
Bit Item Description
0 CH1 0=CC mode, 1=CV mode
1 CH2 0=CC mode, 1=CV mode
2, 3 Tracking 00=Independent, 01=Tracking series,11=Tracking parallel
4 Beep 0=Off, 1=On
5 Lock 0=Lock, 1=Unlock
6 Output 0=Off, 1=On
7 N/A N/A
"""
def __init__(self, status):
""" Initialize object with a KoradSerial status character.
:param status: Status value
:type status: int
"""
super(Status, self).__init__()
self.raw = status
self.channel1 = ChannelMode(status & 1)
self.channel2 = ChannelMode((status >> 1) & 1)
self.tracking = Tracking((status >> 2) & 3)
self.beep = OnOffState((status >> 4) & 1)
self.lock = OnOffState((status >> 5) & 1)
self.output = OnOffState((status >> 6) & 1)
def __repr__(self):
return "{0}".format(self.raw)
def __str__(self):
return "Channel 1: {0}, Channel 2: {1}, Tracking: {2}, Beep: {3}, Lock: {4}, Output: {5}".format(
self.channel1.name,
self.channel2.name,
self.tracking.name,
self.beep.name,
self.lock.name,
self.output.name,
)
def __unicode__(self):
return self.__str__()
def float_or_none(value):
try:
return float(value)
except (TypeError, ValueError):
return None
class KoradSerial(object):
""" Wrapper for communicating with a programmable KoradSerial KA3xxxxP power supply as a serial interface.
"""
class Channel(object):
""" Wrap a channel. """
def __init__(self, serial_, channel_number):
"""
:type serial_: KoradSerial.Serial
:type channel_number: int
"""
super(KoradSerial.Channel, self).__init__()
self.__serial = serial_
self.number = channel_number
@property
def current(self):
result = self.__serial.send_receive("ISET{0}?".format(self.number), fixed_length=6)
            # There's a bug that returns a 6th character of the previous output.
            # It has to be read and discarded, otherwise it will be prepended to the next output.
return float_or_none(result[:5])
@current.setter
def current(self, value):
self.__serial.send("ISET{0}:{1:05.3f}".format(self.number, value))
@property
def voltage(self):
return float_or_none(self.__serial.send_receive("VSET{0}?".format(self.number), fixed_length=5))
@voltage.setter
def voltage(self, value):
self.__serial.send("VSET{0}:{1:05.2f}".format(self.number, value))
@property
def output_current(self):
""" Retrieve this channel's current current output.
:return: Amperes
:rtype: float or None
"""
result = self.__serial.send_receive("IOUT{0}?".format(self.number), fixed_length=5)
return float_or_none(result)
@property
def output_voltage(self):
""" Retrieve this channel's current current voltage.
:return: Volts
:rtype: float or None
"""
result = self.__serial.send_receive("VOUT{0}?".format(self.number), fixed_length=5)
return float_or_none(result)
class Memory(object):
""" Wrap a memory setting. """
def __init__(self, serial_, memory_number):
super(KoradSerial.Memory, self).__init__()
self.__serial = serial_
self.number = memory_number
def recall(self):
""" Recall this memory's settings. """
self.__serial.send("RCL{0}".format(self.number))
def save(self):
""" Save the current voltage and current to this memory. """
self.__serial.send("SAV{0}".format(self.number))
class OnOffButton(object):
""" Wrap an off/off button. """
def __init__(self, serial_, on_command, off_command):
super(KoradSerial.OnOffButton, self).__init__()
self.__serial = serial_
self._on = on_command
self._off = off_command
def on(self):
self.__serial.send(self._on)
def off(self):
self.__serial.send(self._off)
class Serial(object):
""" Serial operations.
There are some quirky things in communication. They go here.
"""
def __init__(self, port, debug=False):
super(KoradSerial.Serial, self).__init__()
self.debug = debug
self.port = serial.Serial(port, 9600, timeout=1)
def read_character(self):
c = self.port.read(1).decode('ascii')
if self.debug:
if len(c) > 0:
print("read: {0} = '{1}'".format(ord(c), c))
else:
print("read: timeout")
return c
def read_string(self, fixed_length=None):
""" Read a string.
It appears that the KoradSerial PSU returns zero-terminated strings.
:return: str
"""
result = []
c = self.read_character()
while len(c) > 0 and ord(c) != 0:
result.append(c)
if fixed_length is not None and len(result) == fixed_length:
break
c = self.read_character()
return ''.join(result)
def send(self, text):
if self.debug:
print("_send: ", text)
sleep(0.1)
self.port.write(text.encode('ascii'))
def send_receive(self, text, fixed_length=None):
self.send(text)
return self.read_string(fixed_length)
def __init__(self, port, debug=False):
super(KoradSerial, self).__init__()
self.__serial = KoradSerial.Serial(port, debug)
# Channels: adjust voltage and current, discover current output voltage.
self.channels = [KoradSerial.Channel(self.__serial, i) for i in range(1, 3)]
# Memory recall/save buttons 1 through 5
self.memories = [KoradSerial.Memory(self.__serial, i) for i in range(1, 6)]
# Second column buttons
self.beep = KoradSerial.OnOffButton(self.__serial, "BEEP1", "BEEP0")
self.output = KoradSerial.OnOffButton(self.__serial, "OUT1", "OUT0")
self.over_current_protection = KoradSerial.OnOffButton(self.__serial, "OCP1", "OCP0")
self.over_voltage_protection = KoradSerial.OnOffButton(self.__serial, "OVP1", "OVP0")
def __enter__(self):
""" See documentation for Python's ``with`` command.
"""
return self
def __exit__(self, type, value, traceback):
""" See documentation for Python's ``with`` command.
"""
self.close()
return False
# ################################################################################
# Serial operations
# ################################################################################
@property
def is_open(self):
""" Report whether the serial port is open.
:rtype: bool
"""
return self.__serial.port.isOpen()
def close(self):
""" Close the serial port """
self.__serial.port.close()
def open(self):
""" Open the serial port """
self.__serial.port.open()
# ################################################################################
# Power supply operations
# ################################################################################
@property
def model(self):
""" Report the power supply model information.
:rtype: str
"""
return self.__serial.send_receive("*IDN?")
@property
def status(self):
""" Report the power supply status.
:rtype: KoradSerial.Status or None
"""
self.__serial.send("STATUS?")
status = self.__serial.read_character()
if len(status) == 0:
return None
else:
return Status(ord(status))
def track(self, value):
""" Set tracking mode.
        This does nothing on a single-channel power supply.
:param value: Tracking mode to set.
:type value: Tracking
"""
translate = {
Tracking.independent: "TRACK0",
Tracking.series: "TRACK1",
Tracking.parallel: "TRACK2",
}
if value in translate:
self.__serial.send(translate[value])
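# Illustrative usage sketch (not part of the original module): decode an
# invented status byte offline, then drive a supply if one is attached. The
# serial port name '/dev/ttyACM0' is an assumption and differs per machine.
if __name__ == '__main__':
    print(Status(0x51))  # bit 0 -> CH1 constant_voltage, bit 4 -> beep on, bit 6 -> output on
    with KoradSerial('/dev/ttyACM0') as supply:
        channel = supply.channels[0]
        channel.voltage = 12.0
        channel.current = 0.5
        supply.output.on()
        print(supply.model, channel.output_voltage, channel.output_current)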
|
starforgelabs/py-korad-serial
|
koradserial.py
|
Python
|
mit
| 10,720
|
import _plotly_utils.basevalidators
class TextsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(self, plotly_name="textsrc", parent_name="histogram", **kwargs):
super(TextsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
plotly/python-api
|
packages/python/plotly/plotly/validators/histogram/_textsrc.py
|
Python
|
mit
| 440
|
from __future__ import absolute_import
from __future__ import with_statement
from mock import Mock, patch
from celery import Celery
from celery.bin.camqadm import (
AMQPAdmin,
AMQShell,
dump_message,
AMQPAdminCommand,
camqadm,
main,
)
from celery.tests.utils import AppCase, WhateverIO
class test_AMQShell(AppCase):
def setup(self):
self.fh = WhateverIO()
self.app = Celery(broker='memory://', set_as_current=False)
self.adm = self.create_adm()
self.shell = AMQShell(connect=self.adm.connect, out=self.fh)
def create_adm(self, *args, **kwargs):
return AMQPAdmin(app=self.app, out=self.fh, *args, **kwargs)
def test_queue_declare(self):
self.shell.onecmd('queue.declare foo')
self.assertIn('ok', self.fh.getvalue())
def test_missing_command(self):
self.shell.onecmd('foo foo')
self.assertIn('unknown syntax', self.fh.getvalue())
def RV(self):
raise Exception(self.fh.getvalue())
def test_missing_namespace(self):
self.shell.onecmd('ns.cmd arg')
self.assertIn('unknown syntax', self.fh.getvalue())
def test_help(self):
self.shell.onecmd('help')
self.assertIn('Example:', self.fh.getvalue())
def test_help_command(self):
self.shell.onecmd('help queue.declare')
self.assertIn('passive:no', self.fh.getvalue())
def test_help_unknown_command(self):
self.shell.onecmd('help foo.baz')
self.assertIn('unknown syntax', self.fh.getvalue())
def test_exit(self):
with self.assertRaises(SystemExit):
self.shell.onecmd('exit')
self.assertIn("don't leave!", self.fh.getvalue())
def test_note_silent(self):
self.shell.silent = True
self.shell.note('foo bar')
self.assertNotIn('foo bar', self.fh.getvalue())
def test_reconnect(self):
self.shell.onecmd('queue.declare foo')
self.shell.needs_reconnect = True
self.shell.onecmd('queue.delete foo')
def test_completenames(self):
self.assertEqual(
self.shell.completenames('queue.dec'),
['queue.declare'],
)
self.assertEqual(
self.shell.completenames('declare'),
['queue.declare', 'exchange.declare'],
)
def test_empty_line(self):
self.shell.emptyline = Mock()
self.shell.default = Mock()
self.shell.onecmd('')
self.shell.emptyline.assert_called_with()
self.shell.onecmd('foo')
self.shell.default.assert_called_with('foo')
def test_respond(self):
self.shell.respond({'foo': 'bar'})
self.assertIn('foo', self.fh.getvalue())
def test_prompt(self):
self.assertTrue(self.shell.prompt)
def test_no_returns(self):
self.shell.onecmd('queue.declare foo')
self.shell.onecmd('exchange.declare bar direct yes')
self.shell.onecmd('queue.bind foo bar baz')
self.shell.onecmd('basic.ack 1')
def test_dump_message(self):
m = Mock()
m.body = 'the quick brown fox'
m.properties = {'a': 1}
m.delivery_info = {'exchange': 'bar'}
self.assertTrue(dump_message(m))
def test_dump_message_no_message(self):
self.assertIn('No messages in queue', dump_message(None))
def test_note(self):
self.adm.silent = True
self.adm.note('FOO')
self.assertNotIn('FOO', self.fh.getvalue())
def test_run(self):
a = self.create_adm('queue.declare foo')
a.run()
self.assertIn('ok', self.fh.getvalue())
def test_run_loop(self):
a = self.create_adm()
a.Shell = Mock()
shell = a.Shell.return_value = Mock()
shell.cmdloop = Mock()
a.run()
shell.cmdloop.assert_called_with()
shell.cmdloop.side_effect = KeyboardInterrupt()
a.run()
self.assertIn('bibi', self.fh.getvalue())
@patch('celery.bin.camqadm.AMQPAdminCommand')
def test_main(self, Command):
c = Command.return_value = Mock()
main()
c.execute_from_commandline.assert_called_with()
@patch('celery.bin.camqadm.AMQPAdmin')
def test_camqadm(self, cls):
c = cls.return_value = Mock()
camqadm()
c.run.assert_called_with()
@patch('celery.bin.camqadm.AMQPAdmin')
def test_AMQPAdminCommand(self, cls):
c = cls.return_value = Mock()
camqadm()
c.run.assert_called_with()
x = AMQPAdminCommand(app=self.app)
x.run()
self.assertIs(cls.call_args[1]['app'], self.app)
c.run.assert_called_with()
|
mozilla/firefox-flicks
|
vendor-local/lib/python/celery/tests/bin/test_camqadm.py
|
Python
|
bsd-3-clause
| 4,659
|
import re
import sys
if sys.version_info < (3,):
from urllib2 import urlopen, Request
from urlparse import urljoin
else:
from urllib.request import urlopen, Request
from urllib.parse import urljoin
class InvalidID(Exception):
pass
class InvalidHost(Exception):
pass
class BaseExtractor(object):
def __init__(self):
self.regex_url = None
self.host_list = None
self.holder_url = None
self.example_urls = None
def is_valid_host(self, host):
return host in self.host_list
def is_valid_url(self, url):
return re.match(self.regex_url, url)
def get_id(self, url):
if self.is_valid_url(url):
try:
                return re.match(self.regex_url, url).group('id')
except:
raise InvalidID('Not a valid id')
def get_host(self, url):
if self.is_valid_url(url):
try:
                return re.match(self.regex_url, url).group('host')
except:
raise InvalidHost('Not a valid host')
@property
def name(self):
return self.__class__.__name__.lower()
def __str__(self):
s = "{0}(name={1}, host_list={2})".format(
self.__class__.__name__,
self.name,
self.host_list
)
return s
def fetch_page(self, url, extra_path=None):
""" Download page using default user agent, read it and return its content
If extra_path is given, it appends this path to url before request
"""
if extra_path:
url = urljoin(url, extra_path)
user_agent = 'Mozilla/5.0 (X11; Linux x86_64; rv:24.0) Gecko/20100101 Firefox/24.0'
headers = {'User-Agent': user_agent}
req = Request(url, data=None, headers=headers)
response = urlopen(req)
content = response.read()
return content
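# Illustrative sketch of a hypothetical subclass, not part of the original module:
# it shows how the named groups 'host' and 'id' in regex_url are consumed by
# get_host() and get_id() above. The URL pattern and host names are invented.
class ExampleExtractor(BaseExtractor):
    def __init__(self):
        BaseExtractor.__init__(self)
        self.host_list = ['example.com', 'www.example.com']
        self.regex_url = r'https?://(?P<host>[^/]+)/videos/(?P<id>\d+)'
        self.example_urls = ['http://example.com/videos/12345']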
|
marcwebbie/pycis
|
pycis/extractors/base_extractor.py
|
Python
|
mit
| 1,922
|
from django.core.urlresolvers import reverse
def test_role_merge_page(admin_webtest_client, factories):
role = factories.RoleFactory()
url = reverse('admin:role-merge', kwargs={
'department_pk': role.department_id,
'pk': role.pk,
})
response = admin_webtest_client.get(url)
assert response.status_code == 200
def test_role_merging(admin_webtest_client, factories, models):
role_a = factories.RoleFactory(name='a')
factories.ShiftFactory(role=role_a)
role_b = factories.RoleFactory(name='b')
factories.ShiftFactory(role=role_b)
assert role_a != role_b
# sanity check
assert role_a.shifts.count() == 1
assert role_b.shifts.count() == 1
url = reverse('admin:role-merge', kwargs={
'department_pk': role_a.department_id,
'pk': role_a.pk,
})
response = admin_webtest_client.get(url)
response.forms[1]['role'] = role_b.pk
response.forms[1]['verify'] = role_a.name
form_response = response.forms[1].submit()
assert form_response.status_code == 302
assert not models.Role.objects.filter(pk=role_a.pk).exists()
assert role_b.shifts.count() == 2
|
Apogaea/voldb
|
tests/departments/admin/test_role_merge_page.py
|
Python
|
gpl-3.0
| 1,165
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import sys
import time
import traceback
import logging
from datetime import datetime, timedelta
import pymongo
from pylons import tmpl_context as c, app_globals as g
from tg import config
from paste.deploy.converters import asbool
import ming
from ming.utils import LazyProperty
from ming import schema as S
from ming.orm import session, FieldProperty
from ming.orm.declarative import MappedClass
from allura.lib.helpers import log_output
from .session import task_orm_session
log = logging.getLogger(__name__)
class MonQTask(MappedClass):
'''Task to be executed by the taskd daemon.
Properties
- _id - bson.ObjectId() for this task
- state - 'ready', 'busy', 'error', 'complete', or 'skipped' task status
- priority - integer priority, higher is more priority
- result_type - either 'keep' or 'forget', what to do with the task when
it's done
- time_queue - time the task was queued
- time_start - time taskd began working on the task
- time_stop - time taskd stopped working on the task
- task_name - full dotted name of the task function to run
- process - identifier for which taskd process is working on the task
- context - values used to set c.project, c.app, c.user for the task
- args - ``*args`` to be sent to the task function
- kwargs - ``**kwargs`` to be sent to the task function
- result - if the task is complete, the return value. If in error, the traceback.
'''
states = ('ready', 'busy', 'error', 'complete', 'skipped')
result_types = ('keep', 'forget')
class __mongometa__:
session = task_orm_session
name = 'monq_task'
indexes = [
[
# used in MonQTask.get() method
# also 'state' queries exist in several other methods
('state', ming.ASCENDING),
('priority', ming.DESCENDING),
('time_queue', ming.ASCENDING)
],
[
# used by SF internal tool, but could be generally useful to
# have an index on task_name
'state', 'task_name', 'time_queue'
],
'args',
]
_id = FieldProperty(S.ObjectId)
state = FieldProperty(S.OneOf(*states))
priority = FieldProperty(int)
result_type = FieldProperty(S.OneOf(*result_types))
time_queue = FieldProperty(datetime, if_missing=datetime.utcnow)
time_start = FieldProperty(datetime, if_missing=None)
time_stop = FieldProperty(datetime, if_missing=None)
task_name = FieldProperty(str)
process = FieldProperty(str)
context = FieldProperty(dict(
project_id=S.ObjectId,
app_config_id=S.ObjectId,
user_id=S.ObjectId,
notifications_disabled=bool))
args = FieldProperty([])
kwargs = FieldProperty({None: None})
result = FieldProperty(None, if_missing=None)
def __repr__(self):
from allura import model as M
project = M.Project.query.get(_id=self.context.project_id)
app = None
if project:
app_config = M.AppConfig.query.get(_id=self.context.app_config_id)
if app_config:
app = project.app_instance(app_config)
user = M.User.query.get(_id=self.context.user_id)
project_url = project and project.url() or None
app_mount = app and app.config.options.mount_point or None
username = user and user.username or None
return '<%s %s (%s) P:%d %s %s project:%s app:%s user:%s>' % (
self.__class__.__name__,
self._id,
self.state,
self.priority,
self.task_name,
self.process,
project_url,
app_mount,
username)
@LazyProperty
def function(self):
'''The function that is called by this task'''
smod, sfunc = self.task_name.rsplit('.', 1)
cur = __import__(smod, fromlist=[sfunc])
return getattr(cur, sfunc)
@classmethod
def post(cls,
function,
args=None,
kwargs=None,
result_type='forget',
priority=10,
delay=0):
'''Create a new task object based on the current context.'''
if args is None:
args = ()
if kwargs is None:
kwargs = {}
task_name = '%s.%s' % (
function.__module__,
function.__name__)
context = dict(
project_id=None,
app_config_id=None,
user_id=None,
notifications_disabled=False)
if getattr(c, 'project', None):
context['project_id'] = c.project._id
context[
'notifications_disabled'] = c.project.notifications_disabled
if getattr(c, 'app', None):
context['app_config_id'] = c.app.config._id
if getattr(c, 'user', None):
context['user_id'] = c.user._id
obj = cls(
state='ready',
priority=priority,
result_type=result_type,
task_name=task_name,
args=args,
kwargs=kwargs,
process=None,
result=None,
context=context,
time_queue=datetime.utcnow() + timedelta(seconds=delay))
session(obj).flush(obj)
try:
if g.amq_conn:
g.amq_conn.queue.put('')
except:
log.warning('Error putting to amq_conn', exc_info=True)
return obj
@classmethod
def get(cls, process='worker', state='ready', waitfunc=None, only=None):
'''Get the highest-priority, oldest, ready task and lock it to the
current process. If no task is available and waitfunc is supplied, call
the waitfunc before trying to get the task again. If waitfunc is None
and no tasks are available, return None. If waitfunc raises a
StopIteration, stop waiting for a task
'''
sort = [
('priority', ming.DESCENDING),
('time_queue', ming.ASCENDING)]
while True:
try:
query = dict(state=state)
query['time_queue'] = {'$lte': datetime.utcnow()}
if only:
query['task_name'] = {'$in': only}
obj = cls.query.find_and_modify(
query=query,
update={
'$set': dict(
state='busy',
process=process)
},
new=True,
sort=sort)
if obj is not None:
return obj
except pymongo.errors.OperationFailure, exc:
if 'No matching object found' not in exc.args[0]:
raise
if waitfunc is None:
return None
try:
waitfunc()
except StopIteration:
return None
@classmethod
def timeout_tasks(cls, older_than):
'''Mark all busy tasks older than a certain datetime as 'ready' again.
Used to retry 'stuck' tasks.'''
spec = dict(state='busy')
spec['time_start'] = {'$lt': older_than}
cls.query.update(spec, {'$set': dict(state='ready')}, multi=True)
@classmethod
def clear_complete(cls):
'''Delete the task objects for complete tasks'''
spec = dict(state='complete')
cls.query.remove(spec)
@classmethod
def run_ready(cls, worker=None):
'''Run all the tasks that are currently ready'''
i = 0
for i, task in enumerate(cls.query.find(dict(state='ready')).all()):
task.process = worker
task()
return i
def __call__(self, restore_context=True):
'''Call the task function with its context. If restore_context is True,
c.project/app/user will be restored to the values they had before this
function was called.
'''
from allura import model as M
self.time_start = datetime.utcnow()
session(self).flush(self)
log.info('starting %r', self)
old_cproject = getattr(c, 'project', None)
old_capp = getattr(c, 'app', None)
old_cuser = getattr(c, 'user', None)
try:
func = self.function
c.project = M.Project.query.get(_id=self.context.project_id)
c.app = None
if c.project:
c.project.notifications_disabled = self.context.get(
'notifications_disabled', False)
app_config = M.AppConfig.query.get(
_id=self.context.app_config_id)
if app_config:
c.app = c.project.app_instance(app_config)
c.user = M.User.query.get(_id=self.context.user_id)
with log_output(log):
self.result = func(*self.args, **self.kwargs)
self.state = 'complete'
return self.result
except Exception, exc:
if asbool(config.get('monq.raise_errors')):
raise
else:
log.exception('Error "%s" on job %s', exc, self)
self.state = 'error'
if hasattr(exc, 'format_error'):
self.result = exc.format_error()
log.error(self.result)
else:
self.result = traceback.format_exc()
finally:
self.time_stop = datetime.utcnow()
session(self).flush(self)
if restore_context:
c.project = old_cproject
c.app = old_capp
c.user = old_cuser
def join(self, poll_interval=0.1):
'''Wait until this task is either complete or errors out, then return the result.'''
while self.state not in ('complete', 'error'):
time.sleep(poll_interval)
self.query.find(dict(_id=self._id), refresh=True).first()
return self.result
@classmethod
def list(cls, state='ready'):
'''Print all tasks of a certain status to sys.stdout. Used for debugging.'''
for t in cls.query.find(dict(state=state)):
sys.stdout.write('%r\n' % t)
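# Illustrative sketch, not part of the original model: the typical producer and
# consumer flow for MonQTask. 'my_task_function' is a hypothetical task callable
# and the calls assume a normal request/task context is set up.
def _example_monq_usage(my_task_function):
    # Producer side (web request or another task): queue work for taskd.
    queued = MonQTask.post(my_task_function, args=(1, 2), priority=20)
    # Consumer side (taskd): claim the highest-priority ready task and run it.
    task = MonQTask.get(process='worker-1')
    if task is not None:
        task()
    return queued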
|
apache/incubator-allura
|
Allura/allura/model/monq_model.py
|
Python
|
apache-2.0
| 11,231
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os.path
from spack import *
class Dyninst(CMakePackage):
"""API for dynamic binary instrumentation. Modify programs while they
are executing without recompiling, re-linking, or re-executing."""
homepage = "https://dyninst.org"
git = "https://github.com/dyninst/dyninst.git"
maintainers = ['hainest']
tags = ['e4s']
version('master', branch='master')
version('12.0.1', tag='v12.0.1')
version('12.0.0', tag='v12.0.0')
version('11.0.1', tag='v11.0.1')
version('11.0.0', tag='v11.0.0')
version('10.2.1', tag='v10.2.1')
version('10.2.0', tag='v10.2.0')
version('10.1.0', tag='v10.1.0')
version('10.0.0', tag='v10.0.0')
version('9.3.2', tag='v9.3.2')
version('9.3.0', tag='v9.3.0')
version('9.2.0', tag='v9.2.0')
version('9.1.0', tag='v9.1.0')
version('8.2.1', tag='v8.2.1')
variant('openmp', default=True,
description='Enable OpenMP support for ParseAPI '
'(version 10.0.0 or later)')
variant('static', default=False,
description='Build static libraries')
variant('stat_dysect', default=False,
description="Patch for STAT's DySectAPI")
boost_libs = '+atomic+chrono+date_time+filesystem+system+thread+timer'
depends_on('boost@1.61.0:' + boost_libs, when='@10.1.0:')
depends_on('boost@1.61.0:1.69' + boost_libs, when='@:10.0')
depends_on('boost@1.67.0:' + boost_libs, when='@11.0.0:')
depends_on('libiberty+pic')
# Dyninst uses elfutils starting with 9.3.0, and used libelf
# before that.
# NB: Parallel DWARF parsing in Dyninst 10.2.0 requires a thread-
# safe libdw
depends_on('elfutils@0.186:', type='link', when='@12.0.1:')
depends_on('elfutils@0.178:', type='link', when='@10.2.0:')
depends_on('elfutils', type='link', when='@9.3.0:10.1')
depends_on('libelf', type='link', when='@:9.2')
# Dyninst uses libdw from elfutils starting with 10.0, and used
# libdwarf before that.
depends_on('libdwarf', when='@:9')
depends_on('tbb@2018.6:', when='@10.0.0:')
depends_on('cmake@3.4.0:', type='build', when='@10.1.0:')
depends_on('cmake@3.0.0:', type='build', when='@10.0.0:10.0')
depends_on('cmake@2.8:', type='build', when='@:9')
patch('stat_dysect.patch', when='+stat_dysect')
patch('stackanalysis_h.patch', when='@9.2.0')
patch('v9.3.2-auto.patch', when='@9.3.2 %gcc@:4.7')
patch('tribool.patch', when='@9.3.0:10.0.0 ^boost@1.69:')
# No Mac support (including apple-clang)
conflicts('platform=darwin', msg='macOS is not supported')
# We currently only build with gcc
conflicts('%clang')
conflicts('%arm')
conflicts('%cce')
conflicts('%fj')
conflicts('%intel')
conflicts('%pgi')
conflicts('%xl')
conflicts('%xl_r')
# Version 11.0 requires a C++11-compliant ABI
conflicts('%gcc@:5', when='@11.0.0:')
# Versions 9.3.x used cotire, but have no knob to turn it off.
# Cotire has no real use for one-time builds and can break
# parallel builds with both static and shared libs.
@when('@9.3.0:9.3')
def patch(self):
filter_file('USE_COTIRE true', 'USE_COTIRE false',
'cmake/shared.cmake')
# New style cmake args, starting with 10.1.
@when('@10.1.0:')
def cmake_args(self):
spec = self.spec
args = [
'-DBoost_ROOT_DIR=%s' % spec['boost'].prefix,
'-DElfUtils_ROOT_DIR=%s' % spec['elf'].prefix,
'-DLibIberty_ROOT_DIR=%s' % spec['libiberty'].prefix,
'-DTBB_ROOT_DIR=%s' % spec['tbb'].prefix,
self.define('LibIberty_LIBRARIES', spec['libiberty'].libs)
]
if '+openmp' in spec:
args.append('-DUSE_OpenMP=ON')
else:
args.append('-DUSE_OpenMP=OFF')
if '+static' in spec:
args.append('-DENABLE_STATIC_LIBS=YES')
else:
args.append('-DENABLE_STATIC_LIBS=NO')
# Make sure Dyninst doesn't try to build its own dependencies
# outside of Spack
if spec.satisfies('@10.2.0:'):
args.append('-DSTERILE_BUILD=ON')
return args
# Old style cmake args, up through 10.0.
@when('@:10.0')
def cmake_args(self):
spec = self.spec
# Elf -- the directory containing libelf.h.
elf = spec['elf'].prefix
elf_include = os.path.dirname(
find_headers('libelf', elf.include, recursive=True)[0])
# Dwarf -- the directory containing elfutils/libdw.h or
# libdwarf.h, and the path to libdw.so or libdwarf.so.
if spec.satisfies('@10.0.0:'):
dwarf_include = elf.include
dwarf_lib = find_libraries('libdw', elf, recursive=True)
else:
dwarf_include = spec['libdwarf'].prefix.include
dwarf_lib = spec['libdwarf'].libs
args = [
'-DPATH_BOOST=%s' % spec['boost'].prefix,
'-DIBERTY_LIBRARIES=%s' % spec['libiberty'].libs,
'-DLIBELF_INCLUDE_DIR=%s' % elf_include,
'-DLIBELF_LIBRARIES=%s' % spec['elf'].libs,
'-DLIBDWARF_INCLUDE_DIR=%s' % dwarf_include,
'-DLIBDWARF_LIBRARIES=%s' % dwarf_lib,
]
# TBB include and lib directories, version 10.x or later.
if spec.satisfies('@10.0.0:'):
args.extend([
'-DTBB_INCLUDE_DIRS=%s' % spec['tbb'].prefix.include,
'-DTBB_LIBRARY=%s' % spec['tbb'].prefix.lib,
])
# Openmp applies to version 10.x or later.
if spec.satisfies('@10.0.0:'):
if '+openmp' in spec:
args.append('-DUSE_OpenMP=ON')
else:
args.append('-DUSE_OpenMP=OFF')
# Static libs started with version 9.1.0.
if spec.satisfies('@9.1.0:'):
if '+static' in spec:
args.append('-DENABLE_STATIC_LIBS=1')
else:
args.append('-DENABLE_STATIC_LIBS=NO')
return args
|
LLNL/spack
|
var/spack/repos/builtin/packages/dyninst/package.py
|
Python
|
lgpl-2.1
| 6,257
|
import pandas as pd
import numpy as np
from datetime import datetime
from datetime import timedelta
import math
from utility.datafilepath import g_singletonDataFilePath
class GenerateResultCsv:
def __init__(self):
return
def generateTestDate_0(self):
startDate = datetime.strptime('2016-01-01', '%Y-%m-%d')
res = []
for i in range(21):
deltatime = timedelta(days = i)
item = (startDate + deltatime).date()
res.append(str(item))
return res
def generateTestDate_1(self):
startDate = datetime.strptime('2016-01-22', '%Y-%m-%d')
res = []
for i in range(5):
deltatime = timedelta(days = 2*i)
item = (startDate + deltatime).date()
res.append(str(item))
return res
def generateSlotSet(self, testDates, slots):
res = []
for testDate in testDates:
for slot in slots:
res.append(testDate + '-'+ str(slot))
return res
def generateSlotSet_0(self):
testDates = self.generateTestDate_0()
slots = [46,58,70,82,94,106,118,130,142]
return self.generateSlotSet(testDates, slots)
def generateSlotSet_1(self):
testDates = self.generateTestDate_1()
slots = [46,58,70,82,94,106,118,130,142]
return self.generateSlotSet(testDates, slots)
def generateTestDistrict(self):
return [i+ 1 for i in range(66)]
def generatePrediction_0(self):
testSlots = self.generateSlotSet_0()
df = pd.read_csv(g_singletonDataFilePath.getGapCsv_Train())
df = df.loc[df['time_slotid'].isin(testSlots)]
self.saveResultCsv(df, 'prediction_0.csv')
#map 2016-01-22-1 to 2016-01-22-001
# df['timeslotrank'] = df['time_slotid'].map(lambda x: "-".join(x.split('-')[:3] + [x.split('-')[-1].zfill(3)]))
# df = df.sort_values(by = ['timeslotrank','start_district_id'])
# df.to_csv('prediction_0.csv', columns=['start_district_id', 'time_slotid', 'gap'], header=None, index=None)
return
def saveResultCsv(self, df, filename):
#map 2016-01-22-1 to 2016-01-22-001
df['timeslotrank'] = df['time_slotid'].map(lambda x: "-".join(x.split('-')[:3] + [x.split('-')[-1].zfill(3)]))
df = df.sort_values(by = ['start_district_id', 'timeslotrank'])
df.to_csv(filename, columns=['start_district_id', 'time_slotid', 'gap'], header=None, index=None)
return
def generateActual_0(self):
testSlots = self.generateSlotSet_0()
df = pd.read_csv(g_singletonDataFilePath.getGapCsv_Train())
df = df.loc[df['time_slotid'].isin(testSlots)]
self.saveResultCsv(df, 'actual_0.csv')
return
class Evaluate(GenerateResultCsv):
def __init__(self):
GenerateResultCsv.__init__(self)
return
def loadResultFiles(self, testSetNum):
actualFile = 'actual_' + str(testSetNum) + '.csv'
predictionFile = 'prediction_' + str(testSetNum) + '.csv'
self.actualDict = self.loadDict(actualFile)
self.predictonDict = self.loadDict(predictionFile)
return
def loadDict(self, filename):
df = pd.read_csv(filename, header=None)
res = {}
for _, row in df.iterrows():
res[(row[0], row[1])] = row[2]
return res
def calFinalResult(self, testSetNum):
self.loadResultFiles(testSetNum)
res = []
for key, value in self.actualDict.iteritems():
actual = value
if actual == 0:
print "record {} is 0, not included in final calculation".format(key)
continue
prediction = self.predictonDict[key]
temp = (actual - prediction)/float(actual)
if math.isnan(temp):
print temp
res.append(abs(temp))
res = np.array(res)
pd.DataFrame(res).to_csv('result.csv')
print "final result: {}".format(res.mean())
return np.mean(res)
def run(self):
self.generateActual_0()
self.generatePrediction_0()
self.calFinalResult(0)
return
if __name__ == "__main__":
obj= Evaluate()
obj.run()
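# Worked example (illustrative note, not part of the original script) of the
# time_slotid padding done in saveResultCsv above:
#   '2016-01-22-1'  -> timeslotrank '2016-01-22-001'
#   '2016-01-22-46' -> timeslotrank '2016-01-22-046'
# so that sorting the string keys matches the numeric slot order.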
|
LevinJ/Supply-demand-forecasting
|
evaluation/evaluate.py
|
Python
|
mit
| 4,270
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
requirements = [
'transitions'
# TODO: put package requirements here
]
test_requirements = [
'pip',
'flake8',
'pytest',
'mock',
'six',
'transitions'
# TODO: put package test requirements here
]
setup(
name='test_manager',
version='0.1.1',
description="GUI Test manage to run multiple test plans",
long_description=readme + '\n\n' + history,
author="Matt Trott",
author_email='trottmpq@gmail.com',
url='https://github.com/trottmpq/test_manager',
packages=[
'test_manager',
],
package_dir={'test_manager':
'test_manager'},
entry_points={
'console_scripts': [
'test_manager=test_manager.cli:main'
]
},
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='test_manager',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
test_suite='tests',
tests_require=test_requirements
)
|
trottmpq/test_manager
|
setup.py
|
Python
|
mit
| 1,716
|
# coding=utf-8
# Copyright 2022 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Rouge metric."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensor2tensor.utils import rouge
import tensorflow.compat.v1 as tf
class TestRouge2Metric(tf.test.TestCase):
"""Tests the rouge-2 metric."""
def testRouge2Identical(self):
hypotheses = np.array([[1, 2, 3, 4, 5, 1, 6, 7, 0],
[1, 2, 3, 4, 5, 1, 6, 8, 7]])
references = np.array([[1, 2, 3, 4, 5, 1, 6, 7, 0],
[1, 2, 3, 4, 5, 1, 6, 8, 7]])
self.assertAllClose(rouge.rouge_n(hypotheses, references), 1.0, atol=1e-03)
def testRouge2Disjoint(self):
hypotheses = np.array([[1, 2, 3, 4, 5, 1, 6, 7, 0],
[1, 2, 3, 4, 5, 1, 6, 8, 7]])
references = np.array([[8, 9, 10, 11, 12, 13, 14, 15, 16, 17],
[9, 10, 11, 12, 13, 14, 15, 16, 17, 0]])
self.assertEqual(rouge.rouge_n(hypotheses, references), 0.0)
def testRouge2PartialOverlap(self):
hypotheses = np.array([[1, 2, 3, 4, 5, 1, 6, 7, 0],
[1, 2, 3, 4, 5, 1, 6, 8, 7]])
references = np.array([[1, 9, 2, 3, 4, 5, 1, 10, 6, 7],
[1, 9, 2, 3, 4, 5, 1, 10, 6, 7]])
self.assertAllClose(rouge.rouge_n(hypotheses, references), 0.53, atol=1e-03)
class TestRougeLMetric(tf.test.TestCase):
"""Tests the rouge-l metric."""
def testRougeLIdentical(self):
hypotheses = np.array([[1, 2, 3, 4, 5, 1, 6, 7, 0],
[1, 2, 3, 4, 5, 1, 6, 8, 7]])
references = np.array([[1, 2, 3, 4, 5, 1, 6, 7, 0],
[1, 2, 3, 4, 5, 1, 6, 8, 7]])
self.assertAllClose(
rouge.rouge_l_sentence_level(hypotheses, references), 1.0, atol=1e-03)
def testRougeLDisjoint(self):
hypotheses = np.array([[1, 2, 3, 4, 5, 1, 6, 7, 0],
[1, 2, 3, 4, 5, 1, 6, 8, 7]])
references = np.array([[8, 9, 10, 11, 12, 13, 14, 15, 16, 17],
[9, 10, 11, 12, 13, 14, 15, 16, 17, 0]])
self.assertEqual(rouge.rouge_l_sentence_level(hypotheses, references), 0.0)
def testRougeLPartialOverlap(self):
hypotheses = np.array([[1, 2, 3, 4, 5, 1, 6, 7, 0],
[1, 2, 3, 4, 5, 1, 6, 8, 7]])
references = np.array([[1, 9, 2, 3, 4, 5, 1, 10, 6, 7],
[1, 9, 2, 3, 4, 5, 1, 10, 6, 7]])
self.assertAllClose(
rouge.rouge_l_sentence_level(hypotheses, references), 0.837, atol=1e-03)
class TestRougeMetricsE2E(tf.test.TestCase):
"""Tests the rouge metrics end-to-end."""
def testRouge2MetricE2E(self):
vocab_size = 4
batch_size = 12
seq_length = 12
predictions = tf.one_hot(
np.random.randint(vocab_size, size=(batch_size, seq_length, 1, 1)),
depth=4,
dtype=tf.float32)
targets = np.random.randint(4, size=(12, 12, 1, 1))
with self.test_session() as session:
scores, _ = rouge.rouge_2_fscore(predictions,
tf.constant(targets, dtype=tf.int32))
a = tf.reduce_mean(scores)
session.run(tf.global_variables_initializer())
session.run(a)
def testRougeLMetricE2E(self):
vocab_size = 4
batch_size = 12
seq_length = 12
predictions = tf.one_hot(
np.random.randint(vocab_size, size=(batch_size, seq_length, 1, 1)),
depth=4,
dtype=tf.float32)
targets = np.random.randint(4, size=(12, 12, 1, 1))
with self.test_session() as session:
scores, _ = rouge.rouge_l_fscore(
predictions,
tf.constant(targets, dtype=tf.int32))
a = tf.reduce_mean(scores)
session.run(tf.global_variables_initializer())
session.run(a)
if __name__ == "__main__":
tf.test.main()
|
tensorflow/tensor2tensor
|
tensor2tensor/utils/rouge_test.py
|
Python
|
apache-2.0
| 4,407
|
import numpy as np
import tensorflow as tf
from tensorflow.contrib import legacy_seq2seq
from tensorflow.contrib import rnn
# Cloned from https://github.com/sherjilozair/char-rnn-tensorflow
# Used to sample trained models without having to call sample.py every time
# which is extremely slow. Instead we load the data once in utils.init_tf
# and use the data provided in the bot
class Model:
def __init__(self, args, training=True):
self.args = args
if not training:
args.batch_size = 1
args.seq_length = 1
# choose different rnn cell
if args.model == 'rnn':
cell_fn = rnn.RNNCell
elif args.model == 'gru':
cell_fn = rnn.GRUCell
elif args.model == 'lstm':
cell_fn = rnn.LSTMCell
elif args.model == 'nas':
cell_fn = rnn.NASCell
else:
raise Exception("model type not supported: {}".format(args.model))
        # wrap the multi-layered rnn cells into one cell with dropout
cells = []
for _ in range(args.num_layers):
cell = cell_fn(args.rnn_size)
if training and (args.output_keep_prob < 1.0 or args.input_keep_prob < 1.0):
cell = rnn.DropoutWrapper(cell,
input_keep_prob=args.input_keep_prob,
output_keep_prob=args.output_keep_prob)
cells.append(cell)
self.cell = cell = rnn.MultiRNNCell(cells, state_is_tuple=True)
# input/target data (int32 since input is char-level)
self.input_data = tf.placeholder(
tf.int32, [args.batch_size, args.seq_length])
self.targets = tf.placeholder(
tf.int32, [args.batch_size, args.seq_length])
self.initial_state = cell.zero_state(args.batch_size, tf.float32)
# softmax output layer, use softmax to classify
with tf.variable_scope('rnnlm'):
softmax_w = tf.get_variable("softmax_w",
[args.rnn_size, args.vocab_size])
softmax_b = tf.get_variable("softmax_b", [args.vocab_size])
# transform input to embedding
embedding = tf.get_variable("embedding", [args.vocab_size, args.rnn_size])
inputs = tf.nn.embedding_lookup(embedding, self.input_data)
# dropout beta testing: double check which one should affect next line
if training and args.output_keep_prob:
inputs = tf.nn.dropout(inputs, args.output_keep_prob)
        # unstack the input so it fits the rnn model
inputs = tf.split(inputs, args.seq_length, 1)
inputs = [tf.squeeze(input_, [1]) for input_ in inputs]
        # loop function for rnn_decoder, which takes the previous i-th cell's output and generates the (i+1)-th cell's input
def loop(prev, _):
prev = tf.matmul(prev, softmax_w) + softmax_b
prev_symbol = tf.stop_gradient(tf.argmax(prev, 1))
return tf.nn.embedding_lookup(embedding, prev_symbol)
        # rnn_decoder to generate the outputs and final state. When we are not training the model, we use the loop function.
outputs, last_state = legacy_seq2seq.rnn_decoder(inputs, self.initial_state, cell, loop_function=loop if not training else None, scope='rnnlm')
output = tf.reshape(tf.concat(outputs, 1), [-1, args.rnn_size])
# output layer
self.logits = tf.matmul(output, softmax_w) + softmax_b
self.probs = tf.nn.softmax(self.logits)
# loss is calculate by the log loss and taking the average.
loss = legacy_seq2seq.sequence_loss_by_example(
[self.logits],
[tf.reshape(self.targets, [-1])],
[tf.ones([args.batch_size * args.seq_length])])
with tf.name_scope('cost'):
self.cost = tf.reduce_sum(loss) / args.batch_size / args.seq_length
self.final_state = last_state
self.lr = tf.Variable(0.0, trainable=False)
tvars = tf.trainable_variables()
# calculate gradients
grads, _ = tf.clip_by_global_norm(tf.gradients(self.cost, tvars),
args.grad_clip)
with tf.name_scope('optimizer'):
optimizer = tf.train.AdamOptimizer(self.lr)
# apply gradient change to the all the trainable variable.
self.train_op = optimizer.apply_gradients(zip(grads, tvars))
# instrument tensorboard
tf.summary.histogram('logits', self.logits)
tf.summary.histogram('loss', loss)
tf.summary.scalar('train_loss', self.cost)
def sample(self, sess, chars, vocab, num=200, prime='The ', sampling_type=1):
state = sess.run(self.cell.zero_state(1, tf.float32))
for char in prime[:-1]:
x = np.zeros((1, 1))
x[0, 0] = vocab[char]
feed = {self.input_data: x, self.initial_state: state}
[state] = sess.run([self.final_state], feed)
def weighted_pick(weights):
t = np.cumsum(weights)
s = np.sum(weights)
return int(np.searchsorted(t, np.random.rand(1)*s))
ret = prime
char = prime[-1]
for _ in range(num):
x = np.zeros((1, 1))
x[0, 0] = vocab[char]
feed = {self.input_data: x, self.initial_state: state}
[probs, state] = sess.run([self.probs, self.final_state], feed)
p = probs[0]
if sampling_type == 0:
sample = np.argmax(p)
elif sampling_type == 2:
if char == ' ':
sample = weighted_pick(p)
else:
sample = np.argmax(p)
else: # sampling_type == 1 default:
sample = weighted_pick(p)
pred = chars[sample]
ret += pred
char = pred
return ret
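# Illustrative sketch, not part of the original file: the hyperparameter fields
# that Model.__init__ reads from 'args'. The values shown are assumptions, not
# the project's actual configuration.
def _example_args():
    from argparse import Namespace
    return Namespace(model='lstm', rnn_size=128, num_layers=2,
                     batch_size=50, seq_length=50, vocab_size=65,
                     grad_clip=5.0, input_keep_prob=1.0, output_keep_prob=1.0)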
|
s0hvaperuna/Not-a-bot
|
char_rnn/model.py
|
Python
|
mit
| 5,892
|
from Screens.Screen import Screen
from Components.ConfigList import ConfigListScreen
from Components.Sources.StaticText import StaticText
from Components.config import config, ConfigSubsection, ConfigBoolean, getConfigListEntry, ConfigSelection, ConfigYesNo, ConfigIP
from Components.Network import iNetwork
from Components.Ipkg import IpkgComponent
from enigma import eDVBDB
config.misc.installwizard = ConfigSubsection()
config.misc.installwizard.hasnetwork = ConfigBoolean(default = False)
config.misc.installwizard.ipkgloaded = ConfigBoolean(default = False)
config.misc.installwizard.channellistdownloaded = ConfigBoolean(default = False)
class InstallWizard(Screen, ConfigListScreen):
STATE_UPDATE = 0
STATE_CHOISE_CHANNELLIST = 1
# STATE_CHOISE_SOFTCAM = 2
def __init__(self, session, args = None):
Screen.__init__(self, session)
self.index = args
self.list = []
ConfigListScreen.__init__(self, self.list)
if self.index == self.STATE_UPDATE:
config.misc.installwizard.hasnetwork.value = False
config.misc.installwizard.ipkgloaded.value = False
modes = {0: " "}
self.enabled = ConfigSelection(choices = modes, default = 0)
self.adapters = [(iNetwork.getFriendlyAdapterName(x),x) for x in iNetwork.getAdapterList()]
is_found = False
for x in self.adapters:
if x[1] == 'eth0' or x[1] == 'eth1':
if iNetwork.getAdapterAttribute(x[1], 'up'):
self.ipConfigEntry = ConfigIP(default = iNetwork.getAdapterAttribute(x[1], "ip"))
iNetwork.checkNetworkState(self.checkNetworkCB)
						is_found = True
else:
iNetwork.restartNetwork(self.checkNetworkLinkCB)
break
if is_found is False:
self.createMenu()
elif self.index == self.STATE_CHOISE_CHANNELLIST:
self.enabled = ConfigYesNo(default = True)
modes = {"ATV": "ATV default(13e-19e)", "19e": "Astra 1", "23e": "Astra 3", "19e-23e": "Astra 1 Astra 3", "19e-23e-28e": "Astra 1 Astra 2 Astra 3", "13e-19e-23e-28e": "Astra 1 Astra 2 Astra 3 Hotbird"}
self.channellist_type = ConfigSelection(choices = modes, default = "ATV")
self.createMenu()
# elif self.index == self.STATE_CHOISE_SOFTCAM:
# self.enabled = ConfigYesNo(default = True)
# modes = {"cccam": _("default") + " (CCcam)", "scam": "scam"}
# self.softcam_type = ConfigSelection(choices = modes, default = "cccam")
# self.createMenu()
def checkNetworkCB(self, data):
if data < 3:
config.misc.installwizard.hasnetwork.value = True
self.createMenu()
def checkNetworkLinkCB(self, retval):
if retval:
iNetwork.checkNetworkState(self.checkNetworkCB)
else:
self.createMenu()
def createMenu(self):
try:
test = self.index
except:
return
self.list = []
if self.index == self.STATE_UPDATE:
if config.misc.installwizard.hasnetwork.value:
self.list.append(getConfigListEntry(_("Your internet connection is working (ip: %s)") % (self.ipConfigEntry.getText()), self.enabled))
else:
self.list.append(getConfigListEntry(_("Your receiver does not have an internet connection"), self.enabled))
elif self.index == self.STATE_CHOISE_CHANNELLIST:
self.list.append(getConfigListEntry(_("Install channel list"), self.enabled))
if self.enabled.value:
self.list.append(getConfigListEntry(_("Channel list type"), self.channellist_type))
# elif self.index == self.STATE_CHOISE_SOFTCAM:
# self.list.append(getConfigListEntry(_("Install softcam"), self.enabled))
# if self.enabled.value:
# self.list.append(getConfigListEntry(_("Softcam type"), self.softcam_type))
self["config"].list = self.list
self["config"].l.setList(self.list)
def keyLeft(self):
if self.index == 0:
return
ConfigListScreen.keyLeft(self)
self.createMenu()
def keyRight(self):
if self.index == 0:
return
ConfigListScreen.keyRight(self)
self.createMenu()
def run(self):
if self.index == self.STATE_UPDATE:
if config.misc.installwizard.hasnetwork.value:
self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (updating packages)'), IpkgComponent.CMD_UPDATE)
elif self.index == self.STATE_CHOISE_CHANNELLIST and self.enabled.value and self.channellist_type.value != "ATV":
self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (downloading channel list)'), IpkgComponent.CMD_REMOVE, {'package': 'enigma2-plugin-settings-henksat-' + self.channellist_type.value})
# elif self.index == self.STATE_CHOISE_SOFTCAM and self.enabled.value:
# self.session.open(InstallWizardIpkgUpdater, self.index, _('Please wait (downloading softcam)'), IpkgComponent.CMD_INSTALL, {'package': 'enigma2-plugin-softcams-' + self.softcam_type.value})
return
class InstallWizardIpkgUpdater(Screen):
def __init__(self, session, index, info, cmd, pkg = None):
Screen.__init__(self, session)
self["statusbar"] = StaticText(info)
self.pkg = pkg
self.index = index
self.state = 0
self.ipkg = IpkgComponent()
self.ipkg.addCallback(self.ipkgCallback)
if self.index == InstallWizard.STATE_CHOISE_CHANNELLIST:
self.ipkg.startCmd(cmd, {'package': 'enigma2-plugin-settings-*'})
else:
self.ipkg.startCmd(cmd, pkg)
def ipkgCallback(self, event, param):
if event == IpkgComponent.EVENT_DONE:
if self.index == InstallWizard.STATE_UPDATE:
config.misc.installwizard.ipkgloaded.value = True
elif self.index == InstallWizard.STATE_CHOISE_CHANNELLIST:
if self.state == 0:
self.ipkg.startCmd(IpkgComponent.CMD_INSTALL, self.pkg)
self.state = 1
return
else:
config.misc.installwizard.channellistdownloaded.value = True
eDVBDB.getInstance().reloadBouquets()
eDVBDB.getInstance().reloadServicelist()
self.close()
|
0sc0d3r/enigma2
|
lib/python/Screens/InstallWizard.py
|
Python
|
gpl-2.0
| 5,657
|
from direct.task.Task import Task
import random
from toontown.classicchars import CCharPaths
from toontown.safezone import Playground
from toontown.toonbase import TTLocalizer
class TTPlayground(Playground.Playground):
def enter(self, requestStatus):
Playground.Playground.enter(self, requestStatus)
taskMgr.doMethodLater(1, self.__birds, 'TT-birds')
def exit(self):
Playground.Playground.exit(self)
taskMgr.remove('TT-birds')
def showPaths(self):
self.showPathPoints(CCharPaths.getPaths(TTLocalizer.Mickey))
def __birds(self, task):
base.playSfx(random.choice(self.loader.birdSound))
time = random.random() * 20.0 + 1
taskMgr.doMethodLater(time, self.__birds, 'TT-birds')
return Task.done
|
Spiderlover/Toontown
|
toontown/safezone/TTPlayground.py
|
Python
|
mit
| 783
|
##
# Copyright (c) 2012-2014 Apple Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
from caldavclientlibrary.protocol.url import URL
from caldavclientlibrary.protocol.webdav.definitions import davxml
from contrib.performance.sqlusage.requests.httpTests import HTTPTestBase
from txweb2.dav.util import joinURL
from pycalendar.datetime import DateTime
ICAL = """BEGIN:VCALENDAR
CALSCALE:GREGORIAN
PRODID:-//Example Inc.//Example Calendar//EN
VERSION:2.0
BEGIN:VTIMEZONE
LAST-MODIFIED:20040110T032845Z
TZID:US/Eastern
BEGIN:DAYLIGHT
DTSTART:20000404T020000
RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:20001026T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
DTSTAMP:20051222T205953Z
CREATED:20060101T150000Z
DTSTART;TZID=US/Eastern:%d0101T100000
DURATION:PT1H
SUMMARY:event 1
UID:sync-collection-%d-ics
END:VEVENT
END:VCALENDAR
""".replace("\n", "\r\n")
class SyncTest(HTTPTestBase):
"""
A sync operation
"""
def __init__(self, label, sessions, logFilePath, full, count):
super(SyncTest, self).__init__(label, sessions, logFilePath)
self.full = full
self.count = count
self.synctoken = ""
def prepare(self):
"""
Do some setup prior to the real request.
"""
if not self.full:
# Get current sync token
results, _ignore_bad = self.sessions[0].getProperties(URL(path=self.sessions[0].calendarHref), (davxml.sync_token,))
self.synctoken = results[davxml.sync_token]
# Add resources to create required number of changes
now = DateTime.getNowUTC()
for i in range(self.count):
href = joinURL(self.sessions[0].calendarHref, "sync-collection-%d.ics" % (i + 1,))
self.sessions[0].writeData(URL(path=href), ICAL % (now.getYear() + 1, i + 1,), "text/calendar")
def doRequest(self):
"""
Execute the actual HTTP request.
"""
props = (
davxml.getetag,
davxml.getcontenttype,
)
# Run sync collection
self.sessions[0].syncCollection(URL(path=self.sessions[0].calendarHref), self.synctoken, props)
def cleanup(self):
"""
Do some cleanup after the real request.
"""
if not self.full:
# Remove created resources
for i in range(self.count):
href = joinURL(self.sessions[0].calendarHref, "sync-collection-%d.ics" % (i + 1,))
self.sessions[0].deleteResource(URL(path=href))
|
trevor/calendarserver
|
contrib/performance/sqlusage/requests/sync.py
|
Python
|
apache-2.0
| 3,208
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_analytic_cost_ledger_journal_report(osv.osv_memory):
_name = 'account.analytic.cost.ledger.journal.report'
_description = 'Account Analytic Cost Ledger For Journal Report'
_columns = {
'date1': fields.date('Start of period', required=True),
'date2': fields.date('End of period', required=True),
'journal': fields.many2many('account.analytic.journal', 'ledger_journal_rel', 'ledger_id', 'journal_id', 'Journals'),
}
_defaults = {
'date1': lambda *a: time.strftime('%Y-01-01'),
'date2': lambda *a: time.strftime('%Y-%m-%d')
}
def check_report(self, cr, uid, ids, context=None):
if context is None:
context = {}
data = self.read(cr, uid, ids)[0]
datas = {
'ids': context.get('active_ids',[]),
'model': 'account.analytic.account',
'form': data
}
return {
'type': 'ir.actions.report.xml',
'report_name': 'account.analytic.account.quantity_cost_ledger',
'datas': datas,
}
account_analytic_cost_ledger_journal_report()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
inovtec-solutions/OpenERP
|
openerp/addons/account/project/wizard/account_analytic_cost_ledger_for_journal_report.py
|
Python
|
agpl-3.0
| 2,240
|
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 07 16:25:23 2014
@author: Yang Xuefeng
"""
from __future__ import division
import numpy as np
import cPickle as cp
import sys
import scipy.stats as ss
import bisect
import argparse
class evaluation(object):
def __init__(self, wl):
s = set(wl.keys())
f = np.load(r'D:\SS\ResourceData\antonmys\wordsim353.npz')
words = f['w']
score = f['s']
score = [float(i) for i in score]
select_index = [i for i in xrange(len(words)) if words[i][0] in s and words[i][1] in s]
self.words353 = [words[i] for i in select_index]
self.score353 = [score[i] for i in select_index]
self.index353 = [(wl.get(i[0],0), wl.get(i[1],0)) for i in self.words353]
f = np.load(r'D:\SS\ResourceData\antonmys\turk771.npz')
words = f['w']
score = f['s']
score = [float(i) for i in score]
select_index = [i for i in xrange(len(words)) if words[i][0] in s and words[i][1] in s]
self.words771 = [words[i] for i in select_index]
self.score771 = [score[i] for i in select_index]
self.index771 = [(wl.get(i[0],0), wl.get(i[1],0)) for i in self.words771]
f = np.load(r'D:\SS\ResourceData\antonmys\rg65.npz')
words = f['w']
score = f['s']
score = [float(i) for i in score]
select_index = [i for i in xrange(len(words)) if words[i][0] in s and words[i][1] in s]
self.words65 = [words[i] for i in select_index]
self.score65 = [score[i] for i in select_index]
self.index65 = [(wl.get(i[0],0), wl.get(i[1],0)) for i in self.words65]
f = np.load(r'D:\SS\ResourceData\antonmys\yp130.npz')
words = f['w']
score = f['s']
score = [float(i) for i in score]
select_index = [i for i in xrange(len(words)) if words[i][0] in s and words[i][1] in s]
self.words130 = [words[i] for i in select_index]
self.score130 = [score[i] for i in select_index]
self.index130 = [(wl.get(i[0],0), wl.get(i[1],0)) for i in self.words130]
f = np.load(r'D:\SS\ResourceData\antonmys\M3k.npz')
words = f['w']
score = f['s']
score = [float(i) for i in score]
select_index = [i for i in xrange(len(words)) if words[i][0] in s and words[i][1] in s]
self.words3k = [words[i] for i in select_index]
self.score3k = [score[i] for i in select_index]
self.index3k = [(wl.get(i[0],0), wl.get(i[1],0)) for i in self.words3k]
l = cp.load(open(r'D:\SS\ResourceData\antonmys\analogy_g.pkl'))
l = [i for i in l if i[0] in s and i[1] in s and i[2] in s and i[3] in s]
self.word_g = l
self.index_g = [(wl[i[0]],wl[i[1]],wl[i[2]],wl[i[3]]) for i in l]
index_list = zip(*self.index_g)
self.index_g_mat = [list(i) for i in index_list]
l = cp.load(open(r'D:\SS\ResourceData\antonmys\analogy_m.pkl'))
l = [i for i in l if i[0] in s and i[1] in s and i[2] in s and i[3] in s]
self.word_m = l
self.index_m = [(wl[i[0]],wl[i[1]],wl[i[2]],wl[i[3]]) for i in l]
index_list = zip(*self.index_m)
self.index_m_mat = [list(i) for i in index_list]
f = np.load(r'D:\SS\ResourceData\antonmys\sent_complete.npz')
select = []
for i in xrange(len(f['c'])):
t = [1 for j in f['c'][i] if j in s]
p = [1 for j in f['s'][i] if j in s]
if len(t)==5 and 2*len(p)>len(f['s'][i]):
select.append(i)
#print len(select)
self.sents = [f['s'][i] for i in select]
self.candidates = [f['c'][i] for i in select]
self.answers = [f['a'][i] for i in select]
self.index_sents = []
self.index_candidates = []
self.index_answers = [wl[i] for i in self.answers]
for i in self.sents:
t = [wl[j] for j in i if j in s]
self.index_sents.append(t)
for i in self.candidates:
t = [wl[j] for j in i]
self.index_candidates.append(t)
    def get_cosine(self, x, y):
        nominator = np.sum(x * y)
        dominator = np.sqrt(np.sum(x * x)) * np.sqrt(np.sum(y * y))
        return nominator / dominator
    def sent_completation(self, wm):
r = []
for i in xrange(len(self.answers)):
t = []
for j in self.index_candidates[i]:
simi = [self.get_cosine(wm[j,:], wm[k,:]) for k in self.index_sents[i]]
t.append((j,np.mean(simi)))
t.sort(key=lambda x:x[1])
r.append(t[-1][0])
f = [1 if r[i]==self.index_answers[i] else 0 for i in xrange(len(r))]
result = sum(f)/len(r)
return result
def word353(self, wm):
simi = [self.get_cosine(wm[i[0],:], wm[i[1],:]) for i in self.index353]
r,p = ss.spearmanr(simi, self.score353)
return r
def turk771(self, wm):
simi = [self.get_cosine(wm[i[0],:], wm[i[1],:]) for i in self.index771]
r,p = ss.spearmanr(simi, self.score771)
return r
def rg65(self,wm):
simi = [self.get_cosine(wm[i[0],:], wm[i[1],:]) for i in self.index65]
r,p = ss.spearmanr(simi, self.score65)
return r
def yp130(self, wm):
simi = [self.get_cosine(wm[i[0],:], wm[i[1],:]) for i in self.index130]
r,p = ss.spearmanr(simi, self.score130)
return r
def m3k(self, wm):
simi = [self.get_cosine(wm[i[0],:], wm[i[1],:]) for i in self.index3k]
r,p = ss.spearmanr(simi, self.score3k)
return r
def analogy(self,wm,t):
wm_t = wm.transpose()
if t == 'g':
a = self.index_g_mat[0]
b = self.index_g_mat[1]
c = self.index_g_mat[2]
d = self.index_g_mat[3]
elif t == 'm':
a = self.index_m_mat[0]
b = self.index_m_mat[1]
c = self.index_m_mat[2]
d = self.index_m_mat[3]
ma = wm[a,:]
mb = wm[b,:]
mc = wm[c,:]
m = mb + mc- ma
l = []
for i in xrange(len(a)):
simi = np.dot(m[i,:],wm_t)
simi[[a[i],b[i],c[i]]] = -1
l.append(np.argmax(simi))
r = [1 if d[i]==l[i] else 0 for i in xrange(len(d))]
r = sum(r)/len(d)
return r
def eval_all(self,wm):
r = []
r.append(self.word353(wm))
r.append(self.rg65(wm))
r.append(self.yp130(wm))
r.append(self.turk771(wm))
r.append(self.m3k(wm))
r.append(self.analogy(wm,'g'))
r.append(self.analogy(wm,'m'))
r.append(self.sent_completation(wm))
return r
class fine_tuning(object):
def __init__(self, wl,sr):
"""
"""
self.sr = float(sr)
self.wl = wl
it = wl.items()
it = [(i[1],i[0]) for i in it]
self.lw = dict(it)
def normalization_mat(self, wm):
a,b = wm.shape
norms = [np.sqrt(np.sum(np.square(wm[i,:]))) for i in xrange(a)]
norms = np.array(norms).reshape(a,1)
wm = wm/norms
return wm
def normalization_vec(self, v):
norm = np.sqrt(np.sum(np.square(v)))
v = v/norm
return v
def random_matrix_thres(self, wm):
np.random.seed(10000)
a,b = wm.shape
rm = np.random.uniform(-1,1,(a,b))
norms = [np.sqrt(np.sum(np.square(rm[i,:]))) for i in xrange(a)]
norms = np.array(norms).reshape(a,1)
rm = rm/norms
rand_index = np.random.randint(0,a-1,300)
r = []
rmt = rm.transpose()
for i in rand_index:
simi = np.dot(rm[i,:],rmt)
r.append(np.mean(np.sort(simi)[-3:-1]))
thres = np.mean(r)
self.thres = thres
def judge(self,i1,i2):
if i1 < i2:
return 1
elif i1 > i2:
return -1
else:
return 0
def get_local_direction(self, k, wm,simi,kb):
index = np.argsort(simi)
simi_sort = simi[index]
number = wm.shape[0] - bisect.bisect_left(simi_sort, self.thres)
if number > 300:
number = 300
index = index[::-1]
index_dict = {index[i]:i-1 for i in xrange(len(index))}
inter = set.intersection(set(index[0:number]),set(kb))
kb_bad = set(kb)-inter
index_bad = set(index[0:number])-inter
index_bad_sign = [(i,-1) for i in index_bad]
index_bad_error = [abs(number-index_dict[i]) for i in index_bad]
kb_bad_sign = [(i,1) for i in kb_bad]
kb_bad_error = [abs(index_dict[kb[i]]-i) for i in xrange(len(kb)) if kb[i] in kb_bad]
inter_sign_error = [abs(index_dict[kb[i]]-i) for i in xrange(len(kb)) if kb[i] in inter]
inter_sign = [(kb[i],self.judge(i,index_dict[kb[i]])) for i in xrange(len(kb)) if kb[i] in inter]
inter_sign = [i for i in inter_sign if i[1]!=0]
kb_bad_sign.extend(inter_sign)
kb_bad_sign.extend(index_bad_sign)
kb_bad_error.extend(index_bad_error)
kb_bad_error.extend(inter_sign_error)
error = np.sum(kb_bad_error)
index_sign = zip(*kb_bad_sign)
index = list(index_sign[0])
sign = np.array(index_sign[1]).reshape(len(index_sign[1]),1)
temp = wm[index,:]
result = sign * temp
return result ,error
def get_update(self, k, wm, simi, kb):
"""
"""
result, error = self.get_local_direction(k,wm,simi,kb)
result = np.mean(result,axis=0).reshape(1,result.shape[1])
result = self.normalization_vec(result)
return result, error
def get_cosine(self, x, y):
nominator = np.sum( x * y )
dominator = np.sqrt(np.sum(x*x)) * np.sqrt(np.sum(y*y))
return nominator/dominator
def training(self, wl, wm, kb, eva,evaluate=True):
epoch = 1
        wm = self.normalization_mat(wm)
wm_t = wm.transpose()
if eva:
result = eva.eval_all(wm)
print result
error_list = []
error_list.append(100000000000)
stop = True
learning_rate = 0.1
while(stop):
count = 0
l_e = []
print 'epoch: {}'.format(epoch)
for k in kb.keys():
count = count + 1
simi = np.dot(wm[k,:],wm_t)
update, error = self.get_update(k, wm, simi, kb[k])
#print error
l_e.append(error)
update = update * learning_rate
wm[k,:] = wm[k,:]+ update
                wm[k,:] = self.normalization_vec(wm[k,:])
sys.stdout.write('{:10d} fin'.format(count))
sys.stdout.write('\r')
sys.stdout.flush()
epoch = epoch + 1
if eva:
result = eva.eval_all(wm)
print result
error = np.mean(l_e)
error_list.append(error)
if len(error_list)>2:
                if error_list[-2]-error_list[-1]< error_list[1] * self.sr:
stop = False
print error
return wm, error_list
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-wm','-word_matrix')
parser.add_argument('-wl','-word_list')
parser.add_argument('-kb','-knowledge_base')
parser.add_argument('-out','-output_file')
#parser.add_argument('-res','-output_result')
parser.add_argument('-s','-stop_rate')
args = parser.parse_args()
name_wl = args.wl
name_wm = args.wm
name_kb = args.kb
name_out = args.out
#name_res = args.res
sr = args.s
print 'loading'
kb = cp.load(open(name_kb))
wl = cp.load(open(name_wl))
wm = np.load(name_wm)
norms = np.sqrt(np.sum(np.square(wm),axis=1))
print 'Generating Threshold'
ft = fine_tuning(wl,sr)
ft.random_matrix_thres(wm)
#eva = evaluation(wl)
print 'Training Start'
eva = False
wm,el = ft.training(wl, wm, kb, eva)
np.save(name_out,wm)
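# Illustrative invocation (file names and the stop rate are assumptions; the
# flags match the argparse definitions above):
#   python train_embedding_git.py -wm vectors.npy -wl word_list.pkl -kb knowledge.pkl -out tuned.npy -s 0.01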
|
YangXuefeng/SWEL
|
train_embedding_git.py
|
Python
|
mit
| 12,334
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import inspect
import os.path
import biplist
settingsFilename = inspect.getframeinfo(inspect.currentframe()).filename
settingsPath = os.path.dirname(os.path.abspath(settingsFilename))
#
# Example settings file for dmgbuild
#
# Use like this: dmgbuild -s settings.py "Test Volume" test.dmg
# You can actually use this file for your own application (not just TextEdit)
# by doing e.g.
#
# dmgbuild -s settings.py -D app=/path/to/My.app "My Application" MyApp.dmg
# .. Useful stuff ..............................................................
application = defines.get('app', '/Applications/TextEdit.app')
appname = os.path.basename(application)
def icon_from_app(app_path):
plist_path = os.path.join(app_path, 'Contents', 'Info.plist')
plist = biplist.readPlist(plist_path)
icon_name = plist['CFBundleIconFile']
icon_root, icon_ext = os.path.splitext(icon_name)
if not icon_ext:
icon_ext = '.icns'
icon_name = icon_root + icon_ext
return os.path.join(app_path, 'Contents', 'Resources', icon_name)
# .. Basics ....................................................................
# Uncomment to override the output filename
# filename = 'test.dmg'
# Uncomment to override the output volume name
# volume_name = 'Test'
# Volume format (see hdiutil create -help)
format = defines.get('format', 'UDBZ')
# Volume size
size = defines.get('size', None)
# Files to include
files = [application]
# Symlinks to create
symlinks = {'Applications': '/Applications'}
# Volume icon
#
# You can either define icon, in which case that icon file will be copied to the
# image, *or* you can define badge_icon, in which case the icon file you specify
# will be used to badge the system's Removable Disk icon
#
# icon = '/path/to/icon.icns'
badge_icon = icon_from_app(application)
# Where to put the icons
icon_locations = {
# appname: (140, 120),
# 'Applications': (500, 120)
appname: (175, 180),
'Applications': (440, 180)
}
# .. Window configuration ......................................................
# Background
#
# This is a STRING containing any of the following:
#
# #3344ff - web-style RGB color
# #34f - web-style RGB color, short form (#34f == #3344ff)
# rgb(1,0,0) - RGB color, each value is between 0 and 1
# hsl(120,1,.5) - HSL (hue saturation lightness) color
# hwb(300,0,0) - HWB (hue whiteness blackness) color
# cmyk(0,1,0,0) - CMYK color
# goldenrod - X11/SVG named color
# builtin-arrow - A simple built-in background with a blue arrow
# /foo/bar/baz.png - The path to an image file
#
# The hue component in hsl() and hwb() may include a unit; it defaults to
# degrees ('deg'), but also supports radians ('rad') and gradians ('grad'
# or 'gon').
#
# Other color components may be expressed either in the range 0 to 1, or
# as percentages (e.g. 60% is equivalent to 0.6).
# background = 'builtin-arrow'
background = os.path.join(settingsPath, 'background.tiff')
show_status_bar = False
show_tab_view = False
show_toolbar = False
show_pathbar = False
show_sidebar = False
sidebar_width = 180
# Window position in ((x, y), (w, h)) format
# window_rect = ((100, 100), (640, 280))
window_rect = ((100, 100), (600, 400))
# Select the default view; must be one of
#
# 'icon-view'
# 'list-view'
# 'column-view'
# 'coverflow'
#
default_view = 'icon-view'
# General view configuration
show_icon_preview = False
# Set these to True to force inclusion of icon/list view settings (otherwise
# we only include settings for the default view)
include_icon_view_settings = 'auto'
include_list_view_settings = 'auto'
# .. Icon view configuration ...................................................
arrange_by = None
grid_offset = (0, 0)
grid_spacing = 100
scroll_position = (0, 0)
label_pos = 'bottom' # or 'right'
text_size = 16
icon_size = 96
# .. List view configuration ...................................................
# Column names are as follows:
#
# name
# date-modified
# date-created
# date-added
# date-last-opened
# size
# kind
# label
# version
# comments
#
list_icon_size = 16
list_text_size = 12
list_scroll_position = (0, 0)
list_sort_by = 'name'
list_use_relative_dates = True
list_calculate_all_sizes = False
list_columns = ('name', 'date-modified', 'size', 'kind', 'date-added')
list_column_widths = {
'name': 300,
'date-modified': 181,
'date-created': 181,
'date-added': 181,
'date-last-opened': 181,
'size': 97,
'kind': 115,
'label': 100,
'version': 75,
'comments': 300,
}
list_column_sort_directions = {
'name': 'ascending',
'date-modified': 'descending',
'date-created': 'descending',
'date-added': 'descending',
'date-last-opened': 'descending',
'size': 'descending',
'kind': 'ascending',
'label': 'ascending',
'version': 'ascending',
'comments': 'ascending',
}
# .. License configuration .....................................................
# Text in the license configuration is stored in the resources, which means
# it gets stored in a legacy Mac encoding according to the language. dmgbuild
# will *try* to convert Unicode strings to the appropriate encoding, *but*
# you should be aware that Python doesn't support all of the necessary encodings;
# in many cases you will need to encode the text yourself and use byte strings
# instead here.
# Recognized language names are:
#
# af_ZA, ar, be_BY, bg_BG, bn, bo, br, ca_ES, cs_CZ, cy, da_DK, de_AT, de_CH,
# de_DE, dz_BT, el_CY, el_GR, en_AU, en_CA, en_GB, en_IE, en_SG, en_US, eo,
# es_419, es_ES, et_EE, fa_IR, fi_FI, fo_FO, fr_001, fr_BE, fr_CA, fr_CH,
# fr_FR, ga-Latg_IE, ga_IE, gd, grc, gu_IN, gv, he_IL, hi_IN, hr_HR, hu_HU,
# hy_AM, is_IS, it_CH, it_IT, iu_CA, ja_JP, ka_GE, kl, ko_KR, lt_LT, lv_LV,
# mk_MK, mr_IN, mt_MT, nb_NO, ne_NP, nl_BE, nl_NL, nn_NO, pa, pl_PL, pt_BR,
# pt_PT, ro_RO, ru_RU, se, sk_SK, sl_SI, sr_RS, sv_SE, th_TH, to_TO, tr_TR,
# uk_UA, ur_IN, ur_PK, uz_UZ, vi_VN, zh_CN, zh_TW
# license = {
# 'default-language': 'en_US',
# 'licenses': {
# # For each language, the text of the license. This can be plain text,
# # RTF (in which case it must start "{\rtf1"), or a path to a file
# # containing the license text. If you're using RTF,
# # watch out for Python escaping (or read it from a file).
# 'English': b'''{\\rtf1\\ansi\\ansicpg1252\\cocoartf1504\\cocoasubrtf820
# {\\fonttbl\\f0\\fnil\\fcharset0 Helvetica-Bold;\\f1\\fnil\\fcharset0 Helvetica;}
# {\\colortbl;\\red255\\green255\\blue255;\\red0\\green0\\blue0;}
# {\\*\\expandedcolortbl;;\\cssrgb\\c0\\c0\\c0;}
# \\paperw11905\\paperh16837\\margl1133\\margr1133\\margb1133\\margt1133
# \\deftab720
# \\pard\\pardeftab720\\sa160\\partightenfactor0
# \\f0\\b\\fs60 \\cf2 \\expnd0\\expndtw0\\kerning0
# \\up0 \\nosupersub \\ulnone \\outl0\\strokewidth0 \\strokec2 Test License\\
# \\pard\\pardeftab720\\sa160\\partightenfactor0
# \\fs36 \\cf2 \\strokec2 What is this?\\
# \\pard\\pardeftab720\\sa160\\partightenfactor0
# \\f1\\b0\\fs22 \\cf2 \\strokec2 This is the English license. It says what you are allowed to do with this software.\\
# \\
# }''',
# },
# 'buttons': {
# # For each language, text for the buttons on the licensing window.
# #
# # Default buttons and text are built-in for the following languages:
# #
# # English (en_US), German (de_DE), Spanish (es_ES), French (fr_FR),
# # Italian (it_IT), Japanese (ja_JP), Dutch (nl_NL), Swedish (sv_SE),
# # Brazilian Portuguese (pt_BR), Simplified Chinese (zh_CN),
# # Traditional Chinese (zh_TW), Danish (da_DK), Finnish (fi_FI),
# # Korean (ko_KR), Norwegian (nb_NO)
# #
# # You don't need to specify them for those languages; if you fail to
# # specify them for some other language, English will be used instead.
# 'en_US': (
# b'English',
# b'Agree',
# b'Disagree',
# b'Print',
# b'Save',
# b'If you agree with the terms of this license, press "Agree" to '
# b'install the software. If you do not agree, press "Disagree".'
# ),
# },
# }
|
OpenEstate/OpenEstate-Tool-Server
|
src/dmgbuild/settings.py
|
Python
|
apache-2.0
| 8,372
|
import locale
from jinja2.utils import generate_lorem_ipsum
from pelican.contents import Article, Author
from pelican.paginator import Paginator
from pelican.settings import DEFAULT_CONFIG
from pelican.tests.support import get_settings, unittest
# generate one paragraph, enclosed with <p>
TEST_CONTENT = str(generate_lorem_ipsum(n=1))
TEST_SUMMARY = generate_lorem_ipsum(n=1, html=False)
class TestPage(unittest.TestCase):
def setUp(self):
super().setUp()
self.old_locale = locale.setlocale(locale.LC_ALL)
locale.setlocale(locale.LC_ALL, 'C')
self.page_kwargs = {
'content': TEST_CONTENT,
'context': {
'localsiteurl': '',
},
'metadata': {
'summary': TEST_SUMMARY,
'title': 'foo bar',
},
'source_path': '/path/to/file/foo.ext'
}
def tearDown(self):
locale.setlocale(locale.LC_ALL, self.old_locale)
def test_save_as_preservation(self):
settings = get_settings()
# fix up pagination rules
from pelican.paginator import PaginationRule
pagination_rules = [
PaginationRule(*r) for r in settings.get(
'PAGINATION_PATTERNS',
DEFAULT_CONFIG['PAGINATION_PATTERNS'],
)
]
settings['PAGINATION_PATTERNS'] = sorted(
pagination_rules,
key=lambda r: r[0],
)
self.page_kwargs['metadata']['author'] = Author('Blogger', settings)
object_list = [Article(**self.page_kwargs),
Article(**self.page_kwargs)]
paginator = Paginator('foobar.foo', 'foobar/foo', object_list,
settings)
page = paginator.page(1)
self.assertEqual(page.save_as, 'foobar.foo')
def test_custom_pagination_pattern(self):
from pelican.paginator import PaginationRule
settings = get_settings()
settings['PAGINATION_PATTERNS'] = [PaginationRule(*r) for r in [
(1, '/{url}', '{base_name}/index.html'),
(2, '/{url}{number}/', '{base_name}/{number}/index.html')
]]
self.page_kwargs['metadata']['author'] = Author('Blogger', settings)
object_list = [Article(**self.page_kwargs),
Article(**self.page_kwargs)]
paginator = Paginator('blog/index.html', '//blog.my.site/',
object_list, settings, 1)
# The URL *has to* stay absolute (with // in the front), so verify that
page1 = paginator.page(1)
self.assertEqual(page1.save_as, 'blog/index.html')
self.assertEqual(page1.url, '//blog.my.site/')
page2 = paginator.page(2)
self.assertEqual(page2.save_as, 'blog/2/index.html')
self.assertEqual(page2.url, '//blog.my.site/2/')
def test_custom_pagination_pattern_last_page(self):
from pelican.paginator import PaginationRule
settings = get_settings()
settings['PAGINATION_PATTERNS'] = [PaginationRule(*r) for r in [
(1, '/{url}1/', '{base_name}/1/index.html'),
(2, '/{url}{number}/', '{base_name}/{number}/index.html'),
(-1, '/{url}', '{base_name}/index.html'),
]]
self.page_kwargs['metadata']['author'] = Author('Blogger', settings)
object_list = [Article(**self.page_kwargs),
Article(**self.page_kwargs),
Article(**self.page_kwargs)]
paginator = Paginator('blog/index.html', '//blog.my.site/',
object_list, settings, 1)
# The URL *has to* stay absolute (with // in the front), so verify that
page1 = paginator.page(1)
self.assertEqual(page1.save_as, 'blog/1/index.html')
self.assertEqual(page1.url, '//blog.my.site/1/')
page2 = paginator.page(2)
self.assertEqual(page2.save_as, 'blog/2/index.html')
self.assertEqual(page2.url, '//blog.my.site/2/')
page3 = paginator.page(3)
self.assertEqual(page3.save_as, 'blog/index.html')
self.assertEqual(page3.url, '//blog.my.site/')
|
getpelican/pelican
|
pelican/tests/test_paginator.py
|
Python
|
agpl-3.0
| 4,173
|
class Time(object):
def __init__(self, hours=0, minutes=0, seconds=0):
self.hours = hours
self.minutes = minutes
self.seconds = seconds
def __str__(self):
return str(self.hours) + ":" + \
str(self.minutes) + ":" + \
str(self.seconds)
|
medifle/python_6.00.1x
|
classTime.py
|
Python
|
mit
| 306
|
#
# Module implementing queues
#
# multiprocessing/queues.py
#
# Copyright (c) 2006-2008, R Oudkerk --- see COPYING.txt
#
__all__ = ['Queue', 'SimpleQueue', 'JoinableQueue']
import sys
import os
import threading
import collections
import time
import atexit
import weakref
from queue import Empty, Full
import _multiprocessing
from multiprocessing import Pipe
from multiprocessing.synchronize import Lock, BoundedSemaphore, Semaphore, Condition
from multiprocessing.util import debug, info, Finalize, register_after_fork
from multiprocessing.forking import assert_spawning
#
# Queue type using a pipe, buffer and thread
#
class Queue(object):
def __init__(self, maxsize=0):
if maxsize <= 0:
maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX
self._maxsize = maxsize
self._reader, self._writer = Pipe(duplex=False)
self._rlock = Lock()
self._opid = os.getpid()
if sys.platform == 'win32':
self._wlock = None
else:
self._wlock = Lock()
self._sem = BoundedSemaphore(maxsize)
self._after_fork()
if sys.platform != 'win32':
register_after_fork(self, Queue._after_fork)
def __getstate__(self):
assert_spawning(self)
return (self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid)
def __setstate__(self, state):
(self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid) = state
self._after_fork()
def _after_fork(self):
debug('Queue._after_fork()')
self._notempty = threading.Condition(threading.Lock())
self._buffer = collections.deque()
self._thread = None
self._jointhread = None
self._joincancelled = False
self._closed = False
self._close = None
self._send = self._writer.send
self._recv = self._reader.recv
self._poll = self._reader.poll
def put(self, obj, block=True, timeout=None):
assert not self._closed
if not self._sem.acquire(block, timeout):
raise Full
self._notempty.acquire()
try:
if self._thread is None:
self._start_thread()
self._buffer.append(obj)
self._notempty.notify()
finally:
self._notempty.release()
def get(self, block=True, timeout=None):
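        # Blocking get with no timeout takes the fast path: hold the read lock
        # until data arrives. Otherwise honour the (block, timeout) semantics,
        # polling for the remaining time and raising Empty when nothing is available.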
if block and timeout is None:
self._rlock.acquire()
try:
res = self._recv()
self._sem.release()
return res
finally:
self._rlock.release()
else:
if block:
deadline = time.time() + timeout
if not self._rlock.acquire(block, timeout):
raise Empty
try:
if not self._poll(block and (deadline-time.time()) or 0.0):
raise Empty
res = self._recv()
self._sem.release()
return res
finally:
self._rlock.release()
def qsize(self):
# Raises NotImplementedError on Mac OSX because of broken sem_getvalue()
return self._maxsize - self._sem._semlock._get_value()
def empty(self):
return not self._poll()
def full(self):
return self._sem._semlock._is_zero()
def get_nowait(self):
return self.get(False)
def put_nowait(self, obj):
return self.put(obj, False)
def close(self):
self._closed = True
self._reader.close()
if self._close:
self._close()
def join_thread(self):
debug('Queue.join_thread()')
assert self._closed
if self._jointhread:
self._jointhread()
def cancel_join_thread(self):
debug('Queue.cancel_join_thread()')
self._joincancelled = True
try:
self._jointhread.cancel()
except AttributeError:
pass
def _start_thread(self):
debug('Queue._start_thread()')
# Start thread which transfers data from buffer to pipe
self._buffer.clear()
self._thread = threading.Thread(
target=Queue._feed,
args=(self._buffer, self._notempty, self._send,
self._wlock, self._writer.close),
name='QueueFeederThread'
)
self._thread.daemon = True
debug('doing self._thread.start()')
self._thread.start()
debug('... done self._thread.start()')
# On process exit we will wait for data to be flushed to pipe.
#
# However, if this process created the queue then all
# processes which use the queue will be descendants of this
# process. Therefore waiting for the queue to be flushed
# is pointless once all the child processes have been joined.
created_by_this_process = (self._opid == os.getpid())
if not self._joincancelled and not created_by_this_process:
self._jointhread = Finalize(
self._thread, Queue._finalize_join,
[weakref.ref(self._thread)],
exitpriority=-5
)
# Send sentinel to the thread queue object when garbage collected
self._close = Finalize(
self, Queue._finalize_close,
[self._buffer, self._notempty],
exitpriority=10
)
@staticmethod
def _finalize_join(twr):
debug('joining queue thread')
thread = twr()
if thread is not None:
thread.join()
debug('... queue thread joined')
else:
debug('... queue thread already dead')
@staticmethod
def _finalize_close(buffer, notempty):
debug('telling queue thread to quit')
notempty.acquire()
try:
buffer.append(_sentinel)
notempty.notify()
finally:
notempty.release()
@staticmethod
def _feed(buffer, notempty, send, writelock, close):
debug('starting thread to feed data to pipe')
from .util import is_exiting
nacquire = notempty.acquire
nrelease = notempty.release
nwait = notempty.wait
bpopleft = buffer.popleft
sentinel = _sentinel
if sys.platform != 'win32':
wacquire = writelock.acquire
wrelease = writelock.release
else:
wacquire = None
try:
while 1:
nacquire()
try:
if not buffer:
nwait()
finally:
nrelease()
try:
while 1:
obj = bpopleft()
if obj is sentinel:
debug('feeder thread got sentinel -- exiting')
close()
return
if wacquire is None:
send(obj)
else:
wacquire()
try:
send(obj)
finally:
wrelease()
except IndexError:
pass
except Exception as e:
# Since this runs in a daemon thread the resources it uses
# may be become unusable while the process is cleaning up.
# We ignore errors which happen after the process has
# started to cleanup.
try:
if is_exiting():
info('error in queue thread: %s', e)
else:
import traceback
traceback.print_exc()
except Exception:
pass
_sentinel = object()
#
# A queue type which also supports join() and task_done() methods
#
# Note that if you do not call task_done() for each finished task then
# eventually the counter's semaphore may overflow causing Bad Things
# to happen.
#
class JoinableQueue(Queue):
def __init__(self, maxsize=0):
Queue.__init__(self, maxsize)
self._unfinished_tasks = Semaphore(0)
self._cond = Condition()
def __getstate__(self):
return Queue.__getstate__(self) + (self._cond, self._unfinished_tasks)
def __setstate__(self, state):
Queue.__setstate__(self, state[:-2])
self._cond, self._unfinished_tasks = state[-2:]
def put(self, item, block=True, timeout=None):
Queue.put(self, item, block, timeout)
self._unfinished_tasks.release()
def task_done(self):
self._cond.acquire()
try:
if not self._unfinished_tasks.acquire(False):
raise ValueError('task_done() called too many times')
if self._unfinished_tasks._semlock._is_zero():
self._cond.notify_all()
finally:
self._cond.release()
def join(self):
self._cond.acquire()
try:
if not self._unfinished_tasks._semlock._is_zero():
self._cond.wait()
finally:
self._cond.release()
#
# Simplified Queue type -- really just a locked pipe
#
class SimpleQueue(object):
def __init__(self):
self._reader, self._writer = Pipe(duplex=False)
self._rlock = Lock()
if sys.platform == 'win32':
self._wlock = None
else:
self._wlock = Lock()
self._make_methods()
def empty(self):
return not self._reader.poll()
def __getstate__(self):
assert_spawning(self)
return (self._reader, self._writer, self._rlock, self._wlock)
def __setstate__(self, state):
(self._reader, self._writer, self._rlock, self._wlock) = state
self._make_methods()
def _make_methods(self):
recv = self._reader.recv
racquire, rrelease = self._rlock.acquire, self._rlock.release
def get():
racquire()
try:
return recv()
finally:
rrelease()
self.get = get
if self._wlock is None:
# writes to a message oriented win32 pipe are atomic
self.put = self._writer.send
else:
send = self._writer.send
wacquire, wrelease = self._wlock.acquire, self._wlock.release
def put(obj):
wacquire()
try:
return send(obj)
finally:
wrelease()
self.put = put
|
MalloyPower/parsing-python
|
front-end/testsuite-python-lib/Python-3.1/Lib/multiprocessing/queues.py
|
Python
|
mit
| 10,719
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
'''
Creation of a FA file from a compacted fact int file.
@author pierre peterlongo pierre.peterlongo@inria.fr
'''
import sys
import K3000_common as kc
def index_sequences_seek(compacted_facts_fa_file_name):
'''
    Stores for each sequence header its position in the fa file. This is used later to retrieve the corresponding sequences.
'''
header_to_file_position = {}
compacted_facts_fa_file=open(compacted_facts_fa_file_name)
file_size=kc.file_size(compacted_facts_fa_file)
step=0
while(True):
if step%10000==0: kc.update_progress(compacted_facts_fa_file.tell()/file_size)
step+=1
pos=compacted_facts_fa_file.tell()
header_fa=compacted_facts_fa_file.readline()
if not header_fa: break
        header_fa=header_fa.strip().split()[0] # remove the starting and ending positions from the headers. TODO use them for providing the overlap length between nodes.
compacted_facts_fa_file.readline().strip() # dont care
header_to_file_position[kc.generate_header(header_fa[1:])]=pos
# print(header_fa[1:]," pos ",pos)
kc.update_progress(1)
compacted_facts_fa_file.close()
return header_to_file_position
def yield_occurring_positions_reverse(k,kmer, seq):
for i in range(len(seq)-k,-1,-1):
if seq[i:i+k] == kmer:
# if kc.hamming_near_perfect(seq[i:i+k],kmer):
yield i
def overlap_length(seqA, seqB):
'''
    For two UPPER CASE sequences that overlap by at least k characters, return the length of the largest overlap with Hamming distance < 5
1/ find on seqB all occurrence positions of the last kmer of seqA
2/ check for each position (from the biggest) that the overlap is perfect
3/ return the length of the biggest overlap.
'''
k=13
last_seqA_kmer=seqA[-k:]
# print("seqA", seqA)
# print("seqB", seqB)
# print("last_seqA_kmer", last_seqA_kmer)
    error_code = -1
    for i in yield_occurring_positions_reverse(k,last_seqA_kmer, seqB):
        if len(seqA[-i-k:]) != len(seqB[:i+k]): # a sequence is included into another one, we do not print those edges
            error_code = -2
        if i+k > len(seqA):
            error_code = -2
        if kc.hamming_near_perfect(seqA[-i-k:], seqB[:i+k]):
            return len(seqA[-i-k:])
        # else: print("what")
    # TODO: here it may happen that two sequences have a bad overlap. This is due to the
    # fact that their N numbers were different because of read sequencing errors while phasing with kissreads
    # One may correct the N number of the lowest covered sequence for instance.
    return error_code
# print(overlap_length("TCAACTACTTATTTGTCGTACAAAACTGTCCCGTACATAGGATGATCTTATTCCCGTACCGGATTTCGTACACAATAACAGGAACAATGTCGATATAAAATTTTCTTCAAATGGCTTCAACCCTTACATTATTATGGCAGACGATGTAAACTCTCTAGTCTTCTCAACTCTATTAATAATACATAGTAGTAGCTATTCAGCCATTTTAAAAACGCAATACAACGTTTGTCCCGTAATATT","CTACTTATTTGTCGTACAAAACTGTCCCGTACATAGGATGATCTTATTCCTGTACCGGATTTCGTACACAATAACAGGAACAATGTCGATATAAAATTTTCTTCAAATGGCTTCAACCCTTACATTATTATGGCAGACGACGTAAACTCTCTAGTCTTCTCAACTCTATTAATAATACATAGTAGTAGCTATTCAGCCATTTTAAAAACGCAATACAACGTTTGTCCCGTAATAT"))
def modify_gfa_file(gfa_file_name, compacted_facts_fa_file_name, header_to_file_position):
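    # Rewrite the input GFA on stdout: 'S' lines get their allele-id field replaced
    # by the actual nucleotide sequence (the ids are kept in a new AS: field), and
    # true overlap 'L' edges get their overlap length recomputed from the node sequences.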
print ("H\t#################")
print ("H\t# GFA of variants")
print ("H\t#################")
print ("H\t# Nodes are (compacted) facts with their read mapping coverage. Eg. \"S 1 gtGCAATAAGAATTGTCTTTCTTATAATAATTGTCCAACTTAGgGTCAATTTCTGTACaaacaaCACCATCCAAt AS:-577h;-977l;1354l; SP:0_44;11_64;32_75; BP:0_41;-26_41;-25_41; FC:i:52 min:17 max:410 mean:180.0 AC:113;17;410\".")
print ("H\t# * field AS stands for \"Allele Set\". It reminds the ids of the variants that compose this fact and that were used for reconstructing the genomic sequence")
print ("H\t# * field SP stands for \"Sequence Position\". It indicates for each allele of the fact its starting and ending position in the ACGT sequence")
print ("H\t# * field BP stands for \"Bubble Position\". For each allele of the fact it indicates:")
print ("H\t# - first: the relative position of first nucleotide of the bubble with respect to the position of the last nucleotide of the bubble of the previous allele. This value is equal to zero for the first allele")
print ("H\t# - second: the length of the bubble of the allele")
print ("H\t# * field EV strands for \"Extreme Variant\". Facts having at least one variant with no input or no output edge are considered as facts having an extreme variants. Their value is set to EV:1. Else, the value is set to EV:0")
print ("H\t# * field FC is the coverage of the fact, as provided by the total number of reads that phased at least two alleles of the fact")
print ("H\t# - first: the relative position of first nucleotide of the bubble with respect to the position of the last nucleotide of the bubble of the previous allele. This value is equal to zero for the first allele")
print ("H\t# - second: the length of the bubble of the allele")
print ("H\t# * fields min, max, and mean stand resp. for the min, max and mean of the read coverage of all alleles")
print ("H\t# * field AC stands for \"Allele Coverage\". The number of reads that map each allele is given in the same order as the variant ids (eg. \"17;410;113;\" are respectively, the coverages of variants \"-577h;-977l;1354l\")")
print ("H\t# Four types of edges:")
print ("H\t# 1. Overlap between facts, Overlap length is >0. Eg, \"L 1 - 29384 + 8M OFL:i:2\"")
print ("H\t# \"S 1 ACGGACGGACCGT RC:i:24\", and")
print ("H\t# \"S 29384 CGGACCGTACGGACGATCCG; RC:i:43\".")
print ("H\t# OLF field reminds the length of the fact overlap length (number of variants overlapping)")
print ("H\t# 2. Facts linked by paired end reads. Eg \"L 10735 + 29384 + 0M FC:i:5\".")
print ("H\t# The coverage (FC field) indicates the number of pairend read linking the two facts")
print ("H\t# These links have a fake overlap of length 0.")
print ("H\t# 3. Facts linked by unitigs. The unitig finishing a fact overlaps the unitig starting another fact. Eg \"L 19946 + 11433 + -1M\".")
print ("H\t# These links are directed and validate the facts orientation. ")
print ("H\t# These links have a fake overlap of length -1.")
print ("H\t# 4. Facts sharing at least one SNP identifier.")
print ("H\t# These links have an overlap of length -2.")
gfa_file=open(gfa_file_name)
compacted_facts_fa_file=open(compacted_facts_fa_file_name)
node_id_to_sequence={}
file_size=kc.file_size(gfa_file)
step=0
while(True):
if step%10000==0: kc.update_progress(gfa_file.tell()/file_size)
step+=1
gfa_line=gfa_file.readline()
if not gfa_line: break
gfa_line.strip()
if gfa_line[0]=='H': continue #Header was changed
if gfa_line[0]=='S': #Deal with sequences
#FROM:
#S 1 -577h;-977l;1354l; SP:0_44;11_64;32_75; BP:0_41;-26_41;-25_41; EV:0 FC:i:52 min:17 max:410 mean:180.0 AC:410;17;113;
#TO
#S 1 gtGCAATAAGAATTGTCTTTCTTATAATAATTGTCCAACTTAGgGTCAATTTCTGTACaaacaaCACCATCCAAt SP:0_44;11_64;32_75; BP:0_41;-26_41;-25_41; EV:0 FC:i:52 AS:-577h;-977l;1354l; min:17 max:410 mean:180.0 AC:17;410;113;
gfa_line=gfa_line.split()
assert gfa_line[2] in header_to_file_position, gfa_line[2]+" is not in header_to_file_position"
compacted_facts_fa_file.seek(header_to_file_position[gfa_line[2]])
compacted_facts_fa_file.readline().strip() # dont care
sequence_fa=compacted_facts_fa_file.readline().strip()
# assert gfa_line[2] == allele_header,gfa_line[2]+" is not "+allele_header+" "+header_fa[1:]
            node_id_to_sequence[gfa_line[1]]=sequence_fa #TODO: optimize this to avoid memory usage. One may store the position of the node in the file and retrieve the sequence later
print(gfa_line[0]+"\t"+gfa_line[1]+"\t"+sequence_fa+"\tAS:"+gfa_line[2]+"\t"+gfa_line[3]+"\t"+gfa_line[4]+"\t"+gfa_line[5]+"\t"+gfa_line[6]+"\t"+gfa_line[7]+"\t"+gfa_line[8]+"\t"+gfa_line[9]+"\t"+gfa_line[10])
continue
if gfa_line[0]=='L':
split_gfa_line=gfa_line.split()
            if split_gfa_line[1] == split_gfa_line[3]: # do not print self loops
continue
# print (split_gfa_line)
if split_gfa_line[5]=="0M" or split_gfa_line[5]=="-1M" or split_gfa_line[5]=="-2M": # non overlapping edges, we simply write them
print(gfa_line.strip())
continue
# if we are here, this is a true overlapping edge: L 3 + 255 - 2M
            # we need to retrieve the sequences of the two nodes
# print(split_gfa_line)
seqA = node_id_to_sequence[split_gfa_line[1]].upper()
seqB = node_id_to_sequence[split_gfa_line[3]].upper()
if split_gfa_line[2]=='-': seqA=kc.get_reverse_complement(seqA)
if split_gfa_line[4]=='-': seqB=kc.get_reverse_complement(seqB)
OL = overlap_length(seqA,seqB)
if OL>-1:
print (split_gfa_line[0]+"\t"+split_gfa_line[1]+"\t"+split_gfa_line[2]+"\t"+split_gfa_line[3]+"\t"+split_gfa_line[4]+"\t"+str(OL)+"M\tOFL:i:"+split_gfa_line[5][:-1])
continue
            print(gfa_line.strip()) # should not happen
kc.update_progress(1)
gfa_file.close()
compacted_facts_fa_file.close()
def main():
'''
Produces a gfa file replacing the node content from int ids of alleles to their sequence
'''
    if len(sys.argv) != 3:
sys.stderr.write("Usage: python K3000_node_ids_to_node_sequences.py graph_plus.gfa compacted_facts.fa > graph_final.gfa\n")
sys.exit(0)
sys.stderr.write("Indexing sequence positions\n")
header_to_file_position = index_sequences_seek(sys.argv[2])
sys.stderr.write("Writing the updated gfa file\n")
modify_gfa_file(sys.argv[1],sys.argv[2], header_to_file_position)
if __name__ == "__main__":
main()
|
GATB/DiscoSnp
|
scripts/k3000/K3000_node_ids_to_node_sequences.py
|
Python
|
agpl-3.0
| 10,467
|
"""
"""
def decode(ciphertext):
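    # Substitution cipher: each ciphertext character's ordinal is used as an index
    # into `key`, and the first four ciphertext characters are echoed back verbatim
    # as a prefix of the decoded string.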
key = ''.join([(" " * 32), "xz.~^7;>od-DF )}uS1[=cU`mGWis3MT4{N%9Zq2/Ew(&+",
"vkV:l\!hKp8fCOAR6?0|nYbI_LtPB'H<Q$Xy\"aJ@g#j5],*re"])
plaintext = ''.join([key[ord(x)] for x in ciphertext])
return (ciphertext[0:4] + "... " + plaintext)
print(decode('P~})sbs')) #"Verdana"
print(decode('ZU~5O-11g-5~}h;Y;5sh~""')) #"check SSL certificate.."
print(decode('-----iR~s<~-Js;h""""""')) #"please wait......"
print(decode('UhhWQHHh}0URs}bs*8s50}s""5!H;8vHR(v(""v;Y')) #"http://truhlarna-macura..cz/img/logo..gif"
print(decode('UhhWQHHv0s8sh~b""5(8HW}0~dsHR(v(""v;Y')) #"http://guamaten..com/prueba/logo..gif"
print(decode('UhhWQHHW}~Y8sF0s""5(8Hf50<0);HR(v(""v;Y')) #"http://prefmaqua..com/_cusudi/logo..gif"
print(decode('B(-;bh~}b~h-s55~<<""-?0}b-(YY-sbq-Y;}~JsRR-(}-sbh;*N;}0<-<(YhJs}~-sb)-h}q-svs;b""')) #"No internet access.. Turn off any firewall or anti-virus software and try again..""
print(decode('I}}(}')) #"Error"
print(decode('?U~-Y;R~-;<-5(}}0Wh~)-sb)-5sbb(h-d~-(W~b~)')) #"The file is corrupted and cannot be opened"
print(decode('15};Wh;bv"",;R~1q<h~8[dx~5h')) #"Scripting..FileSystemObject"
print(decode('u~5U(-(YY')) #"@echo off"
print(decode('W;bu-G2""2G""@@""G=-*b-2-*J-G```-\'-B6g')) #"pin@ 21.12.44.23 -n 1 -w 2000 NUL"
print(decode('<hs}h-')) #"start"
print(decode('[W~b')) #"Open"
|
martyn-smith/ursnif_decoder
|
acuzamu.py
|
Python
|
lgpl-3.0
| 1,362
|
#! /usr/bin/env python
"""
Module with post-processing related functions called from within the NFC
algorithm.
"""
__all__ = ['cube_planet_free']
import numpy as np
from ..phot import cube_inject_companions
import math
from matplotlib.pyplot import plot, xlim, ylim, hold, axes, gca, show
def cube_planet_free(planet_parameter, cube, angs, psfn, plsc):
"""
Return a cube in which we have injected negative fake companion at the
position/flux given by planet_parameter.
Parameters
----------
planet_parameter: numpy.array or list
The (r, theta, flux) for all known companions.
cube: numpy.array
The cube of fits images expressed as a numpy.array.
angs: numpy.array
The parallactic angle fits image expressed as a numpy.array.
    psfn: numpy.array
The scaled psf expressed as a numpy.array.
plsc: float
The platescale, in arcsec per pixel.
Returns
-------
cpf : numpy.array
The cube with negative companions injected at the position given in
planet_parameter.
"""
cpf = np.zeros_like(cube)
planet_parameter = np.array(planet_parameter)
for i in range(planet_parameter.shape[0]):
if i == 0:
cube_temp = cube
else:
cube_temp = cpf
cpf = cube_inject_companions(cube_temp, psfn, angs,
flevel=-planet_parameter[i, 2],
plsc=plsc, rad_dists=[planet_parameter[i, 0]],
n_branches=1, theta=planet_parameter[i, 1],
verbose=False)
return cpf
def radial_to_eq(r=1, t=0, rError=0, tError=0, display=False):
"""
Convert the position given in (r,t) into \delta RA and \delta DEC, as
well as the corresponding uncertainties.
t = 0 deg (resp. 90 deg) points toward North (resp. East).
Parameters
----------
r: float
The radial coordinate.
t: float
The angular coordinate.
rError: float
The error bar related to r.
tError: float
The error bar related to t.
display: boolean, optional
If True, a figure illustrating the error ellipse is displayed.
Returns
-------
out : tuple
((RA, RA error), (DEC, DEC error))
"""
ra = (r * np.sin(math.radians(t)))
dec = (r * np.cos(math.radians(t)))
u, v = (ra, dec)
nu = np.mod(np.pi/2.-math.radians(t), 2*np.pi)
a, b = (rError,r*np.sin(math.radians(tError)))
beta = np.linspace(0,2*np.pi,5000)
x, y = (u + (a * np.cos(beta) * np.cos(nu) - b * np.sin(beta) * np.sin(nu)),
v + (b * np.sin(beta) * np.cos(nu) + a * np.cos(beta) * np.sin(nu)))
raErrorInf = u - np.amin(x)
raErrorSup = np.amax(x) - u
decErrorInf = v - np.amin(y)
decErrorSup = np.amax(y) - v
if display:
hold(True)
plot(u,v,'ks',x,y,'r')
plot((r+rError) * np.cos(nu), (r+rError) * np.sin(nu),'ob',
(r-rError) * np.cos(nu), (r-rError) * np.sin(nu),'ob')
plot(r * np.cos(nu+math.radians(tError)),
r*np.sin(nu+math.radians(tError)),'ok')
plot(r * np.cos(nu-math.radians(tError)),
r*np.sin(nu-math.radians(tError)),'ok')
plot(0,0,'og',np.cos(np.linspace(0,2*np.pi,10000)) * r,
np.sin(np.linspace(0,2*np.pi,10000)) * r,'y')
plot([0,r*np.cos(nu+math.radians(tError*0))],
[0,r*np.sin(nu+math.radians(tError*0))],'k')
axes().set_aspect('equal')
lim = np.amax([a,b]) * 2.
xlim([ra-lim,ra+lim])
ylim([dec-lim,dec+lim])
gca().invert_xaxis()
show()
return ((ra,np.mean([raErrorInf,raErrorSup])),
(dec,np.mean([decErrorInf,decErrorSup])))
def cart_to_polar(y, x, ceny=0, cenx=0):
"""
Convert cartesian into polar coordinates (r,theta) with
respect to a given center (cenx,ceny).
Parameters
----------
x,y: float
The cartesian coordinates.
Returns
-------
out : tuple
The polar coordinates (r,theta) with respect to the (cenx,ceny).
Note that theta is given in degrees.
"""
r = np.sqrt((y-ceny)**2 + (x-cenx)**2)
theta = np.degrees(np.arctan2(y-ceny, x-cenx))
return (r,np.mod(theta,360))
def polar_to_cart(r, theta, ceny=0, cenx=0):
"""
Convert polar coordinates with respect to the center (cenx,ceny) into
cartesian coordinates (x,y) with respect to the bottom left corner of the
    image.
Parameters
----------
r,theta: float
The polar coordinates.
Returns
-------
out : tuple
The cartesian coordinates (x,y) with respect to the bottom left corner
        of the image.
"""
x = r*np.cos(np.deg2rad(theta)) + cenx
y = r*np.sin(np.deg2rad(theta)) + ceny
return (x,y)
def ds9index_to_polar(y, x, ceny=0, cenx=0):
"""
Convert pixel index read on image displayed with DS9 into polar coordinates
(r,theta) with respect to a given center (cenx,ceny).
Note that ds9 index (x,y) = Python matrix index (y,x). Furthermore, when an
image M is displayed with DS9, the coordinates of the center of the pixel
associated with M[0,0] is (1,1). Then, there is a shift of (0.5, 0.5) of the
center of the coordinate system. As a conclusion, when you read (x_ds9, y_ds9)
    on an image displayed with DS9, the corresponding position is (y-0.5, x-0.5)
and the associated pixel value is M(np.floor(y)-1,np.floor(x)-1).
Parameters
----------
x,y: float
The pixel index in DS9
Returns
-------
out : tuple
The polar coordinates (r,theta) with respect to the (cenx,ceny).
Note that theta is given in degrees.
"""
r = np.sqrt((y-0.5-ceny)**2 + (x-0.5-cenx)**2)
theta = np.degrees(np.arctan2(y-0.5-ceny, x-0.5-cenx))
return (r,np.mod(theta,360))
def polar_to_ds9index(r, theta, ceny=0, cenx=0):
"""
Convert position (r,theta) in an image with respect to a given center
(cenx,ceny) into position in the image displayed with DS9.
Note that ds9 index (x,y) = Python matrix index (y,x). Furthermore, when an
image M is displayed with DS9, the coordinates of the center of the pixel
associated with M[0,0] is (1,1). Then, there is a shift of (0.5, 0.5) of the
center of the coordinate system. As a conclusion, when you read (x_ds9, y_ds9)
    on an image displayed with DS9, the corresponding position is (y-0.5, x-0.5)
and the associated pixel value is M(np.floor(y)-1,np.floor(x)-1).
Parameters
----------
    r,theta: float
        The polar coordinates. Note that theta is given in degrees.
Returns
-------
out : tuple
        The pixel index (x_ds9, y_ds9) of the corresponding position in the
        image displayed with DS9.
"""
x_ds9 = r*np.cos(np.deg2rad(theta)) + 0.5 + cenx
y_ds9 = r*np.sin(np.deg2rad(theta)) + 0.5 + ceny
return (x_ds9,y_ds9)
|
henry-ngo/VIP
|
vip_hci/negfc/utils_negfc.py
|
Python
|
mit
| 7,251
|
"""
@file
@brief Subpart related to the documentation generation.
"""
from .conf_path_tools import find_graphviz_dot
from .default_conf import set_sphinx_variables, custom_setup
from .helpgen_exceptions import HelpGenException, ImportErrorHelpGen, HelpGenConvertError
from .help_usage import get_help_usage
from .pandoc_helper import latex2rst
from .process_notebook_api import nb2slides, nb2html, nb2rst
from .rst_converters import rst2html, docstring2html, rst2rst_folder
from .sphinx_helper import sphinx_add_scripts
# Disable to speed up import time.
# from .sphinx_main import generate_help_sphinx, process_notebooks
from .utils_sphinx_config import NbImage
from .utils_pywin32 import import_pywin32
|
sdpython/pyquickhelper
|
src/pyquickhelper/helpgen/__init__.py
|
Python
|
mit
| 705
|
"""
WSGI config for myproject project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")
from django.core.wsgi import get_wsgi_application # flake8: noqa
application = get_wsgi_application()
|
Perkville/django-tastypie
|
docs/code/myproject/wsgi.py
|
Python
|
bsd-3-clause
| 409
|
# -*- coding: UTF-8 -*-
from scheduler import Scheduler
from state import StateMachine
import datetime
import logging
import timer
import asyncio
class Engine:
"""Core of the application"""
def __init__(self, channels):
"""
:param channels: a list of Channel object
"""
self._channels = channels
self._statemachine = {}
self.__savestartdate = {}
self.__saveenddate = {}
self._currentstate = {}
self._timer = {}
for ch in self._channels:
self._statemachine[ch.nb] = StateMachine()
self._statemachine[ch.nb].register("NotRunning", self._not_running, [ch])
self._statemachine[ch.nb].register("Running", self._running, [ch])
self._statemachine[ch.nb].register("ManualOn", self._manual_on, [ch])
self._statemachine[ch.nb].register("ManualOff", self._manual_off, [ch])
self._statemachine[ch.nb].setState("NotRunning")
self._currentstate[ch.nb] = {'nb': ch.nb, 'state': "NotRunning"}
self._timer[ch.nb] = timer.Timer()
self._oldstate = self._currentstate.copy()
self._sched = Scheduler(self.run)
self._logger = logging.getLogger('sprinkler')
self._event_new_state = asyncio.Event()
def get_event_new_state(self):
return self._event_new_state
@staticmethod
def get_datetime_now():
return datetime.datetime.now()
def run(self):
self._logger.debug("Running engine...")
for ch in self._channels:
self._statemachine[ch.nb].run()
def _manual_on(self, channel):
""" Force running """
channel.running = True
if channel.manual == "OFF":
self._logger.info(f"Channel {channel.name} ({channel.nb}) forced OFF")
self._statemachine[channel.nb].setState("ManualOff")
self._save_channel_state(channel.nb, "ManualOff")
self._statemachine[channel.nb].run()
elif channel.manual == "AUTO":
self._logger.info(f"Channel {channel.name} ({channel.nb}) set in program mode")
self._statemachine[channel.nb].setState("NotRunning")
self._save_channel_state(channel.nb, "NotRunning")
self._statemachine[channel.nb].run()
def _manual_off(self, channel):
""" Force stop """
channel.running = False
if channel.manual == "ON":
self._logger.info(f"Channel '{channel.name}' ({channel.nb}) forced ON")
self._statemachine[channel.nb].setState("ManualOn")
self._statemachine[channel.nb].run()
elif channel.manual == "AUTO":
self._logger.info(f"Channel '{channel.name}' ({channel.nb}) set in program mode")
self._statemachine[channel.nb].setState("NotRunning")
self._save_channel_state(channel.nb, "NotRunning")
self._statemachine[channel.nb].run()
def _running(self, channel):
""" When channel is running """
if channel.manual == "OFF":
self._logger.info(f"Channel '{channel.name}' ({channel.nb}) forced OFF")
self._statemachine[channel.nb].setState("ManualOff")
self._save_channel_state(channel.nb, "ManualOff")
self._statemachine[channel.nb].run()
elif channel.manual == "ON":
self._logger.info(f"Channel '{channel.name}' ({channel.nb}) forced ON")
self._statemachine[channel.nb].setState("ManualOn")
self._statemachine[channel.nb].run()
else:
channel_status = []
if channel.isenable:
for prog in channel.progs:
if prog.isactive:
day = self.get_datetime_now().weekday()
if prog.get_one_day(day):
# Programme is active for today
now = self.get_datetime_now()
self._logger.debug(f"{channel.name}: Start date: {self.__savestartdate[channel.nb].isoformat()}")
self._logger.debug(f"{channel.name} End date: {self.__saveenddate[channel.nb].isoformat()}")
self._logger.debug(f"Now: {now.isoformat()}")
channel_status.append(
self.__savestartdate[channel.nb] <= now < self.__saveenddate[channel.nb])
if True in channel_status:
channel.running = True
self._save_channel_state(channel.nb, "Running")
else:
self._logger.info(f"Channel '{channel.name}' ({channel.nb}) is now OFF")
self._statemachine[channel.nb].setState("NotRunning")
self._save_channel_state(channel.nb, "NotRunning")
self._statemachine[channel.nb].run()
channel.running = False
def _not_running(self, channel):
""" When channel is not running """
if channel.manual == "OFF":
self._logger.info(f"Channel {channel.name} ({channel.nb}) forced OFF")
self._statemachine[channel.nb].setState("ManualOff")
self._save_channel_state(channel.nb, "ManualOff")
self._statemachine[channel.nb].run()
elif channel.manual == "ON":
self._logger.info(f"Channel {channel.name} ({channel.nb}) forced ON")
self._statemachine[channel.nb].setState("ManualOn")
self._statemachine[channel.nb].run()
else:
channel_status = []
if channel.isenable:
for prog in channel.progs:
if prog.isactive:
day = self.get_datetime_now().weekday()
if prog.get_one_day(day):
# Programme is active for today
now = self.get_datetime_now()
start = prog.stime.startDate(now)
end = prog.stime.endDate(now)
self._logger.debug(f"{channel.name} Start date: {start.isoformat()}")
self._logger.debug(f"{channel.name} End date: {end.isoformat()}")
self._logger.debug(f"Now: {now.isoformat()}")
channel_status.append(start <= now < end)
if True in channel_status:
self._logger.info(f"Channel '{channel.name}' ({channel.nb}) is now ON ")
self._statemachine[channel.nb].setState("Running")
self._save_channel_state(channel.nb, "Running")
# save start and end date to prevent false detection around
# midnight
self.__savestartdate[channel.nb] = start
self.__saveenddate[channel.nb] = end
channel.running = True
self._statemachine[channel.nb].run()
else:
channel.running = False
self._save_channel_state(channel.nb, "NotRunning")
def _save_channel_state(self, channel_nb, state, duration=0):
# replace current value
if state == "ManualOn":
self._currentstate[channel_nb] = {'nb': channel_nb, 'state': state, 'duration': duration}
else:
self._currentstate[channel_nb] = {'nb': channel_nb, 'state': state}
# Notify if there is some change
if self._currentstate[channel_nb] != self._oldstate[channel_nb]:
self._event_new_state.set()
self._oldstate[channel_nb] = self._currentstate[channel_nb]
def channel_forced(self, nb, action, duration=0):
""" Set channel action
:param nb: channel number
:param action: channel action. "ON", "OFF", "AUTO"
:param duration: when action is ON, the duration in minutes of the sprinkler"""
for ch in self._channels:
if nb == ch.nb:
if action in ("OFF", "AUTO"):
self._logger.info(f"Channel {ch.name} ({ch.nb}) forced to {action}")
if action == "OFF":
self._save_channel_state(nb, "ManualOff")
else:
self._save_channel_state(nb, "NotRunning")
self._timer[ch.nb].cancel()
ch.manual = action
elif action == "ON" and duration != 0:
self._logger.info(f"Channel {ch.name} ({ch.nb}) forced to ON for {duration} minutes")
self._save_channel_state(nb, "ManualOn", duration)
# Remove all already forced sprinkler
self._timer[ch.nb].cancel()
self._timer[ch.nb] = timer.Timer(duration*60, self._stop_ch_after_delay, args=(ch.nb,))
ch.manual = "ON"
self.run()
async def _stop_ch_after_delay(self, nb):
""" Callback called to switch channel ch to AUTO after a delay """
self._logger.info(f"Channel n°{nb}: end of forced ON")
for ch in self._channels:
if nb == ch.nb:
ch.manual = "AUTO"
self.run()
def get_channel_state(self):
return [x for x in self._currentstate.values()]
def stop(self):
for nb in self._timer:
self._timer[nb].cancel()
self._sched.cancel()
|
pade/sprinkler
|
src/engine.py
|
Python
|
gpl-3.0
| 9,380
|
from sqlalchemy import (
Column,
Float,
Index,
Integer,
Unicode,
UnicodeText
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import (
scoped_session,
sessionmaker
)
from zope.sqlalchemy import ZopeTransactionExtension
import time
DBSession = scoped_session(sessionmaker(extension=ZopeTransactionExtension()))
Base = declarative_base()
### TODO: group should be called group_id. id should be a foreign key.
class Users(Base):
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
name = Column(Unicode(100))
group = Column(Integer)
password = Column(UnicodeText)
def __init__(self, name, group, password):
self.name = name
self.group = group
self.password = password
### TODO: authorid is a foreign key. categoryid is a foreign key.
class Posts(Base):
__tablename__ = 'posts'
id = Column(Integer, primary_key=True)
date = Column(Float)
title = Column(UnicodeText)
authorid = Column(Integer)
categoryid = Column(Integer)
post = Column(UnicodeText)
def __init__(self, title, authorid, categoryid, post):
self.title = title
self.date = time.time()
self.authorid = authorid
self.categoryid = categoryid
self.post = post
### TODO: id is a foreign key.
class Categories(Base):
__tablename__ = 'categories'
id = Column(Integer, primary_key=True)
name = Column(UnicodeText)
def __init__(self, name):
self.name = name
|
zmarvel/coffeespot
|
coffeespot/models/tables.py
|
Python
|
gpl-2.0
| 1,545
|
import celery
def test_run(django_scheduler, django_schedule):
django_scheduler.set_task()
assert(isinstance(django_scheduler.celery_task, celery.Task))
def test_add_schedule(django_scheduler, django_schedule):
assert(not django_scheduler.jobs)
django_scheduler.add(django_schedule)
assert(django_scheduler.jobs)
def test_remove_schedule(django_scheduler, django_schedule):
django_scheduler.add(django_schedule)
assert(django_scheduler.jobs)
django_scheduler.remove(django_schedule)
assert(not django_scheduler.jobs)
|
kuc2477/news
|
tests/test_scheduler.py
|
Python
|
mit
| 559
|
# coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
#
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function, unicode_literals
import datetime
import sys
from six.moves import urllib
import sickbeard
from sickbeard.common import Quality, USER_AGENT
from sickrage.helper.common import dateTimeFormat
class SickBeardURLopener(urllib.request.FancyURLopener, object):
version = USER_AGENT
class SearchResult(object): # pylint: disable=too-few-public-methods, too-many-instance-attributes
"""
Represents a search result from an indexer.
"""
def __init__(self, episodes):
self.provider = None
# release show object
self.show = None
# URL to the NZB/torrent file
self.url = ''
# used by some providers to store extra info associated with the result
self.extraInfo = []
# list of TVEpisode objects that this result is associated with
self.episodes = episodes
# quality of the release
self.quality = Quality.UNKNOWN
# release name
self.name = ''
# size of the release (-1 = n/a)
self.size = -1
# release group
self.release_group = ''
# version
self.version = -1
# hash
self.hash = None
# content
self.content = None
self.resultType = ''
def __str__(self):
if self.provider is None:
return 'Invalid provider, unable to print self'
my_string = '{0} @ {1}\n'.format(self.provider.name, self.url)
my_string += 'Extra Info:\n'
for extra in self.extraInfo:
my_string += ' {0}\n'.format(extra)
my_string += 'Episodes:\n'
for ep in self.episodes:
my_string += ' {0}\n'.format(ep)
my_string += 'Quality: {0}\n'.format(Quality.qualityStrings[self.quality])
my_string += 'Name: {0}\n'.format(self.name)
my_string += 'Size: {0}\n'.format(self.size)
my_string += 'Release Group: {0}\n'.format(self.release_group)
return my_string
def fileName(self):
return '{0}.{1}'.format(self.episodes[0].prettyName(), self.resultType)
class NZBSearchResult(SearchResult): # pylint: disable=too-few-public-methods
"""
Regular NZB result with an URL to the NZB
"""
def __init__(self, episodes):
super(NZBSearchResult, self).__init__(episodes)
self.resultType = 'nzb'
class NZBDataSearchResult(SearchResult): # pylint: disable=too-few-public-methods
"""
NZB result where the actual NZB XML data is stored in the extraInfo
"""
def __init__(self, episodes):
super(NZBDataSearchResult, self).__init__(episodes)
self.resultType = 'nzbdata'
class TorrentSearchResult(SearchResult): # pylint: disable=too-few-public-methods
"""
Torrent result with an URL to the torrent
"""
def __init__(self, episodes):
super(TorrentSearchResult, self).__init__(episodes)
self.resultType = 'torrent'
class AllShowsListUI(object): # pylint: disable=too-few-public-methods
"""
This class is for indexer api.
Instead of prompting with a UI to pick the desired result out of a
list of shows it tries to be smart about it based on what shows
are in SickRage.
"""
def __init__(self, config, log=None):
self.config = config
self.log = log
def selectSeries(self, all_results):
search_results = []
# get all available shows
if all_results and 'searchterm' in self.config:
show_id_list = {int(x.indexerid) for x in sickbeard.showList if x}
for curShow in all_results:
if curShow in search_results:
continue
if 'seriesname' not in curShow:
continue
try:
# We need to know if it's in our show list already
curShow['in_show_list'] = int(curShow.get('id')) in show_id_list
                except Exception: # If it doesn't have an id, we can't use it anyway.
continue
if 'firstaired' not in curShow:
curShow['firstaired'] = 'Unknown'
if curShow not in search_results:
search_results += [curShow]
return search_results
class ShowListUI(object): # pylint: disable=too-few-public-methods
"""
This class is for tvdb-api.
Instead of prompting with a UI to pick the desired result out of a
list of shows it tries to be smart about it based on what shows
are in SickRage.
"""
def __init__(self, config, log=None):
self.config = config
self.log = log
@staticmethod
def selectSeries(all_results):
# try to pick a show that's in my show list
show_id_list = {int(x.indexerid) for x in sickbeard.showList if x}
for curShow in all_results:
try:
if int(curShow.get('id')) in show_id_list:
return curShow
except Exception:
pass
# if nothing matches then return first result
return all_results[0]
class Proper(object): # pylint: disable=too-few-public-methods, too-many-instance-attributes
def __init__(self, name, url, date, show):
self.name = name
self.url = url
self.date = date
self.provider = None
self.quality = Quality.UNKNOWN
self.release_group = None
self.version = -1
self.show = show
self.indexer = None
self.indexerid = -1
self.season = -1
self.episode = -1
self.scene_season = -1
self.scene_episode = -1
def __str__(self):
return '{date} {name} {season}x{episode} of {series_id} from {indexer}'.format(
date=self.date, name=self.name, season=self.season, episode=self.episode,
series_id=self.indexerid, indexer=sickbeard.indexerApi(self.indexer).name)
class ErrorViewer(object):
"""
Keeps a static list of UIErrors to be displayed on the UI and allows
the list to be cleared.
"""
errors = []
def __init__(self):
ErrorViewer.errors = []
@staticmethod
def add(error):
ErrorViewer.errors = [e for e in ErrorViewer.errors if e.message != error.message]
ErrorViewer.errors.append(error)
@staticmethod
def clear():
ErrorViewer.errors = []
@staticmethod
def get():
return ErrorViewer.errors
class WarningViewer(object):
"""
Keeps a static list of (warning) UIErrors to be displayed on the UI and allows
the list to be cleared.
"""
errors = []
def __init__(self):
WarningViewer.errors = []
@staticmethod
def add(error):
WarningViewer.errors = [e for e in WarningViewer.errors if e.message != error.message]
WarningViewer.errors.append(error)
@staticmethod
def clear():
WarningViewer.errors = []
@staticmethod
def get():
return WarningViewer.errors
class UIError(object): # pylint: disable=too-few-public-methods
"""
Represents an error to be displayed in the web UI.
"""
def __init__(self, message):
self.title = sys.exc_info()[-2] or message
self.message = message
self.time = datetime.datetime.now().strftime(dateTimeFormat)
|
b0ttl3z/SickRage
|
sickbeard/classes.py
|
Python
|
gpl-3.0
| 8,105
|
import sys
import os
import pickle
import re
import getpass
from mechanicalsoup import Browser
from .config import CONFIG_DIR_NAME
def login(username=None, password=None):
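    # Log in to Kaggle via MechanicalSoup and cache the authenticated Browser
    # (together with the credentials used) in <home>/<CONFIG_DIR_NAME>/browser.pickle
    # so later invocations can reuse the session without re-authenticating.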
if username is None:
username = input('Please provide username: ')
if password is None:
password = getpass.getpass('Please provide password: ')
config_dir_path = os.path.join(
os.path.expanduser('~'),
CONFIG_DIR_NAME
)
pickle_path = os.path.join(
config_dir_path,
'browser.pickle'
)
if os.path.isfile(pickle_path):
try:
with open(pickle_path, 'rb') as file:
data = pickle.load(file)
if data['username'] == username and \
data['password'] == password:
return data['browser']
except:
pass
browser = Browser()
login_url = 'https://www.kaggle.com/account/login'
login_page = browser.get(login_url)
token = re.search(
'antiForgeryToken: \'(?P<token>.+)\'',
str(login_page.soup)
).group(1)
login_result_page = browser.post(
login_url,
data={
'username': username,
'password': password,
'__RequestVerificationToken': token
}
)
error_match = re.search(
'"errors":\["(?P<error>.+)"\]',
str(login_result_page.soup)
)
if error_match:
print(error_match.group(1))
return
if not os.path.isdir(config_dir_path):
os.mkdir(config_dir_path, 0o700)
with open(pickle_path, 'wb') as f:
pickle.dump(dict(
username=username, password=password, browser=browser
), f)
return browser
|
floydwch/kaggle-cli
|
kaggle_cli/common.py
|
Python
|
mit
| 1,733
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "PracticaP5.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
PaulDiaconescu/pentagram
|
PracticaP5/manage.py
|
Python
|
gpl-3.0
| 263
|
# coding: utf-8
"""
Utilities for dealing with text encodings
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008-2012 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import sys
import locale
import warnings
# to deal with the possibility of sys.std* not being a stream at all
def get_stream_enc(stream, default=None):
"""Return the given stream's encoding or a default.
There are cases where ``sys.std*`` might not actually be a stream, so
check for the encoding attribute prior to returning it, and return
a default if it doesn't exist or evaluates as False. ``default``
is None if not provided.
"""
if not hasattr(stream, 'encoding') or not stream.encoding:
return default
else:
return stream.encoding
# Less conservative replacement for sys.getdefaultencoding, that will try
# to match the environment.
# Defined here as central function, so if we find better choices, we
# won't need to make changes all over IPython.
def getdefaultencoding(prefer_stream=True):
"""Return IPython's guess for the default encoding for bytes as text.
If prefer_stream is True (default), asks for stdin.encoding first,
to match the calling Terminal, but that is often None for subprocesses.
Then fall back on locale.getpreferredencoding(),
which should be a sensible platform default (that respects LANG environment),
and finally to sys.getdefaultencoding() which is the most conservative option,
and usually ASCII on Python 2 or UTF8 on Python 3.
"""
enc = None
if prefer_stream:
enc = get_stream_enc(sys.stdin)
if not enc or enc == 'ascii':
try:
# There are reports of getpreferredencoding raising errors
# in some cases, which may well be fixed, but let's be conservative
# here.
enc = locale.getpreferredencoding()
except Exception:
pass
enc = enc or sys.getdefaultencoding()
# On windows `cp0` can be returned to indicate that there is no code page.
# Since cp0 is an invalid encoding return instead cp1252 which is the
# Western European default.
if enc == 'cp0':
warnings.warn(
"Invalid code page cp0 detected - using cp1252 instead."
"If cp1252 is incorrect please ensure a valid code page "
"is defined for the process.", RuntimeWarning)
return 'cp1252'
return enc
DEFAULT_ENCODING = getdefaultencoding()
|
mattvonrocketstein/smash
|
smashlib/ipy3x/utils/encoding.py
|
Python
|
mit
| 2,881
|
# Tweepy
# Copyright 2009-2022 Joshua Roesslein
# See LICENSE for details.
"""
Tweepy Twitter API library
"""
__version__ = '4.6.0'
__author__ = 'Joshua Roesslein'
__license__ = 'MIT'
from tweepy.api import API
from tweepy.auth import (
AppAuthHandler, OAuthHandler, OAuth1UserHandler, OAuth2AppHandler,
OAuth2BearerHandler, OAuth2UserHandler
)
from tweepy.cache import Cache, FileCache, MemoryCache
from tweepy.client import Client, Response
from tweepy.cursor import Cursor
from tweepy.errors import (
BadRequest, Forbidden, HTTPException, NotFound, TooManyRequests,
TweepyException, TwitterServerError, Unauthorized
)
from tweepy.list import List
from tweepy.media import Media
from tweepy.pagination import Paginator
from tweepy.place import Place
from tweepy.poll import Poll
from tweepy.space import Space
from tweepy.streaming import (
Stream, StreamingClient, StreamResponse, StreamRule
)
from tweepy.tweet import ReferencedTweet, Tweet
from tweepy.user import User
# Global, unauthenticated instance of API
api = API()
|
tweepy/tweepy
|
tweepy/__init__.py
|
Python
|
mit
| 1,051
|
import json
import random
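# Generate 200 synthetic "activity" documents with a random vote and random
# lat/lng inside a fixed bounding box, then print the collection as JSON.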
random_samples = {
"documents": []
}
for i in range(200):
new_sample = {
"id": "twitter_handle_ayy"+str(i),
"user": "twitter_handle_ayy"+str(i),
"type": "activity",
"vote": random.choice(["true", "false"]),
"lat": str(random.uniform(56.0, 58.0)),
"lng": str(random.uniform(21.0, 28.0)),
"photo_link": "This is my photo!"
}
random_samples["documents"].append(new_sample)
print json.dumps(random_samples)
|
charleslai/geotap-node
|
make_samples.py
|
Python
|
mit
| 449
|
## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio BibRank Administrator Interface."""
__revision__ = "$Id$"
import os
import ConfigParser
from invenio.config import \
CFG_SITE_LANG, \
CFG_ETCDIR, \
CFG_SITE_URL
import invenio.access_control_engine as acce
from invenio.messages import language_list_long
from invenio.dbquery import run_sql
def getnavtrail(previous = ''):
navtrail = """<a class="navtrail" href="%s/help/admin">Admin Area</a> """ % (CFG_SITE_URL,)
navtrail = navtrail + previous
return navtrail
def check_user(req, role, adminarea=2, authorized=0):
(auth_code, auth_message) = is_adminuser(req, role)
if not authorized and auth_code != 0:
return ("false", auth_message)
return ("", auth_message)
def is_adminuser(req, role):
"""check if user is a registered administrator. """
return acce.acc_authorize_action(req, role)
def perform_index(ln=CFG_SITE_LANG):
"""create the bibrank main area menu page."""
header = ['Code', 'Translations', 'Collections', 'Rank method']
rnk_list = get_def_name('', "rnkMETHOD")
actions = []
for (rnkID, name) in rnk_list:
actions.append([name])
for col in [(('Modify', 'modifytranslations'),),
(('Modify', 'modifycollection'),),
(('Show Details', 'showrankdetails'),
('Modify', 'modifyrank'),
('Delete', 'deleterank'))]:
actions[-1].append('<a href="%s/admin/bibrank/bibrankadmin.py/%s?rnkID=%s&ln=%s">%s</a>' % (CFG_SITE_URL, col[0][1], rnkID, ln, col[0][0]))
for (str, function) in col[1:]:
actions[-1][-1] += ' / <a href="%s/admin/bibrank/bibrankadmin.py/%s?rnkID=%s&ln=%s">%s</a>' % (CFG_SITE_URL, function, rnkID, ln, str)
output = """
<a href="%s/admin/bibrank/bibrankadmin.py/addrankarea?ln=%s">Add new rank method</a><br /><br />
""" % (CFG_SITE_URL, ln)
output += tupletotable(header=header, tuple=actions)
return addadminbox("""Overview of rank methods <small>[<a title="See guide" href="%s/help/admin/bibrank-admin-guide#mi">?</a>]</small>""" % CFG_SITE_URL, datalist=[output, ''])
def perform_modifycollection(rnkID='', ln=CFG_SITE_LANG, func='', colID='', confirm=0):
"""Modify which collections the rank method is visible to"""
output = ""
subtitle = ""
if rnkID:
rnkNAME = get_def_name(rnkID, "rnkMETHOD")[0][1]
if func in ["0", 0] and confirm in ["1", 1]:
finresult = attach_col_rnk(rnkID, colID)
elif func in ["1", 1] and confirm in ["1", 1]:
finresult = detach_col_rnk(rnkID, colID)
if colID:
colNAME = get_def_name(colID, "collection")[0][1]
subtitle = """Step 1 - Select collection to enable/disable rank method '%s' for""" % rnkNAME
output = """
<dl>
<dt>The rank method is currently enabled for these collections:</dt>
<dd>
"""
col_list = get_rnk_col(rnkID, ln)
if not col_list:
output += """No collections"""
else:
for (id, name) in col_list:
output += """%s, """ % name
output += """</dd>
</dl>
"""
col_list = get_def_name('', "collection")
col_rnk = dict(get_rnk_col(rnkID))
col_list = filter(lambda x: not col_rnk.has_key(x[0]), col_list)
if col_list:
text = """
<span class="adminlabel">Enable for:</span>
<select name="colID" class="admin_w200">
<option value="">- select collection -</option>
"""
for (id, name) in col_list:
text += """<option value="%s" %s>%s</option>""" % (id, (func in ["0", 0] and confirm in ["0", 0] and colID and int(colID) == int(id)) and 'selected="selected"' or '' , name)
text += """</select>"""
output += createhiddenform(action="modifycollection",
text=text,
button="Enable",
rnkID=rnkID,
ln=ln,
func=0,
confirm=1)
if confirm in ["0", 0] and func in ["0", 0] and colID:
subtitle = "Step 2 - Confirm to enable rank method for the chosen collection"
text = "<b><p>Please confirm to enable rank method '%s' for the collection '%s'</p></b>" % (rnkNAME, colNAME)
output += createhiddenform(action="modifycollection",
text=text,
button="Confirm",
rnkID=rnkID,
ln=ln,
colID=colID,
func=0,
confirm=1)
elif confirm in ["1", 1] and func in ["0", 0] and colID:
subtitle = "Step 3 - Result"
output += write_outcome(finresult)
elif confirm not in ["0", 0] and func in ["0", 0]:
output += """<b><span class="info">Please select a collection.</span></b>"""
col_list = get_rnk_col(rnkID, ln)
if col_list:
text = """
<span class="adminlabel">Disable for:</span>
<select name="colID" class="admin_w200">
<option value="">- select collection -</option>
"""
for (id, name) in col_list:
text += """<option value="%s" %s>%s</option>""" % (id, (func in ["1", 1] and confirm in ["0", 0] and colID and int(colID) == int(id)) and 'selected="selected"' or '' , name)
text += """</select>"""
output += createhiddenform(action="modifycollection",
text=text,
button="Disable",
rnkID=rnkID,
ln=ln,
func=1,
confirm=1)
if confirm in ["1", 1] and func in ["1", 1] and colID:
subtitle = "Step 3 - Result"
output += write_outcome(finresult)
elif confirm not in ["0", 0] and func in ["1", 1]:
output += """<b><span class="info">Please select a collection.</span></b>"""
body = [output]
return addadminbox(subtitle + """ <small>[<a title="See guide" href="%s/help/admin/bibrank-admin-guide#mc">?</a>]</small>""" % CFG_SITE_URL, body)
def perform_modifytranslations(rnkID, ln, sel_type, trans, confirm, callback='yes'):
"""Modify the translations of a rank method"""
output = ''
subtitle = ''
langs = get_languages()
langs.sort()
if confirm in ["2", 2] and rnkID:
finresult = modify_translations(rnkID, langs, sel_type, trans, "rnkMETHOD")
rnk_name = get_def_name(rnkID, "rnkMETHOD")[0][1]
rnk_dict = dict(get_i8n_name('', ln, get_rnk_nametypes()[0][0], "rnkMETHOD"))
if rnkID and rnk_dict.has_key(int(rnkID)):
rnkID = int(rnkID)
subtitle = """<a name="3">3. Modify translations for rank method '%s'</a>""" % rnk_name
if type(trans) is str:
trans = [trans]
if sel_type == '':
sel_type = get_rnk_nametypes()[0][0]
header = ['Language', 'Translation']
actions = []
text = """
<span class="adminlabel">Name type</span>
<select name="sel_type" class="admin_w200">
"""
types = get_rnk_nametypes()
if len(types) > 1:
for (key, value) in types:
text += """<option value="%s" %s>%s""" % (key, key == sel_type and 'selected="selected"' or '', value)
trans_names = get_name(rnkID, ln, key, "rnkMETHOD")
if trans_names and trans_names[0][0]:
text += ": %s" % trans_names[0][0]
text += "</option>"
text += """</select>"""
output += createhiddenform(action="modifytranslations",
text=text,
button="Select",
rnkID=rnkID,
ln=ln,
confirm=0)
if confirm in [-1, "-1", 0, "0"]:
trans = []
for key, value in langs:
try:
trans_names = get_name(rnkID, key, sel_type, "rnkMETHOD")
trans.append(trans_names[0][0])
except StandardError, e:
trans.append('')
for nr in range(0,len(langs)):
actions.append(["%s" % (langs[nr][1],)])
actions[-1].append('<input type="text" name="trans" size="30" value="%s"/>' % trans[nr])
text = tupletotable(header=header, tuple=actions)
output += createhiddenform(action="modifytranslations",
text=text,
button="Modify",
rnkID=rnkID,
sel_type=sel_type,
ln=ln,
confirm=2)
if sel_type and len(trans) and confirm in ["2", 2]:
output += write_outcome(finresult)
body = [output]
return addadminbox(subtitle + """ <small>[<a title="See guide" href="%s/help/admin/bibrank-admin-guide#mt">?</a>]</small>""" % CFG_SITE_URL, body)
def perform_addrankarea(rnkcode='', ln=CFG_SITE_LANG, template='', confirm=-1):
"""form to add a new rank method with these values:"""
subtitle = 'Step 1 - Create new rank method'
output = """
<dl>
<dt>BibRank code:</dt>
    <dd>A unique code that identifies a rank method; it is used when running the bibrank daemon and to name the configuration file for the method.
    <br />The template file includes the necessary parameters for the chosen rank method, and only needs to be edited with the correct tags and paths.
<br />For more information, please go to the <a title="See guide" href="%s/help/admin/bibrank-admin-guide">BibRank guide</a> and read the section about adding a rank method</dd>
</dl>
""" % CFG_SITE_URL
text = """
<span class="adminlabel">BibRank code</span>
<input class="admin_wvar" type="text" name="rnkcode" value="%s" />
""" % (rnkcode)
text += """<br />
<span class="adminlabel">Cfg template</span>
<select name="template" class="admin_w200">
<option value="">No template</option>
"""
templates = get_templates()
for templ in templates:
text += """<option value="%s" %s>%s</option>""" % (templ, template == templ and 'selected="selected"' or '', templ[9:len(templ)-4])
text += """</select>"""
output += createhiddenform(action="addrankarea",
text=text,
button="Add rank method",
ln=ln,
confirm=1)
if rnkcode:
if confirm in ["0", 0]:
subtitle = 'Step 2 - Confirm addition of rank method'
text = """<b>Add rank method with BibRank code: '%s'.</b>""" % (rnkcode)
if template:
text += """<br /><b>Using configuration template: '%s'.</b>""" % (template)
else:
text += """<br /><b>Create empty configuration file.</b>"""
output += createhiddenform(action="addrankarea",
text=text,
rnkcode=rnkcode,
button="Confirm",
template=template,
confirm=1)
elif confirm in ["1", 1]:
rnkID = add_rnk(rnkcode)
subtitle = "Step 3 - Result"
if rnkID[0] == 1:
rnkID = rnkID[1]
text = """<b><span class="info">Added new rank method with BibRank code '%s'</span></b>""" % rnkcode
try:
if template:
infile = open("%s/bibrank/%s" % (CFG_ETCDIR, template), 'r')
indata = infile.readlines()
infile.close()
else:
indata = ()
file = open("%s/bibrank/%s.cfg" % (CFG_ETCDIR, get_rnk_code(rnkID)[0][0]), 'w')
for line in indata:
file.write(line)
file.close()
if template:
text += """<b><span class="info"><br />Configuration file created using '%s' as template.</span></b>""" % template
else:
text += """<b><span class="info"><br />Empty configuration file created.</span></b>"""
except StandardError, e:
text += """<b><span class="info"><br />Sorry, could not create configuration file: '%s/bibrank/%s.cfg', either because it already exists, or not enough rights to create file. <br />Please create the file in the path given.</span></b>""" % (CFG_ETCDIR, get_rnk_code(rnkID)[0][0])
else:
text = """<b><span class="info">Sorry, could not add rank method, rank method with the same BibRank code probably exists.</span></b>"""
output += text
elif not rnkcode and confirm not in [-1, "-1"]:
output += """<b><span class="info">Sorry, could not add rank method, not enough data submitted.</span></b>"""
body = [output]
return addadminbox(subtitle + """ <small>[<a title="See guide" href="%s/help/admin/bibrank-admin-guide#ar">?</a>]</small>""" % CFG_SITE_URL, body)
def perform_modifyrank(rnkID, rnkcode='', ln=CFG_SITE_LANG, template='', cfgfile='', confirm=0):
"""form to modify a rank method
rnkID - id of the rank method
"""
if not rnkID:
return "No ranking method selected."
if not get_rnk_code(rnkID):
return "Ranking method %s does not seem to exist." % str(rnkID)
subtitle = 'Step 1 - Please modify the wanted values below'
if not rnkcode:
oldcode = get_rnk_code(rnkID)[0]
else:
oldcode = rnkcode
output = """
<dl>
<dd>When changing the BibRank code of a rank method, you must also change any scheduled tasks using the old value.
<br />For more information, please go to the <a title="See guide" href="%s/help/admin/bibrank-admin-guide">BibRank guide</a> and read the section about modifying a rank method's BibRank code.</dd>
</dl>
""" % CFG_SITE_URL
text = """
<span class="adminlabel">BibRank code</span>
<input class="admin_wvar" type="text" name="rnkcode" value="%s" />
<br />
""" % (oldcode)
try:
text += """<span class="adminlabel">Cfg file</span>"""
textarea = ""
if cfgfile:
textarea +=cfgfile
else:
file = open("%s/bibrank/%s.cfg" % (CFG_ETCDIR, get_rnk_code(rnkID)[0][0]))
for line in file.readlines():
textarea += line
text += """<textarea class="admin_wvar" name="cfgfile" rows="15" cols="70">""" + textarea + """</textarea>"""
except StandardError, e:
text += """<b><span class="info">Cannot load file, either it does not exist, or not enough rights to read it: '%s/bibrank/%s.cfg'<br />Please create the file in the path given.</span></b>""" % (CFG_ETCDIR, get_rnk_code(rnkID)[0][0])
output += createhiddenform(action="modifyrank",
text=text,
rnkID=rnkID,
button="Modify",
confirm=1)
if rnkcode and confirm in ["1", 1] and get_rnk_code(rnkID)[0][0] != rnkcode:
oldcode = get_rnk_code(rnkID)[0][0]
result = modify_rnk(rnkID, rnkcode)
subtitle = "Step 3 - Result"
if result:
text = """<b><span class="info">Rank method modified.</span></b>"""
try:
file = open("%s/bibrank/%s.cfg" % (CFG_ETCDIR, oldcode), 'r')
file2 = open("%s/bibrank/%s.cfg" % (CFG_ETCDIR, rnkcode), 'w')
lines = file.readlines()
for line in lines:
file2.write(line)
file.close()
file2.close()
os.remove("%s/bibrank/%s.cfg" % (CFG_ETCDIR, oldcode))
except StandardError, e:
text = """<b><span class="info">Sorry, could not change name of cfg file, must be done manually: '%s/bibrank/%s.cfg'</span></b>""" % (CFG_ETCDIR, oldcode)
else:
text = """<b><span class="info">Sorry, could not modify rank method.</span></b>"""
output += text
if cfgfile and confirm in ["1", 1]:
try:
file = open("%s/bibrank/%s.cfg" % (CFG_ETCDIR, get_rnk_code(rnkID)[0][0]), 'w')
file.write(cfgfile)
file.close()
text = """<b><span class="info"><br />Configuration file modified: '%s/bibrank/%s.cfg'</span></b>""" % (CFG_ETCDIR, get_rnk_code(rnkID)[0][0])
except StandardError, e:
text = """<b><span class="info"><br />Sorry, could not modify configuration file, please check for rights to do so: '%s/bibrank/%s.cfg'<br />Please modify the file manually.</span></b>""" % (CFG_ETCDIR, get_rnk_code(rnkID)[0][0])
output += text
finoutput = addadminbox(subtitle + """ <small>[<a title="See guide" href="%s/help/admin/bibrank-admin-guide#mr">?</a>]</small>""" % CFG_SITE_URL, [output])
output = ""
text = """
<span class="adminlabel">Select</span>
<select name="template" class="admin_w200">
<option value="">- select template -</option>
"""
templates = get_templates()
for templ in templates:
text += """<option value="%s" %s>%s</option>""" % (templ, template == templ and 'selected="selected"' or '', templ[9:len(templ)-4])
text += """</select><br />"""
output += createhiddenform(action="modifyrank",
text=text,
rnkID=rnkID,
button="Show template",
confirm=0)
try:
if template:
textarea = ""
text = """<span class="adminlabel">Content:</span>"""
file = open("%s/bibrank/%s" % (CFG_ETCDIR, template), 'r')
lines = file.readlines()
for line in lines:
textarea += line
file.close()
text += """<textarea class="admin_wvar" readonly="true" rows="15" cols="70">""" + textarea + """</textarea>"""
output += text
except StandardError, e:
output += """Cannot load file, either it does not exist, or not enough rights to read it: '%s/bibrank/%s'""" % (CFG_ETCDIR, template)
finoutput += addadminbox("View templates", [output])
return finoutput
def perform_deleterank(rnkID, ln=CFG_SITE_LANG, confirm=0):
"""form to delete a rank method
"""
subtitle =''
output = """
<span class="warning">
<dl>
<dt><strong>WARNING:</strong></dt>
    <dd><strong>When deleting a rank method, you also delete all data related to the rank method, like translations, which collections
    it was attached to and the data necessary to rank the search results. Any scheduled tasks using the deleted rank method will also stop working.
<br /><br />For more information, please go to the <a title="See guide" href="%s/help/admin/bibrank-admin-guide">BibRank guide</a> and read the section regarding deleting a rank method.</strong></dd>
</dl>
</span>
""" % CFG_SITE_URL
if rnkID:
if confirm in ["0", 0]:
rnkNAME = get_def_name(rnkID, "rnkMETHOD")[0][1]
subtitle = 'Step 1 - Confirm deletion'
text = """Delete rank method '%s'.""" % (rnkNAME)
output += createhiddenform(action="deleterank",
text=text,
button="Confirm",
rnkID=rnkID,
confirm=1)
elif confirm in ["1", 1]:
try:
rnkNAME = get_def_name(rnkID, "rnkMETHOD")[0][1]
rnkcode = get_rnk_code(rnkID)[0][0]
table = ""
try:
config = ConfigParser.ConfigParser()
config.readfp(open("%s/bibrank/%s.cfg" % (CFG_ETCDIR, rnkcode), 'r'))
table = config.get(config.get('rank_method', "function"), "table")
except Exception:
pass
result = delete_rnk(rnkID, table)
subtitle = "Step 2 - Result"
if result:
text = """<b><span class="info">Rank method deleted</span></b>"""
try:
os.remove("%s/bibrank/%s.cfg" % (CFG_ETCDIR, rnkcode))
text += """<br /><b><span class="info">Configuration file deleted: '%s/bibrank/%s.cfg'.</span></b>""" % (CFG_ETCDIR, rnkcode)
except StandardError, e:
text += """<br /><b><span class="info">Sorry, could not delete configuration file: '%s/bibrank/%s.cfg'.</span><br />Please delete the file manually.</span></b>""" % (CFG_ETCDIR, rnkcode)
else:
text = """<b><span class="info">Sorry, could not delete rank method</span></b>"""
except StandardError, e:
text = """<b><span class="info">Sorry, could not delete rank method, most likely already deleted</span></b>"""
output = text
body = [output]
return addadminbox(subtitle + """ <small>[<a title="See guide" href="%s/help/admin/bibrank-admin-guide#dr">?</a>]</small>""" % CFG_SITE_URL, body)
def perform_showrankdetails(rnkID, ln=CFG_SITE_LANG):
"""Returns details about the rank method given by rnkID"""
if not rnkID:
return "No ranking method selected."
if not get_rnk_code(rnkID):
return "Ranking method %s does not seem to exist." % str(rnkID)
subtitle = """Overview <a href="%s/admin/bibrank/bibrankadmin.py/modifyrank?rnkID=%s&ln=%s">[Modify]</a>""" % (CFG_SITE_URL, rnkID, ln)
text = """
BibRank code: %s<br />
Last updated by BibRank:
""" % (get_rnk_code(rnkID)[0][0])
if get_rnk(rnkID)[0][2]:
text += "%s<br />" % get_rnk(rnkID)[0][2]
else:
text += "Not yet run.<br />"
output = addadminbox(subtitle, [text])
subtitle = """Rank method statistics"""
text = ""
try:
text = "Not yet implemented"
except StandardError, e:
text = "BibRank not yet run, cannot show statistics for method"
output += addadminbox(subtitle, [text])
subtitle = """Attached to collections <a href="%s/admin/bibrank/bibrankadmin.py/modifycollection?rnkID=%s&ln=%s">[Modify]</a>""" % (CFG_SITE_URL, rnkID, ln)
text = ""
col = get_rnk_col(rnkID, ln)
for key, value in col:
text+= "%s<br />" % value
if not col:
text +="No collections"
output += addadminbox(subtitle, [text])
subtitle = """Translations <a href="%s/admin/bibrank/bibrankadmin.py/modifytranslations?rnkID=%s&ln=%s">[Modify]</a>""" % (CFG_SITE_URL, rnkID, ln)
prev_lang = ''
trans = get_translations(rnkID)
types = get_rnk_nametypes()
types = dict(map(lambda x: (x[0], x[1]), types))
text = ""
languages = dict(get_languages())
if trans:
for lang, type, name in trans:
if lang and languages.has_key(lang) and type and name:
if prev_lang != lang:
prev_lang = lang
text += """%s: <br />""" % (languages[lang])
if types.has_key(type):
text+= """<span style="margin-left: 10px">'%s'</span><span class="note">(%s)</span><br />""" % (name, types[type])
else:
text = """No translations exists"""
output += addadminbox(subtitle, [text])
subtitle = """Configuration file: '%s/bibrank/%s.cfg' <a href="%s/admin/bibrank/bibrankadmin.py/modifyrank?rnkID=%s&ln=%s">[Modify]</a>""" % (CFG_ETCDIR, get_rnk_code(rnkID)[0][0], CFG_SITE_URL, rnkID, ln)
text = ""
try:
file = open("%s/bibrank/%s.cfg" % (CFG_ETCDIR, get_rnk_code(rnkID)[0][0]))
text += """<pre>"""
for line in file.readlines():
text += line
text += """</pre>"""
except StandardError, e:
text = """Cannot load file, either it does not exist, or not enough rights to read it."""
output += addadminbox(subtitle, [text])
return output
def compare_on_val(second, first):
return cmp(second[1], first[1])
def get_rnk_code(rnkID):
"""Returns the name from rnkMETHOD based on argument
rnkID - id from rnkMETHOD"""
try:
res = run_sql("SELECT name FROM rnkMETHOD where id=%s" % (rnkID))
return res
except StandardError, e:
return ()
def get_rnk(rnkID=''):
"""Return one or all rank methods
rnkID - return the rank method given, or all if not given"""
try:
if rnkID:
res = run_sql("SELECT id,name,DATE_FORMAT(last_updated, '%%Y-%%m-%%d %%H:%%i:%%s') from rnkMETHOD WHERE id=%s" % rnkID)
else:
res = run_sql("SELECT id,name,DATE_FORMAT(last_updated, '%%Y-%%m-%%d %%H:%%i:%%s') from rnkMETHOD")
return res
except StandardError, e:
return ()
def get_translations(rnkID):
"""Returns the translations in rnkMETHODNAME for a rankmethod
rnkID - the id of the rankmethod from rnkMETHOD """
try:
res = run_sql("SELECT ln, type, value FROM rnkMETHODNAME where id_rnkMETHOD=%s ORDER BY ln,type" % (rnkID))
return res
except StandardError, e:
return ()
def get_rnk_nametypes():
"""Return a list of the various translationnames for the rank methods"""
type = []
type.append(('ln', 'Long name'))
#type.append(('sn', 'Short name'))
return type
def get_col_nametypes():
"""Return a list of the various translationnames for the rank methods"""
type = []
type.append(('ln', 'Long name'))
return type
def get_rnk_col(rnkID, ln=CFG_SITE_LANG):
""" Returns a list of the collections the given rank method is attached to
rnkID - id from rnkMETHOD"""
try:
res1 = dict(run_sql("SELECT id_collection, '' FROM collection_rnkMETHOD WHERE id_rnkMETHOD=%s" % rnkID))
res2 = get_def_name('', "collection")
result = filter(lambda x: res1.has_key(x[0]), res2)
return result
except StandardError, e:
return ()
def get_templates():
"""Read CFG_ETCDIR/bibrank and returns a list of all files with 'template' """
templates = []
files = os.listdir(CFG_ETCDIR + "/bibrank/")
for file in files:
if str.find(file,"template_") != -1:
templates.append(file)
return templates
def attach_col_rnk(rnkID, colID):
"""attach rank method to collection
rnkID - id from rnkMETHOD table
colID - id of collection, as in collection table """
try:
res = run_sql("INSERT INTO collection_rnkMETHOD(id_collection, id_rnkMETHOD) values (%s,%s)" % (colID, rnkID))
return (1, "")
except StandardError, e:
return (0, e)
def detach_col_rnk(rnkID, colID):
"""detach rank method from collection
rnkID - id from rnkMETHOD table
colID - id of collection, as in collection table """
try:
res = run_sql("DELETE FROM collection_rnkMETHOD WHERE id_collection=%s AND id_rnkMETHOD=%s" % (colID, rnkID))
return (1, "")
except StandardError, e:
return (0, e)
def delete_rnk(rnkID, table=""):
"""Deletes all data for the given rank method
rnkID - delete all data in the tables associated with ranking and this id """
try:
res = run_sql("DELETE FROM rnkMETHOD WHERE id=%s" % rnkID)
res = run_sql("DELETE FROM rnkMETHODNAME WHERE id_rnkMETHOD=%s" % rnkID)
res = run_sql("DELETE FROM collection_rnkMETHOD WHERE id_rnkMETHOD=%s" % rnkID)
res = run_sql("DELETE FROM rnkMETHODDATA WHERE id_rnkMETHOD=%s" % rnkID)
if table:
res = run_sql("truncate %s" % table)
res = run_sql("truncate %sR" % table[:-1])
return (1, "")
except StandardError, e:
return (0, e)
def modify_rnk(rnkID, rnkcode):
"""change the code for the rank method given
rnkID - change in rnkMETHOD where id is like this
rnkcode - new value for field 'name' in rnkMETHOD """
try:
res = run_sql("UPDATE rnkMETHOD set name=%s WHERE id=%s", (rnkcode, rnkID))
return (1, "")
except StandardError, e:
return (0, e)
def add_rnk(rnkcode):
"""Adds a new rank method to rnkMETHOD
rnkcode - the "code" for the rank method, to be used by bibrank daemon """
try:
res = run_sql("INSERT INTO rnkMETHOD (name) VALUES (%s)", (rnkcode,))
res = run_sql("SELECT id FROM rnkMETHOD WHERE name=%s", (rnkcode,))
if res:
return (1, res[0][0])
else:
raise StandardError
except StandardError, e:
return (0, e)
def addadminbox(header='', datalist=[], cls="admin_wvar"):
"""used to create table around main data on a page, row based.
header - header on top of the table
datalist - list of the data to be added row by row
cls - possible to select wich css-class to format the look of the table."""
if len(datalist) == 1: per = '100'
else: per = '75'
output = '<table class="%s" ' % (cls, ) + 'width="95%">\n'
output += """
<thead>
<tr>
<th class="adminheaderleft" colspan="%s">%s</th>
</tr>
</thead>
<tbody>
""" % (len(datalist), header)
output += ' <tr>\n'
output += """
<td style="vertical-align: top; margin-top: 5px; width: %s;">
%s
</td>
""" % (per+'%', datalist[0])
if len(datalist) > 1:
output += """
<td style="vertical-align: top; margin-top: 5px; width: %s;">
%s
</td>
""" % ('25%', datalist[1])
output += ' </tr>\n'
output += """
</tbody>
</table>
"""
return output
def tupletotable(header=[], tuple=[], start='', end='', extracolumn='', highlight_rows_p=False, alternate_row_colors_p=False):
"""create html table for a tuple.
header - optional header for the columns
tuple - create table of this
start - text to be added in the beginning, most likely beginning of a form
    end - text to be added at the end, most likely the end of a form.
extracolumn - mainly used to put in a button.
highlight_rows_p - if the cursor hovering a row should highlight the full row or not
alternate_row_colors_p - if alternate background colours should be used for the rows
"""
# study first row in tuple for alignment
align = []
try:
firstrow = tuple[0]
if type(firstrow) in [int, long]:
align = ['admintdright']
elif type(firstrow) in [str, dict]:
align = ['admintdleft']
else:
for item in firstrow:
if type(item) is int:
align.append('admintdright')
else:
align.append('admintdleft')
except IndexError:
firstrow = []
tblstr = ''
for h in header + ['']:
tblstr += ' <th class="adminheader">%s</th>\n' % (h, )
if tblstr: tblstr = ' <tr>\n%s\n </tr>\n' % (tblstr, )
tblstr = start + '<table class="admin_wvar_nomargin">\n' + tblstr
# extra column
try:
extra = '<tr class="%s">' % (highlight_rows_p and 'admin_row_highlight' or '')
if type(firstrow) not in [int, long, str, dict]:
# for data in firstrow: extra += '<td class="%s">%s</td>\n' % ('admintd', data)
for i in range(len(firstrow)): extra += '<td class="%s">%s</td>\n' % (align[i], firstrow[i])
else:
extra += ' <td class="%s">%s</td>\n' % (align[0], firstrow)
extra += '<td class="extracolumn" rowspan="%s" style="vertical-align: top;">\n%s\n</td>\n</tr>\n' % (len(tuple), extracolumn)
except IndexError:
extra = ''
tblstr += extra
# for i in range(1, len(tuple)):
j = 0
for row in tuple[1:]:
j += 1
tblstr += ' <tr class="%s %s">\n' % (highlight_rows_p and 'admin_row_highlight' or '',
(j % 2 and alternate_row_colors_p) and 'admin_row_color' or '')
# row = tuple[i]
if type(row) not in [int, long, str, dict]:
# for data in row: tblstr += '<td class="admintd">%s</td>\n' % (data,)
for i in range(len(row)): tblstr += '<td class="%s">%s</td>\n' % (align[i], row[i])
else:
tblstr += ' <td class="%s">%s</td>\n' % (align[0], row)
tblstr += ' </tr> \n'
tblstr += '</table> \n '
tblstr += end
return tblstr
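# Illustrative call sketch (not part of the original module); the header and
# row data below are invented:
#   tupletotable(header=['Code', 'Rank method'],
#                tuple=[('wrd', 'word similarity'), ('cit', 'times cited')])
# returns an HTML <table> with one header row and one <tr> per data row.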
def tupletotable_onlyselected(header=[], tuple=[], selected=[], start='', end='', extracolumn=''):
"""create html table for a tuple.
header - optional header for the columns
tuple - create table of this
selected - indexes of selected rows in the tuple
start - put this in the beginning
    end - put this at the end
extracolumn - mainly used to put in a button"""
tuple2 = []
for index in selected:
tuple2.append(tuple[int(index)-1])
return tupletotable(header=header,
tuple=tuple2,
start=start,
end=end,
extracolumn=extracolumn)
def addcheckboxes(datalist=[], name='authids', startindex=1, checked=[]):
"""adds checkboxes in front of the listdata.
datalist - add checkboxes in front of this list
name - name of all the checkboxes, values will be associated with this name
startindex - usually 1 because of the header
checked - values of checkboxes to be pre-checked """
if not type(checked) is list: checked = [checked]
for row in datalist:
if 1 or row[0] not in [-1, "-1", 0, "0"]: # always box, check another place
chkstr = str(startindex) in checked and 'checked="checked"' or ''
row.insert(0, '<input type="checkbox" name="%s" value="%s" %s />' % (name, startindex, chkstr))
else:
row.insert(0, '')
startindex += 1
return datalist
def createhiddenform(action="", text="", button="confirm", cnfrm='', **hidden):
"""create select with hidden values and submit button
action - name of the action to perform on submit
text - additional text, can also be used to add non hidden input
button - value/caption on the submit button
cnfrm - if given, must check checkbox to confirm
**hidden - dictionary with name=value pairs for hidden input """
output = '<form action="%s" method="post">\n' % (action, )
output += '<table>\n<tr><td style="vertical-align: top">'
output += text
if cnfrm:
output += ' <input type="checkbox" name="confirm" value="1"/>'
for key in hidden.keys():
if type(hidden[key]) is list:
for value in hidden[key]:
output += ' <input type="hidden" name="%s" value="%s"/>\n' % (key, value)
else:
output += ' <input type="hidden" name="%s" value="%s"/>\n' % (key, hidden[key])
output += '</td><td style="vertical-align: bottom">'
output += ' <input class="adminbutton" type="submit" value="%s"/>\n' % (button, )
output += '</td></tr></table>'
output += '</form>\n'
return output
def get_languages():
languages = []
for (lang, lang_namelong) in language_list_long():
languages.append((lang, lang_namelong))
languages.sort()
return languages
def get_def_name(ID, table):
"""Returns a list of the names, either with the name in the current language, the default language, or just the name from the given table
ln - a language supported by Invenio
type - the type of value wanted, like 'ln', 'sn'"""
name = "name"
if table[-1:].isupper():
name = "NAME"
try:
if ID:
res = run_sql("SELECT id,name FROM %s where id=%s" % (table, ID))
else:
res = run_sql("SELECT id,name FROM %s" % table)
res = list(res)
res.sort(compare_on_val)
return res
except StandardError, e:
return []
def get_i8n_name(ID, ln, rtype, table):
"""Returns a list of the names, either with the name in the current language, the default language, or just the name from the given table
ln - a language supported by Invenio
type - the type of value wanted, like 'ln', 'sn'"""
name = "name"
if table[-1:].isupper():
name = "NAME"
try:
res = ""
if ID:
res = run_sql("SELECT id_%s,value FROM %s%s where type='%s' and ln='%s' and id_%s=%s" % (table, table, name, rtype,ln, table, ID))
else:
res = run_sql("SELECT id_%s,value FROM %s%s where type='%s' and ln='%s'" % (table, table, name, rtype,ln))
if ln != CFG_SITE_LANG:
if ID:
res1 = run_sql("SELECT id_%s,value FROM %s%s WHERE ln='%s' and type='%s' and id_%s=%s" % (table, table, name, CFG_SITE_LANG, rtype, table, ID))
else:
res1 = run_sql("SELECT id_%s,value FROM %s%s WHERE ln='%s' and type='%s'" % (table, table, name, CFG_SITE_LANG, rtype))
res2 = dict(res)
result = filter(lambda x: not res2.has_key(x[0]), res1)
res = res + result
if ID:
res1 = run_sql("SELECT id,name FROM %s where id=%s" % (table, ID))
else:
res1 = run_sql("SELECT id,name FROM %s" % table)
res2 = dict(res)
result = filter(lambda x: not res2.has_key(x[0]), res1)
res = res + result
res = list(res)
res.sort(compare_on_val)
return res
except StandardError, e:
raise StandardError
def get_name(ID, ln, rtype, table):
"""Returns the value from the table name based on arguments
ID - id
ln - a language supported by Invenio
    rtype - the type of value wanted, like 'ln', 'sn'
table - tablename"""
name = "name"
if table[-1:].isupper():
name = "NAME"
try:
res = run_sql("SELECT value FROM %s%s WHERE type='%s' and ln='%s' and id_%s=%s" % (table, name, rtype, ln, table, ID))
return res
except StandardError, e:
return ()
def modify_translations(ID, langs, sel_type, trans, table):
"""add or modify translations in tables given by table
    ID - the id of the row in the given table
sel_type - the name type
langs - the languages
trans - the translations, in same order as in langs
table - the table"""
name = "name"
if table[-1:].isupper():
name = "NAME"
try:
for nr in range(0,len(langs)):
res = run_sql("SELECT value FROM %s%s WHERE id_%s=%%s AND type=%%s AND ln=%%s" % (table, name, table),
(ID, sel_type, langs[nr][0]))
if res:
if trans[nr]:
res = run_sql("UPDATE %s%s SET value=%%s WHERE id_%s=%%s AND type=%%s AND ln=%%s" % (table, name, table),
(trans[nr], ID, sel_type, langs[nr][0]))
else:
res = run_sql("DELETE FROM %s%s WHERE id_%s=%%s AND type=%%s AND ln=%%s" % (table, name, table),
(ID, sel_type, langs[nr][0]))
else:
if trans[nr]:
res = run_sql("INSERT INTO %s%s (id_%s, type, ln, value) VALUES (%%s,%%s,%%s,%%s)" % (table, name, table),
(ID, sel_type, langs[nr][0], trans[nr]))
return (1, "")
except StandardError, e:
return (0, e)
def write_outcome(res):
"""
Write the outcome of an update of some settings.
Parameter 'res' is a tuple (int, str), where 'int' is 0 when there
is an error to display, and 1 when everything went fine. 'str' is
a message displayed when there is an error.
"""
if res and res[0] == 1:
return """<b><span class="info">Operation successfully completed.</span></b>"""
elif res:
return """<b><span class="info">Operation failed. Reason:</span></b><br />%s""" % res[1]
|
AlbertoPeon/invenio
|
modules/bibrank/lib/bibrankadminlib.py
|
Python
|
gpl-2.0
| 41,781
|
# coding = utf-8
"""
Shopping and shopkeepers.
'Tale' mud driver, mudlib and interactive fiction framework
Copyright by Irmen de Jong (irmen@razorvine.net)
Shopping related commands will be roughly:
SHOP/LIST [item type]
list what the shop has for sale
INFO/INQUIRE/ASK about [item/number]
same as "ask [shopkeeper] about [item/number]"
It will display info about the item on sale, as if you examined it.
BUY
> buy sword (buy the first sword on the list)
> buy #3 (buy the third item on the list)
SELL
> sell sword (sell the first sword in your inventory)
VALUE/APPRAISE
ask shop keeper how much he is willing to pay for an item:
> value sword (appraise the first sword in your inventory)
"""
from __future__ import absolute_import, print_function, division, unicode_literals
import random
import datetime
from .npc import NPC
from .base import Item, clone
from .items.basic import Trash
from .errors import ActionRefused, ParseError, RetrySoulVerb
from .util import search_item, sorted_by_name
from . import mud_context
from . import lang
banking_money_limit = 15000.0
class Shopkeeper(NPC):
def init(self):
super(Shopkeeper, self).init()
self.shop = ShopBehavior()
self.verbs = {
"shop": "Go shopping! This shows some information about the shop, and what it has for sale.",
"list": "Go shopping! This shows some information about the shop, and what it has for sale.",
"sell": "Sell stuff",
"buy": "Buy stuff",
"value": "Ask the shopkeeper about what he or she's willing to pay for an item",
"appraise": "Ask the shopkeeper about what he or she's willing to pay for an item",
"info": "Ask about an item on sale. Name the item or give its list number.",
"inquire": "Ask about an item on sale. Name the item or give its list number.",
"ask": "Ask about an item on sale. Name the item or give its list number." # overrides default 'ask'
}
def set_shop(self, shop):
if any(item not in self for item in shop.forsale):
raise ValueError("not all items from shop.forsale are in the shopkeeper's inventory")
self.shop = shop
if self.shop.banks_money:
self.money = min(self.money, banking_money_limit) # make sure we don't have surplus cash
def do_wander(self, ctx):
# let the shopkeeper wander randomly
direction = self.select_random_move()
if direction:
self.move(direction.target, self)
ctx.driver.defer(random.randint(20, 60), self.do_wander)
def validate_open_hours(self, actor=None, current_time=None):
if actor and "wizard" in actor.privileges:
return # for wizards, shops are always open
if current_time is None:
current_time = mud_context.driver.game_clock.clock.time()
assert isinstance(current_time, datetime.time)
for from_hr, to_hr in self.shop.open_hours:
from_t = datetime.time(from_hr)
to_t = datetime.time(to_hr)
if from_hr < to_hr:
if from_t <= current_time < to_t: # normal order such as 9..17
return # we're open!
else:
if from_t <= current_time or current_time < to_t: # reversed order, passes midnight, such as 20..3
return # we're open!
raise ActionRefused("The shop is currently closed! Come back another time, during opening hours.")
def _parse_item(self, parsed, actor):
if len(parsed.who_info) != 1:
raise ParseError("I don't understand what single item you're talking about.")
item, info = parsed.who_info.popitem()
if item not in actor:
raise ActionRefused(self.shop.msg_playercantsell or "You don't have that.")
if not isinstance(item, Item):
raise ActionRefused("You can't sell %s, %s is not trade goods!" % (item.objective, item.subjective))
designator = info.previous_word or ""
return item, designator
def _get_from_list(self, number):
shoplist = sorted_by_name(self.inventory)
try:
return shoplist[number-1]
except IndexError:
raise ActionRefused("That number doesn't appear on the list of items that are for sale.")
def notify_action(self, parsed, actor):
# react to some things people might say such as "ask about <item>/<number>"
if parsed.verb in self.verbs:
return # avoid reacting to verbs we already have a handler for
unparsed = parsed.unparsed.split()
if self in parsed.who_info or self.name in unparsed or lang.capital(self.name) in unparsed \
or parsed.verb in ("hi", "hello", "greet", "wave"):
# someone referred to us
if random.random() < 0.2:
self.do_socialize("smile at " + actor.name)
elif random.random() < 0.2:
self.do_socialize("wave at " + actor.name)
elif random.random() < 0.2:
self.do_socialize("nod at " + actor.name)
def handle_verb(self, parsed, actor):
if self.shop.banks_money:
self.money = min(self.money, banking_money_limit) # make sure we don't have surplus cash
self.validate_open_hours(actor)
if parsed.verb in ("shop", "list"):
return self.shop_list(parsed, actor)
elif parsed.verb in ("info", "inquire", "ask"):
return self.shop_inquire(parsed, actor)
elif parsed.verb in ("value", "appraise"):
return self.shop_appraise(parsed, actor)
elif parsed.verb == "buy":
return self.shop_buy(parsed, actor)
elif parsed.verb == "sell":
return self.shop_sell(parsed, actor)
else:
return False # unrecognised verb
def shop_list(self, parsed, actor):
open_hrs = lang.join(["%d to %d" % hours for hours in self.shop.open_hours])
actor.tell("%s says: \"Welcome. Our opening hours are:" % lang.capital(self.title), open_hrs)
if "wizard" in actor.privileges:
actor.tell(" (but for wizards, we're always open)")
if self.shop.willbuy:
actor.tell(", and we specialize in", lang.join(lang.pluralize(word) for word in self.shop.willbuy))
actor.tell("\"\n", end=True)
# don't show shop.forsale, it is for the code to know what items have limitless supply
if self.inventory_size == 0:
actor.tell("%s apologizes, \"I'm sorry, but our stuff is all gone for the moment. Come back later.\"" % lang.capital(self.subjective))
self.do_socialize("shrug at " + actor.name)
else:
actor.tell("%s shows you a list of what is in stock at the moment:" % lang.capital(self.subjective), end=True)
txt = ["<ul> # <dim>|</><ul> item <dim>|</><ul> price </>"]
for i, item in enumerate(sorted_by_name(self.inventory), start=1):
price = item.value * self.shop.sellprofit
txt.append("%3d. %-30s %s" % (i, item.title, mud_context.driver.moneyfmt.display(price)))
actor.tell(*txt, format=False)
return True
def shop_inquire(self, parsed, actor):
item = None
if len(parsed.who_order) == 2:
# 'ask lucy about clock/#5/5'
item = parsed.who_order[0]
if not isinstance(item, Item):
item = parsed.who_order[1]
if not isinstance(item, Item):
item = None
elif len(parsed.who_order) == 1:
# 'ask about clock/#5/5'
item = parsed.who_order[0]
if not isinstance(item, Item):
item = None
if item:
# the parser found an item, check if there's one in the shop too with the same name.
shop_item = search_item(item.name, self.inventory)
if shop_item:
item = shop_item
if not item:
# no items in the question, try to extract name/number and look in the shop list #
for word in parsed.unrecognized:
if word in ("#", "about", "over"):
continue
if word.startswith("#"):
word = word[1:]
try:
number = int(word)
if number <= 0:
continue
except ValueError:
# not a number, search by name
item = search_item(word, self.inventory)
if not item:
continue
else:
# got a number in the shop
item = self._get_from_list(number)
if item:
# got an item, inquire about it
if item not in self:
raise ActionRefused("That is not something from the shop. You can examine the %s as usual." % item.name)
actor.tell("The shop sells %s." % lang.a(item.title))
if item.name in item.extra_desc:
actor.tell(lang.fullstop(item.extra_desc[item.name]))
elif item.description:
actor.tell(lang.fullstop(item.description))
if random.random() < 0.1:
actor.tell("\"Would you like to buy something?\", %s asks." % self.title)
elif random.random() < 0.1:
actor.tell("\"Take your time\", %s says." % self.title)
return True
if parsed.verb == "ask":
raise RetrySoulVerb
else:
raise ParseError("It's unclear what item you want to inquire about.")
def shop_appraise(self, parsed, actor):
item, designator = self._parse_item(parsed, actor)
if designator:
raise ParseError("It's not clear what item you mean.")
if item.value <= 0:
actor.tell("%s tells you it's worthless." % lang.capital(self.title))
return True
# @todo charisma bonus/malus
price = item.value * self.shop.buyprofit
value_str = mud_context.driver.moneyfmt.display(price)
actor.tell("%s appraises the %s." % (lang.capital(self.title), item.name))
actor.tell("%s tells you: \"I'll give you %s for it.\"" % (lang.capital(self.subjective), value_str))
return True
def shop_buy(self, parsed, actor):
if len(parsed.args) != 1:
raise ParseError("I don't understand what you want to buy.")
item = None
name = parsed.args[0]
if name[0] == '#':
# it's the Nth from the list
try:
num = int(name[1:])
if num <= 0:
raise ValueError("num needs to be 1 or higher")
item = sorted_by_name(self.inventory)[num - 1]
if search_item(item.title, self.shop.forsale):
item = clone(item) # make a clone and sell that, the forsale items should never run out
except ValueError:
raise ParseError("What number on the list do you mean?")
except IndexError:
raise ParseError("That number is not on the list.")
if not item:
item = search_item(name, self.shop.forsale)
if item:
item = clone(item) # make a clone and sell that, the forsale items should never run out
else:
# search inventory
item = self.search_item(name, include_inventory=True, include_location=False, include_containers_in_inventory=False)
if not item:
actor.tell("%s says: \"%s\"" % (lang.capital(self.title), self.shop.msg_playercantbuy))
return True
# sell the item to the customer
# @todo charisma bonus/malus
price = item.value * self.shop.sellprofit
if price > actor.money:
actor.tell("%s tells you: \"%s\"" % (lang.capital(self.title), self.shop.msg_playercantafford))
if self.shop.action_temper:
self.do_socialize("%s %s" % (self.shop.action_temper, actor.name))
return True
item.move(actor, actor)
actor.money -= price
self.money += price
assert actor.money >= 0.0
self.do_socialize("thank " + actor.name)
actor.tell("You've bought the %s!" % item.name)
if self.shop.msg_shopsolditem:
if "%d" in self.shop.msg_shopsolditem:
# old-style (circle) message with just a numeric value for the money
sold_msg = self.shop.msg_shopsolditem % price
else:
# new-style (tale) message with a %s placeholder for the money text
sold_msg = self.shop.msg_shopsolditem % mud_context.driver.moneyfmt.display(price)
actor.tell("%s says: \"%s\"" % (lang.capital(self.title), sold_msg))
else:
actor.tell("You paid %s for it." % mud_context.driver.moneyfmt.display(price))
if self.shop.banks_money:
# shopkeeper puts money over a limit in the bank
if self.money > banking_money_limit:
self.tell_others("Swiftly, %s puts some excess money away in a secret stash somewhere. You failed to see where it went." % self.title)
self.money = banking_money_limit
return True
def shop_sell(self, parsed, actor):
item, designator = self._parse_item(parsed, actor)
if designator:
raise ParseError("It's not clear what item you want to sell.")
if item.value <= 0 or isinstance(item, Trash):
actor.tell("%s tells you: \"%s\"" % (lang.capital(self.title), self.shop.msg_shopdoesnotbuy))
if self.shop.action_temper:
self.do_socialize("%s %s" % (self.shop.action_temper, actor.name))
return True
if search_item(item.title, self.shop.forsale):
# if the item is on the forsale list, don't buy it (we already have an endless supply)
actor.tell("%s tells you: \"%s\"" % (lang.capital(self.title), self.shop.msg_shopdoesnotbuy))
return True
# @todo check wontdealwith
# @todo check item type
# check money # @todo charisma bonus/malus
price = item.value * self.shop.buyprofit
limit = self.money * 0.75 # shopkeeper should not spend more than 75% of his money on a single sale
if price >= limit:
actor.tell("%s says: \"%s\"" % (lang.capital(self.title), self.shop.msg_shopcantafford))
return True
item.move(self, actor)
actor.money += price
self.money -= price
assert self.money >= 0.0
actor.tell("You've sold the %s." % item.name)
if self.shop.msg_shopboughtitem:
if "%d" in self.shop.msg_shopboughtitem:
# old-style (circle) message with just a numeric value for the money
bought_msg = self.shop.msg_shopboughtitem % price
else:
# new-style (tale) message with a %s placeholder for the money text
bought_msg = self.shop.msg_shopboughtitem % mud_context.driver.moneyfmt.display(price)
actor.tell("%s says: \"%s\"" % (lang.capital(self.title), bought_msg))
else:
actor.tell("%s gave you %s for it." % (lang.capital(self.title), mud_context.driver.moneyfmt.display(price)))
self.do_socialize("thank " + actor.name)
return True
class ShopBehavior(object):
"""the data describing the behavior of a particular shop"""
def __init__(self):
self.shopkeeper_vnum = None # used for circle data to designate the shopkeeper belonging to this shop
self.banks_money = False
self.will_fight = False
self._buyprofit = 0.3 # price factor when shop buys item
self._sellprofit = 1.6 # price factor when shop sells item
self.open_hours = [(9, 17), (18, 22)]
self.forsale = set() # items the shop always sells no matter how many are bought (should be in shopkeeper's inventory as well!)
self.msg_playercantafford = "No cash, no goods!"
self.msg_playercantbuy = "We don't sell that."
self.msg_playercantsell = "I don't think you have that."
self.msg_shopboughtitem = "Thank-you very much. Here are your %s as payment."
self.msg_shopcantafford = "I can't afford to buy anything, I'm only a poor peddler."
self.msg_shopdoesnotbuy = "I don't buy that stuff. Try another shop."
self.msg_shopsolditem = "Here you go. That'll be... %s."
self.action_temper = "smoke"
self.willbuy = set()
self.wontdealwith = set()
@property
def buyprofit(self):
return self._buyprofit
@buyprofit.setter
def buyprofit(self, value):
assert value <= 1.0
self._buyprofit = value
@property
def sellprofit(self):
return self._sellprofit
@sellprofit.setter
def sellprofit(self, value):
assert value >= 1.0
self._sellprofit = value
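# --- Illustrative wiring sketch (not part of the original module) ---
# How a shop might be configured. The names and numbers are invented;
# `keeper` is assumed to be a Shopkeeper whose inventory already contains
# `lantern`, as set_shop() requires for items placed on the forsale list.
def _example_shop_setup(keeper, lantern):
    shop = ShopBehavior()
    shop.open_hours = [(8, 20)]   # open from 8:00 to 20:00
    shop.buyprofit = 0.25         # pays 25% of item value when buying from players
    shop.sellprofit = 1.5         # charges 150% of item value when selling
    shop.forsale = {lantern}      # never runs out; sold as clones
    keeper.set_shop(shop)
    return shop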
|
sils1297/Tale
|
tale/shop.py
|
Python
|
gpl-3.0
| 17,240
|
# coding: utf-8
"""
Onshape REST API
The Onshape REST API consumed by all clients. # noqa: E501
The version of the OpenAPI document: 1.113
Contact: api-support@onshape.zendesk.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
import sys # noqa: F401
import six # noqa: F401
import nulltype # noqa: F401
from onshape_client.oas.model_utils import ( # noqa: F401
ModelComposed,
ModelNormal,
ModelSimple,
date,
datetime,
file_type,
int,
none_type,
str,
validate_get_composed_info,
)
try:
from onshape_client.oas.models import btm_individual_query_base139
except ImportError:
btm_individual_query_base139 = sys.modules[
"onshape_client.oas.models.btm_individual_query_base139"
]
try:
from onshape_client.oas.models import btm_individual_query_with_occurrence811_all_of
except ImportError:
btm_individual_query_with_occurrence811_all_of = sys.modules[
"onshape_client.oas.models.btm_individual_query_with_occurrence811_all_of"
]
try:
from onshape_client.oas.models import btm_individual_query_with_occurrence_base904
except ImportError:
btm_individual_query_with_occurrence_base904 = sys.modules[
"onshape_client.oas.models.btm_individual_query_with_occurrence_base904"
]
try:
from onshape_client.oas.models import btm_inference_query_with_occurrence1083
except ImportError:
btm_inference_query_with_occurrence1083 = sys.modules[
"onshape_client.oas.models.btm_inference_query_with_occurrence1083"
]
class BTMIndividualQueryWithOccurrence811(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {}
validations = {}
additional_properties_type = None
@staticmethod
def openapi_types():
"""
This must be a class method so a model may have properties that are
of type self, this ensures that we don't create a cyclic import
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
return {
"bt_type": (str,), # noqa: E501
"entity_query": (str,), # noqa: E501
"deterministic_id_list": (
btm_individual_query_base139.BTMIndividualQueryBase139,
), # noqa: E501
"deterministic_ids": ([str],), # noqa: E501
"import_microversion": (str,), # noqa: E501
"node_id": (str,), # noqa: E501
"path": ([str],), # noqa: E501
"query": (
btm_individual_query_base139.BTMIndividualQueryBase139,
), # noqa: E501
"query_string": (str,), # noqa: E501
}
@staticmethod
def discriminator():
return {
"bt_type": {
"BTMInferenceQueryWithOccurrence-1083": btm_inference_query_with_occurrence1083.BTMInferenceQueryWithOccurrence1083,
},
}
attribute_map = {
"bt_type": "btType", # noqa: E501
"entity_query": "entityQuery", # noqa: E501
"deterministic_id_list": "deterministicIdList", # noqa: E501
"deterministic_ids": "deterministicIds", # noqa: E501
"import_microversion": "importMicroversion", # noqa: E501
"node_id": "nodeId", # noqa: E501
"path": "path", # noqa: E501
"query": "query", # noqa: E501
"query_string": "queryString", # noqa: E501
}
required_properties = set(
[
"_data_store",
"_check_type",
"_from_server",
"_path_to_item",
"_configuration",
"_composed_instances",
"_var_name_to_model_instances",
"_additional_properties_model_instances",
]
)
def __init__(
self,
_check_type=True,
_from_server=False,
_path_to_item=(),
_configuration=None,
**kwargs
): # noqa: E501
"""btm_individual_query_with_occurrence811.BTMIndividualQueryWithOccurrence811 - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_from_server (bool): True if the data is from the server
False if the data is from the client (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
bt_type (str): [optional] # noqa: E501
entity_query (str): [optional] # noqa: E501
deterministic_id_list (btm_individual_query_base139.BTMIndividualQueryBase139): [optional] # noqa: E501
deterministic_ids ([str]): [optional] # noqa: E501
import_microversion (str): [optional] # noqa: E501
node_id (str): [optional] # noqa: E501
path ([str]): [optional] # noqa: E501
query (btm_individual_query_base139.BTMIndividualQueryBase139): [optional] # noqa: E501
query_string (str): [optional] # noqa: E501
"""
self._data_store = {}
self._check_type = _check_type
self._from_server = _from_server
self._path_to_item = _path_to_item
self._configuration = _configuration
constant_args = {
"_check_type": _check_type,
"_path_to_item": _path_to_item,
"_from_server": _from_server,
"_configuration": _configuration,
}
required_args = {}
# remove args whose value is Null because they are unset
required_arg_names = list(required_args.keys())
for required_arg_name in required_arg_names:
if required_args[required_arg_name] is nulltype.Null:
del required_args[required_arg_name]
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in six.iteritems(kwargs):
if (
var_name in unused_args
and self._configuration is not None
and self._configuration.discard_unknown_keys
and not self._additional_properties_model_instances
):
# discard variable.
continue
setattr(self, var_name, var_value)
@staticmethod
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
return {
"anyOf": [],
"allOf": [
btm_individual_query_with_occurrence811_all_of.BTMIndividualQueryWithOccurrence811AllOf,
btm_individual_query_with_occurrence_base904.BTMIndividualQueryWithOccurrenceBase904,
],
"oneOf": [],
}
@classmethod
def get_discriminator_class(cls, from_server, data):
"""Returns the child class specified by the discriminator"""
discriminator = cls.discriminator()
discr_propertyname_py = list(discriminator.keys())[0]
discr_propertyname_js = cls.attribute_map[discr_propertyname_py]
if from_server:
class_name = data[discr_propertyname_js]
else:
class_name = data[discr_propertyname_py]
class_name_to_discr_class = discriminator[discr_propertyname_py]
return class_name_to_discr_class.get(class_name)
|
onshape-public/onshape-clients
|
python/onshape_client/oas/models/btm_individual_query_with_occurrence811.py
|
Python
|
mit
| 9,868
|
import random
from apps.algorithms.mean import Mean
from apps.algorithms.standart_deviation import StandartDeviation
from apps.algorithms.z_value import ZValue
from apps.datasets.dataset import DataSet
__author__ = 'cenk'
def demo2():
data_list = []
value_size = 10000
val = 0
while val < value_size:
data_list.append(random.randint(0, 1000000000))
val += 1
    ## Add an anomaly here
data_list.append(999999999999999999)
random.shuffle(data_list)
dataset = DataSet()
dataset.set(data_list)
train, validation, test = dataset.split_train_validation_test_data()
training_list = train.get()
validation_list = validation.get()
test_list = test.get()
standart_deviation = StandartDeviation()
standart_deviation_value = standart_deviation.calculate(training_list)
mean = Mean()
mean_value = mean.calculate(training_list)
# print "Training Set: %s, Validation Set: %s, Test Set: %s" % (training_list, validation_list, test_list)
print "Standart Deviation: %f, Mean Value: %f" % (standart_deviation_value, mean_value)
z_value = ZValue()
counter = 0
for val in validation_list:
z_value.calculate(val, mean=mean_value, standart_deviation=standart_deviation_value)
table_value = z_value.find_from_table()
if table_value == -1:
print "This val is anomaly:", val
counter += 1
print "Anomaly Count: %d, Dataset Count: %d" % (counter, dataset.__len__())
counter = 0
for val in test_list:
z_value.calculate(val, mean=mean_value, standart_deviation=standart_deviation_value)
table_value = z_value.find_from_table()
if table_value == -1:
print "This val is anomaly:", val
counter += 1
print "Anomaly Count: %d, Dataset Count: %d" % (counter, dataset.__len__())
if __name__ == "__main__":
print "-*-" * 20, "Demo 2 Starts", "-*-" * 20
demo2()
print "-*-" * 20, "Demo 2 Ends", "-*-" * 20
|
cenkbircanoglu/Anomaly-Detection
|
demos/demo2.py
|
Python
|
mit
| 1,996
|
import waffle
from rest_framework.exceptions import NotFound
def waffle_feature_is_active(request, instance_type, instance_name):
"""
Determine if flag, switch, or sample is active for the given user.
:param request: Django request
:param instance_type: Either "flag", "switch", or "sample"
:param instance_name: *Name* of the flag/switch/sample
:return: Boolean. Is the flag/switch/or sample active?
"""
waffle_map = {
'flag': {
'waffle_func': waffle.flag_is_active,
'waffle_args': (request, instance_name),
},
'switch': {
'waffle_func': waffle.switch_is_active,
'waffle_args': (instance_name,),
},
'sample': {
'waffle_func': waffle.sample_is_active,
'waffle_args': (instance_name,),
},
}[instance_type]
return waffle_map['waffle_func'](*waffle_map['waffle_args'])
def require_flag(flag_name):
"""
Decorator to check whether waffle flag is active. If inactive, raises NotFound.
"""
def wrapper(fn):
return check_waffle_object(fn, 'flag', flag_name)
return wrapper
def require_switch(switch_name):
"""
Decorator to check whether waffle switch is active. If inactive, raises NotFound.
"""
def wrapper(fn):
return check_waffle_object(fn, 'switch', switch_name)
return wrapper
def require_sample(sample_name):
"""
Decorator to check whether waffle sample is active. If inactive, raises NotFound.
"""
def wrapper(fn):
return check_waffle_object(fn, 'sample', sample_name)
return wrapper
def check_waffle_object(fn, instance_type, instance_name):
def check_waffle_object(*args, **kwargs):
if waffle_feature_is_active(args[0].request, instance_type, instance_name):
return fn(*args, **kwargs)
else:
raise NotFound('Endpoint is disabled.')
return check_waffle_object
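# ---------------------------------------------------------------------------
# Editor's illustration (hedged sketch, not part of the original module): how
# these decorators are typically applied to a DRF view method. The view class
# "WidgetViewSet" and the flag name "my_feature" are made-up examples.
#
#     from rest_framework.viewsets import ViewSet
#
#     class WidgetViewSet(ViewSet):
#         @require_flag('my_feature')
#         def list(self, request):
#             # Reached only while the "my_feature" waffle flag is active for
#             # this request; otherwise check_waffle_object raises
#             # NotFound('Endpoint is disabled.').
#             ...
# ---------------------------------------------------------------------------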
|
pattisdr/osf.io
|
api/base/waffle_decorators.py
|
Python
|
apache-2.0
| 1,958
|
# -*- coding: utf-8 -*-
import os
class CronParser(object):
def __init__(self, logger=None):
self.logger = logger
def parse_cron_string(self, cron_string):
"""
Parse a cron data string
:param cron_string:
:return:
"""
formatted_cron_data = []
if cron_string:
cron_data = cron_string.split(u'\\n')
parsed_cron_data = self.parse_cron_data(cron_data)
formatted_cron_data = self.format_cron_data(parsed_cron_data)
return formatted_cron_data
def parse_cron_file(self, cron_file_path):
"""
Given a cron file return its contents in a list of dict
:param cron_file_path:
:return: formatted_cron_data dict
"""
formatted_cron_data = []
if self.is_valid_file_path(cron_file_path):
cron_data = self.get_cron_data(cron_file_path)
parsed_cron_data = self.parse_cron_data(cron_data)
formatted_cron_data = self.format_cron_data(parsed_cron_data)
return formatted_cron_data
def is_valid_file_path(self, cron_file_path):
"""
Check if the cron file exists / path is valid
:param cron_file_path:
:return:
"""
is_valid = False
if os.path.isfile(cron_file_path):
is_valid = True
return is_valid
def get_cron_data(self, cron_file_path):
"""
Read in a cron file
:param cron_file_path:
:return: list of cron config strings
"""
cron_data = []
try:
with open(cron_file_path, u'r') as cron_file:
for cron_config in cron_file:
if cron_config and len(cron_config) > 5:
cron_data.append(cron_config.strip(u'\n'))
except IOError as e:
if self.logger:
self.logger.error(u'Could not read cron file %s', e)
return cron_data
def parse_cron_data(self, cron_data):
"""
Parse and separate config string into constituent parts
:param cron_data:
:return list of lists of cron data
"""
parsed_cron_data = []
for cron_config in cron_data:
try:
cron_parts = cron_config.strip().split()
parsed_cron_data.append(cron_parts)
except ValueError as e:
if self.logger:
self.logger.warning(u'Malformed cron config %s', e)
return parsed_cron_data
def format_cron_data(self, cron_data):
"""
Put data into a desired list of dicts format
:param cron_data:
:return: structured list of dicts
"""
formatted_cron_data = []
for data in cron_data:
try:
format_data = {
u'minute': self.validate_in_range(data[0], 0, 59),
u'hour': self.validate_in_range(data[1], 0, 23),
u'path': data[2]
}
formatted_cron_data.append(format_data)
except IndexError as e:
if self.logger:
self.logger.error(u'Malformed cron entry %s', e)
return formatted_cron_data
def validate_in_range(self, value, low, high):
"""
Check a value is in range or return None
        :param value:
:param low:
:param high:
:return:
"""
validated = None
try:
if low <= int(value) <= high:
validated = int(value)
except ValueError:
# Is usually '*'
if value == u'*':
validated = u'*'
return validated
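# ---------------------------------------------------------------------------
# Editor's illustration (hedged sketch, not part of the original module): a
# minimal demonstration of parse_cron_string. Note that the parser splits on a
# literal backslash-n sequence (u'\\n'), not on real newlines, so the sample
# string below uses escaped backslashes. The paths are made-up examples.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    demo_parser = CronParser()
    demo_jobs = demo_parser.parse_cron_string(
        u'30 8 /usr/local/bin/backup.sh\\n0 23 /usr/local/bin/report.sh')
    # Roughly: [{'minute': 30, 'hour': 8, 'path': '/usr/local/bin/backup.sh'},
    #           {'minute': 0, 'hour': 23, 'path': '/usr/local/bin/report.sh'}]
    print(demo_jobs)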
|
ian-wilson/cron-admin
|
nextrun/cron_parser.py
|
Python
|
mit
| 3,747
|
# Spiel.py
import pygame
from pygame.locals import *
class Spiel(object):
def __init__(self):
self.Datum = "01-01"
self.Takt = 0
    def Zyklus_Morgen_Schule(self):
        pass  # body missing in the source; stub added so the module parses
|
Aurora-Beta/VinVG
|
experiments/spiel.py
|
Python
|
mit
| 175
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=eval-used,invalid-name,too-many-arguments
"""Utility functions"""
import tvm
from tvm import relay
from tvm.relay import transform
def has_multiple_inputs(node_list, node_idx, input_names, opt_out_op):
"""Check whether a node has multiple input nodes
except variable nodes.
Parameters
----------
node_list : list of dict of str to object
List of all nodes in a graph.
node_idx : int
Node index to be checked.
    input_names : list of str
        List of input names of graph.
    opt_out_op : list of str
        Names of operators to be skipped over; when an input node is one of
        these operators, its own inputs are examined in its place.
Returns
-------
out : bool
Whether the specified node has multiple input nodes
"""
num_inputs = 0
node = node_list[node_idx]
for in_idx in node["inputs"]:
in_idx = in_idx[0]
in_node = node_list[in_idx]
# Exclude parameter nodes
if in_node["op"] is not None and in_node["op"].name in opt_out_op:
increase = False
for t_idx in in_node["inputs"]:
increase = has_multiple_inputs(node_list, t_idx[0], input_names, opt_out_op)
if increase:
num_inputs += 1
elif in_node["op"] is not None or ("name" in in_node and in_node["name"] in input_names):
num_inputs += 1
return num_inputs > 1
def is_boundary_node(node_entry, input_names):
"""Whether a node is a boundary node.
Currently input node and nodes in LAYOUT_FIXED_OP are
counted as boundary.
Parameters
----------
node_entry : dict
Node entry.
input_names : list of str
List of input names of graph.
Returns
-------
out : bool
whether node is a boundary node.
"""
# Operators dependent on original layouts.
_LAYOUT_FIXED_OP = [
relay.op.get(name)
for name in (
"nn.batch_flatten",
"transpose",
"reshape",
"vision.multibox_prior",
"vision.multibox_transform_loc",
"where",
"vision.non_max_suppression",
"strided_slice",
)
]
out = node_entry["op"] in _LAYOUT_FIXED_OP or (
"name" in node_entry and node_entry["name"] in input_names
)
return out
def is_skipped_node(node_entry):
"""Whether a node is not counted.
Parameters
----------
node_entry : dict
Node entry.
Returns
-------
out : bool
whether node is skipped.
"""
# Operators not counted in graph tuner.
return isinstance(node_entry["node"], relay.Tuple)
def bind_inputs(expr, input_shapes=None, input_dtypes="float32"):
"""Bind input variables of a relay function expression
to new shapes and/or dtypes.
Parameters
----------
expr : tvm.relay.Expr.Function
Input relay function expression.
input_shapes : dict of str to tuple of int, optional
Input shapes.
input_dtypes : str or dict of str to str, optional
Input dtypes.
Returns
-------
out : tvm.relay.Expr.Function
        Relay function expression with the new inputs bound.
"""
if input_shapes is None:
return expr
if isinstance(input_dtypes, str):
input_dtypes = {key: input_dtypes for key in input_shapes.keys()}
updated_input_dict = {}
for input_name in input_shapes.keys():
updated_input = relay.var(
input_name, shape=input_shapes[input_name], dtype=input_dtypes[input_name]
)
updated_input_dict[input_name] = updated_input
rebind_dict = {}
for var in expr.params:
if var.name_hint in updated_input_dict:
rebind_dict[var] = updated_input_dict[var.name_hint]
updated_expr = relay.expr.bind(expr, rebind_dict)
mod = tvm.IRModule.from_expr(updated_expr)
mod = transform.InferType()(mod)
entry = mod["main"]
return entry if isinstance(updated_expr, relay.Function) else entry.body
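# ---------------------------------------------------------------------------
# Editor's illustration (hedged sketch, not part of the original module): how
# bind_inputs might be used to rebind an input shape. The variable name "data"
# and the shapes are made-up examples.
#
#     x = relay.var("data", shape=(1, 3, 224, 224), dtype="float32")
#     func = relay.Function([x], relay.nn.relu(x))
#     rebound = bind_inputs(func, input_shapes={"data": (8, 3, 224, 224)},
#                           input_dtypes="float32")
#     # "rebound" is the same function with its "data" parameter replaced by a
#     # new variable of shape (8, 3, 224, 224), after running InferType.
# ---------------------------------------------------------------------------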
|
Laurawly/tvm-1
|
python/tvm/autotvm/graph_tuner/utils/utils.py
|
Python
|
apache-2.0
| 4,682
|
version='alpha9'
version_info = (0,0,9)
|
chaomodus/pixywerk
|
pixywerk/version.py
|
Python
|
mit
| 40
|
from setuptools import setup, find_packages
setup(name='MODEL6399676120',
version=20140916,
description='MODEL6399676120 from BioModels',
url='http://www.ebi.ac.uk/biomodels-main/MODEL6399676120',
maintainer='Stanley Gu',
maintainer_url='stanleygu@gmail.com',
packages=find_packages(),
package_data={'': ['*.xml', 'README.md']},
)
|
biomodels/MODEL6399676120
|
setup.py
|
Python
|
cc0-1.0
| 377
|
# -*- coding: utf-8 -*-
""" test_dirtools.py - Test the dirtools module with pyfakefs. """
import shutil
import unittest
import os
import tarfile
import time
try:
import fake_filesystem
import fake_filesystem_shutil
except ImportError:
print "You must install pyfakefs in order to run the test suite."
import dirtools
class TestDirtools(unittest.TestCase):
def setUp(self):
""" Initialize a fake filesystem and dirtools. """
# First we create a fake filesystem in order to test dirtools
fk = fake_filesystem.FakeFilesystem()
fk.CreateDirectory('/test_dirtools')
fk.CreateFile('/test_dirtools/file1', contents='contents1')
fk.CreateFile('/test_dirtools/file2', contents='contents2')
fk.CreateFile('/test_dirtools/file3.py', contents='print "ok"')
fk.CreateFile('/test_dirtools/file3.pyc', contents='')
fk.CreateFile('/test_dirtools/.exclude', contents='excluded_dir/\n*.pyc')
fk.CreateDirectory('/test_dirtools/excluded_dir')
fk.CreateFile('/test_dirtools/excluded_dir/excluded_file',
contents='excluded')
fk.CreateDirectory('/test_dirtools/dir1')
fk.CreateDirectory('/test_dirtools/dir1/subdir1')
fk.CreateFile('/test_dirtools/dir1/subdir1/file_subdir1',
contents='inside subdir1')
fk.CreateFile('/test_dirtools/dir1/subdir1/.project')
fk.CreateDirectory('/test_dirtools/dir2')
fk.CreateFile('/test_dirtools/dir2/file_dir2', contents='inside dir2')
# Sort of "monkey patch" to make dirtools use the fake filesystem
dirtools.os = fake_filesystem.FakeOsModule(fk)
dirtools.open = fake_filesystem.FakeFileOpen(fk)
# Dirtools initialization
self.dir = dirtools.Dir('/test_dirtools')
self.os = dirtools.os
self.open = dirtools.open
self.shutil = fake_filesystem_shutil.FakeShutilModule(fk)
self.fk = fk
def testFiles(self):
""" Check that Dir.files return all files, except those excluded. """
self.assertEqual(sorted(self.dir.files()),
sorted(["file1",
"file2",
"file3.py",
".exclude",
"dir1/subdir1/file_subdir1",
"dir1/subdir1/.project",
"dir2/file_dir2"]))
def testFilesWithPatterns(self):
""" Check that Dir.files return all files matching the pattern, except those excluded. """
self.assertEqual(sorted(self.dir.files("*.py")),
sorted(["file3.py"]))
self.assertEqual(sorted(self.dir.files("*_dir2")),
sorted(["dir2/file_dir2"]))
def testSubdirs(self):
""" Check that Dir.subdirs return all subdirs, except those excluded. """
self.assertEqual(sorted(self.dir.subdirs()),
sorted(["dir1",
"dir1/subdir1",
"dir2"]))
def testSubdirsWithPatterns(self):
""" Check that Dir.subdirs return all subdirs matching the pattern, except those excluded. """
self.assertEqual(sorted(self.dir.subdirs("*1")),
sorted(["dir1",
"dir1/subdir1"]))
def testHashdir(self):
""" Check that the hashdir changes when a file change in the tree. """
hashdir = self.dir.hash(dirtools.filehash)
with self.open('/test_dirtools/file2', 'w') as f:
f.write("new content")
new_hashdir = self.dir.hash(dirtools.filehash)
self.assertNotEqual(hashdir, new_hashdir)
def testDirState(self):
dir_state = dirtools.DirState(self.dir, index_cmp=dirtools.filehash)
self.shutil.copytree('/test_dirtools', 'test_dirtools2')
with self.open('/test_dirtools2/dir1/subdir1/file_subdir1', 'w') as f:
f.write("dir state")
with self.open('/test_dirtools2/new_file', 'w') as f:
f.write("dir state")
self.os.remove('/test_dirtools2/file1')
self.shutil.rmtree('/test_dirtools2/dir2')
dir_state2 = dirtools.DirState(dirtools.Dir('/test_dirtools2'), index_cmp=dirtools.filehash)
diff = dir_state2 - dir_state
self.assertEqual(diff, {'deleted': ['file1', 'dir2/file_dir2'], 'updated': ['dir1/subdir1/file_subdir1'], 'deleted_dirs': ['dir2'], 'created': ['new_file']})
self.assertEqual(diff, dirtools.compute_diff(dir_state2.state, dir_state.state))
def testExclude(self):
""" Check that Dir.is_excluded actually exclude files. """
self.assertTrue(self.dir.is_excluded("excluded_dir"))
# Only the dir is excluded, the exclude line is excluded_dir/ not excluded_dir/*
self.assertFalse(self.dir.is_excluded("excluded_dir/excluded_file"))
self.assertTrue(self.dir.is_excluded("file3.pyc"))
self.assertFalse(self.dir.is_excluded("file3.py"))
def testProjects(self):
""" Check if Dir.find_projects find all projects in the directory tree. """
self.assertEqual(self.dir.find_projects(".project"), ['dir1/subdir1'])
def testCompression(self):
""" Check the compression, withouth pyfakefs because it doesn't support tarfile. """
dirtools.os = os
dirtools.open = open
test_dir = '/tmp/test_dirtools'
if os.path.isdir(test_dir):
shutil.rmtree(test_dir)
os.mkdir(test_dir)
with open(os.path.join(test_dir, 'file1'), 'w') as f:
f.write(os.urandom(2 ** 10))
with open(os.path.join(test_dir, 'file2.pyc'), 'w') as f:
f.write('excluded')
os.mkdir(os.path.join(test_dir, 'dir1'))
with open(os.path.join(test_dir, 'dir1/file1'), 'w') as f:
f.write(os.urandom(2 ** 10))
cdir = dirtools.Dir(test_dir)
archive_path = cdir.compress_to()
tar = tarfile.open(archive_path)
test_dir_extract = '/tmp/test_dirtools_extract'
if os.path.isdir(test_dir_extract):
shutil.rmtree(test_dir_extract)
os.mkdir(test_dir_extract)
tar.extractall(test_dir_extract)
extracted_dir = dirtools.Dir(test_dir_extract)
self.assertEqual(sorted(extracted_dir.files()),
sorted(cdir.files()))
self.assertEqual(sorted(extracted_dir.subdirs()),
sorted(cdir.subdirs()))
self.assertEqual(extracted_dir.hash(dirtools.filehash),
cdir.hash(dirtools.filehash))
shutil.rmtree(test_dir)
shutil.rmtree(test_dir_extract)
os.remove(archive_path)
if __name__ == '__main__':
unittest.main()
|
tsileo/dirtools
|
test_dirtools.py
|
Python
|
mit
| 6,852
|
# -*- coding: utf-8 -*-
#
# Copyright 2015 Federico Ficarelli
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
import glob
import os
import sys
if __name__ == "__main__":
DIRNAME = os.path.abspath(os.path.dirname(__file__))
if DIRNAME:
os.chdir(DIRNAME)
try:
py_dirname = DIRNAME
sys.path.insert(0, py_dirname)
import observer
version = observer.__version__
finally:
del sys.path[0]
# search executables
scripts = []
for filepath in glob.glob('bin/*'):
if os.path.isfile(filepath) and os.access(filepath, os.X_OK):
scripts.append(filepath)
# search packages
root_packages = []
packages = []
for package in root_packages:
package_dirname = os.path.join(DIRNAME, package)
for dirpath, dirnames, filenames in os.walk(package_dirname):
if '__init__.py' in filenames:
rdirpath = os.path.relpath(dirpath, DIRNAME)
packages.append(os.path.normpath(rdirpath).replace(os.sep, '.'))
setup(
name="python-observer",
version=version,
requires=[],
description="Python Observer Pattern",
author="Federico Ficarelli",
author_email="federico.ficarelli@gmail.com",
install_requires=(),
package_data={},
url="https://nazavode.github.io",
packages=packages,
scripts=scripts,
py_modules=['observer'],
classifiers=[
# status:
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 4 - Beta',
# audience:
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
# license:
'License :: OSI Approved :: Apache Software License',
# language:
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
],
keywords='observer design pattern',
)
|
nazavode/observer
|
setup.py
|
Python
|
apache-2.0
| 2,604
|
# -*- coding: utf-8 -*-
def social_eyebrow(entity, argument):
return True
#- End of function -
|
Onirik79/aaritmud
|
src/socials/social_eyebrow.py
|
Python
|
gpl-2.0
| 98
|
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2021, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This module is used as a nupic.research.frameworks plugin entrypoint to the
vernon command line parser interface.
Each nupic.research framework willing to add command line arguments to the
vernon framework must implement two functions::
- get_parser : Returns preconfigured `argparse.ArgumentParser` class to be
added to the main `argparse.ArgumentParser`.
- process_args : Processes parsed arguments to modify config appropriately.
See nupic.research.frameworks.vernon.parset_utils for more details
"""
import argparse
import socket
__all__ = [
"get_parser",
"process_args",
]
def get_parser():
"""
Returns command line `argparse.ArgumentParser` with ray and ray tune command
line arguments
"""
ray_parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
argument_default=argparse.SUPPRESS,
add_help=False,
)
ray_parser.add_argument("-s", "--with-server", action="store_true",
help="Start Ray Tune API server")
ray_parser.add_argument("--single_instance", action="store_true",
help="Uses single instance run method")
ray_parser.add_argument("--local-mode", action="store_true",
help="Start ray in local mode. Useful for debugging")
ray_parser.add_argument("-a", "--redis-address",
help="redis address of an existing Ray server",
default="{}:6379".format(
socket.gethostbyname(socket.gethostname())
))
return ray_parser
def process_args(args, config):
"""
Processes parsed arguments to modify config appropriately.
:return: modified config or None to exit without running
"""
return config
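# ---------------------------------------------------------------------------
# Editor's illustration (hedged sketch, not part of the original module): the
# parser returned above is built with add_help=False so that it can be plugged
# into a main parser as a parent, roughly like this:
#
#     main_parser = argparse.ArgumentParser(parents=[get_parser()])
#     args = main_parser.parse_args(["--local-mode", "--single_instance"])
#     config = process_args(args, config)   # "config" comes from the caller
# ---------------------------------------------------------------------------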
|
numenta/nupic.research
|
packages/ray/src/nupic/research/frameworks/ray/command_line_args.py
|
Python
|
agpl-3.0
| 2,825
|
from datetime import datetime
import numpy as np
import pytest
import pandas as pd
from pandas import NaT, Series, Timestamp
import pandas._testing as tm
from pandas.core.internals.blocks import IntBlock
class TestSeriesInternals:
# GH 10265
def test_convert(self):
# Tests: All to nans, coerce, true
# Test coercion returns correct type
s = Series(["a", "b", "c"])
results = s._convert(datetime=True, coerce=True)
expected = Series([NaT] * 3)
tm.assert_series_equal(results, expected)
results = s._convert(numeric=True, coerce=True)
expected = Series([np.nan] * 3)
tm.assert_series_equal(results, expected)
expected = Series([NaT] * 3, dtype=np.dtype("m8[ns]"))
results = s._convert(timedelta=True, coerce=True)
tm.assert_series_equal(results, expected)
dt = datetime(2001, 1, 1, 0, 0)
td = dt - datetime(2000, 1, 1, 0, 0)
# Test coercion with mixed types
s = Series(["a", "3.1415", dt, td])
results = s._convert(datetime=True, coerce=True)
expected = Series([NaT, NaT, dt, NaT])
tm.assert_series_equal(results, expected)
results = s._convert(numeric=True, coerce=True)
expected = Series([np.nan, 3.1415, np.nan, np.nan])
tm.assert_series_equal(results, expected)
results = s._convert(timedelta=True, coerce=True)
expected = Series([NaT, NaT, NaT, td], dtype=np.dtype("m8[ns]"))
tm.assert_series_equal(results, expected)
# Test standard conversion returns original
results = s._convert(datetime=True)
tm.assert_series_equal(results, s)
results = s._convert(numeric=True)
expected = Series([np.nan, 3.1415, np.nan, np.nan])
tm.assert_series_equal(results, expected)
results = s._convert(timedelta=True)
tm.assert_series_equal(results, s)
# test pass-through and non-conversion when other types selected
s = Series(["1.0", "2.0", "3.0"])
results = s._convert(datetime=True, numeric=True, timedelta=True)
expected = Series([1.0, 2.0, 3.0])
tm.assert_series_equal(results, expected)
results = s._convert(True, False, True)
tm.assert_series_equal(results, s)
s = Series([datetime(2001, 1, 1, 0, 0), datetime(2001, 1, 1, 0, 0)], dtype="O")
results = s._convert(datetime=True, numeric=True, timedelta=True)
expected = Series([datetime(2001, 1, 1, 0, 0), datetime(2001, 1, 1, 0, 0)])
tm.assert_series_equal(results, expected)
results = s._convert(datetime=False, numeric=True, timedelta=True)
tm.assert_series_equal(results, s)
td = datetime(2001, 1, 1, 0, 0) - datetime(2000, 1, 1, 0, 0)
s = Series([td, td], dtype="O")
results = s._convert(datetime=True, numeric=True, timedelta=True)
expected = Series([td, td])
tm.assert_series_equal(results, expected)
results = s._convert(True, True, False)
tm.assert_series_equal(results, s)
s = Series([1.0, 2, 3], index=["a", "b", "c"])
result = s._convert(numeric=True)
tm.assert_series_equal(result, s)
# force numeric conversion
r = s.copy().astype("O")
r["a"] = "1"
result = r._convert(numeric=True)
tm.assert_series_equal(result, s)
r = s.copy().astype("O")
r["a"] = "1."
result = r._convert(numeric=True)
tm.assert_series_equal(result, s)
r = s.copy().astype("O")
r["a"] = "garbled"
result = r._convert(numeric=True)
expected = s.copy()
expected["a"] = np.nan
tm.assert_series_equal(result, expected)
        # GH 4119, not converting a mixed type (e.g. floats and object)
s = Series([1, "na", 3, 4])
result = s._convert(datetime=True, numeric=True)
expected = Series([1, np.nan, 3, 4])
tm.assert_series_equal(result, expected)
s = Series([1, "", 3, 4])
result = s._convert(datetime=True, numeric=True)
tm.assert_series_equal(result, expected)
# dates
s = Series(
[
datetime(2001, 1, 1, 0, 0),
datetime(2001, 1, 2, 0, 0),
datetime(2001, 1, 3, 0, 0),
]
)
s2 = Series(
[
datetime(2001, 1, 1, 0, 0),
datetime(2001, 1, 2, 0, 0),
datetime(2001, 1, 3, 0, 0),
"foo",
1.0,
1,
Timestamp("20010104"),
"20010105",
],
dtype="O",
)
result = s._convert(datetime=True)
expected = Series(
[Timestamp("20010101"), Timestamp("20010102"), Timestamp("20010103")],
dtype="M8[ns]",
)
tm.assert_series_equal(result, expected)
result = s._convert(datetime=True, coerce=True)
tm.assert_series_equal(result, expected)
expected = Series(
[
Timestamp("20010101"),
Timestamp("20010102"),
Timestamp("20010103"),
NaT,
NaT,
NaT,
Timestamp("20010104"),
Timestamp("20010105"),
],
dtype="M8[ns]",
)
result = s2._convert(datetime=True, numeric=False, timedelta=False, coerce=True)
tm.assert_series_equal(result, expected)
result = s2._convert(datetime=True, coerce=True)
tm.assert_series_equal(result, expected)
s = Series(["foo", "bar", 1, 1.0], dtype="O")
result = s._convert(datetime=True, coerce=True)
expected = Series([NaT] * 2 + [Timestamp(1)] * 2)
tm.assert_series_equal(result, expected)
        # preserve if non-object
s = Series([1], dtype="float32")
result = s._convert(datetime=True, coerce=True)
tm.assert_series_equal(result, s)
# FIXME: dont leave commented-out
# r = s.copy()
# r[0] = np.nan
# result = r._convert(convert_dates=True,convert_numeric=False)
# assert result.dtype == 'M8[ns]'
# dateutil parses some single letters into today's value as a date
expected = Series([NaT])
for x in "abcdefghijklmnopqrstuvwxyz":
s = Series([x])
result = s._convert(datetime=True, coerce=True)
tm.assert_series_equal(result, expected)
s = Series([x.upper()])
result = s._convert(datetime=True, coerce=True)
tm.assert_series_equal(result, expected)
def test_convert_no_arg_error(self):
s = Series(["1.0", "2"])
msg = r"At least one of datetime, numeric or timedelta must be True\."
with pytest.raises(ValueError, match=msg):
s._convert()
def test_convert_preserve_bool(self):
s = Series([1, True, 3, 5], dtype=object)
r = s._convert(datetime=True, numeric=True)
e = Series([1, 1, 3, 5], dtype="i8")
tm.assert_series_equal(r, e)
def test_convert_preserve_all_bool(self):
s = Series([False, True, False, False], dtype=object)
r = s._convert(datetime=True, numeric=True)
e = Series([False, True, False, False], dtype=bool)
tm.assert_series_equal(r, e)
def test_constructor_no_pandas_array(self):
ser = pd.Series([1, 2, 3])
result = pd.Series(ser.array)
tm.assert_series_equal(ser, result)
assert isinstance(result._mgr.blocks[0], IntBlock)
def test_astype_no_pandas_dtype(self):
# https://github.com/pandas-dev/pandas/pull/24866
ser = pd.Series([1, 2], dtype="int64")
# Don't have PandasDtype in the public API, so we use `.array.dtype`,
# which is a PandasDtype.
result = ser.astype(ser.array.dtype)
tm.assert_series_equal(result, ser)
def test_from_array(self):
result = pd.Series(pd.array(["1H", "2H"], dtype="timedelta64[ns]"))
assert result._mgr.blocks[0].is_extension is False
result = pd.Series(pd.array(["2015"], dtype="datetime64[ns]"))
assert result._mgr.blocks[0].is_extension is False
def test_from_list_dtype(self):
result = pd.Series(["1H", "2H"], dtype="timedelta64[ns]")
assert result._mgr.blocks[0].is_extension is False
result = pd.Series(["2015"], dtype="datetime64[ns]")
assert result._mgr.blocks[0].is_extension is False
def test_hasnans_uncached_for_series():
# GH#19700
idx = pd.Index([0, 1])
assert idx.hasnans is False
assert "hasnans" in idx._cache
ser = idx.to_series()
assert ser.hasnans is False
assert not hasattr(ser, "_cache")
ser.iloc[-1] = np.nan
assert ser.hasnans is True
assert Series.hasnans.__doc__ == pd.Index.hasnans.__doc__
|
TomAugspurger/pandas
|
pandas/tests/series/test_internals.py
|
Python
|
bsd-3-clause
| 8,912
|
# Copyright (c) 2006 Nathan Binkert <nate@binkert.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class _neg_inf(object):
'''This object always compares less than any other object'''
def __repr__(self): return '<neg_inf>'
def __lt__(self, other): return type(self) != type(other)
def __le__(self, other): return True
def __gt__(self, other): return False
def __ge__(self, other): return type(self) == type(other)
def __eq__(self, other): return type(self) == type(other)
def __ne__(self, other): return type(self) != type(other)
neg_inf = _neg_inf()
class _pos_inf(object):
'''This object always compares greater than any other object'''
def __repr__(self): return '<pos_inf>'
def __lt__(self, other): return False
def __le__(self, other): return type(self) == type(other)
def __gt__(self, other): return type(self) != type(other)
def __ge__(self, other): return True
def __eq__(self, other): return type(self) == type(other)
def __ne__(self, other): return type(self) != type(other)
pos_inf = _pos_inf()
class Region(tuple):
'''A region (range) of [start, end).
This includes utility functions to compare overlap of regions.'''
def __new__(cls, *args):
if len(args) == 1:
arg = args[0]
if isinstance(arg, Region):
return arg
args = tuple(arg)
if len(args) != 2:
raise AttributeError, \
"Only one or two arguments allowed, %d provided" % (alen, )
return tuple.__new__(cls, args)
def __repr__(self):
return 'Region(%s, %s)' % (self[0], self[1])
@property
def start(self):
return self[0]
@property
def end(self):
return self[1]
def __contains__(self, other):
'''other is
        region: True if other is fully contained within self.
pos: True if other is within the region'''
if isinstance(other, tuple):
return self[0] <= other[0] and self[1] >= other[1]
return self[0] <= other and other < self[1]
def __eq__(self, other):
'''other is
region: True if self and other are identical.
pos: True if other is within the region'''
if isinstance(other, tuple):
return self[0] == other[0] and self[1] == other[1]
return self[0] <= other and other < self[1]
# @param self is a region.
# @param other is a region.
# @return if self and other are not identical.
def __ne__(self, other):
'''other is
region: true if they are not identical
pos: True if other is not in the region'''
if isinstance(other, tuple):
return self[0] != other[0] or self[1] != other[1]
return other < self[0] or self[1] <= other
# @param self is a region.
# @param other is a region.
# @return if self is less than other and does not overlap self.
def __lt__(self, other):
"self completely left of other (cannot overlap)"
if isinstance(other, tuple):
return self[1] <= other[0]
return self[1] <= other
# @param self is a region.
# @param other is a region.
# @return if self is less than other. self may overlap other,
    # but not extend beyond the end of other.
def __le__(self, other):
"self extends to the left of other (can overlap)"
if isinstance(other, tuple):
return self[0] <= other[0]
return self[0] <= other
# @param self is a region.
# @param other is a region.
# @return if self is greater than other and does not overlap other.
def __gt__(self, other):
"self is completely right of other (cannot overlap)"
if isinstance(other, tuple):
return self[0] >= other[1]
return self[0] > other
# @param self is a region.
# @param other is a region.
# @return if self is greater than other. self may overlap other,
# but not extend beyond the beginning of other.
def __ge__(self, other):
"self ex_ends beyond other to the right (can overlap)"
if isinstance(other, tuple):
return self[1] >= other[1]
return self[1] > other
class Regions(object):
'''A set of regions (ranges). Basically a region with holes.
Includes utility functions to merge regions and figure out if
something is in one of the regions.'''
def __init__(self, *args):
self.regions = []
self.extend(*args)
def copy(self):
copy = Regions()
copy.regions.extend(self.regions)
return copy
def append(self, *args):
self.regions.append(Region(*args))
def extend(self, *args):
self.regions.extend(Region(a) for a in args)
def __contains__(self, position):
for region in self.regions:
if position in region:
return True
return False
def __len__(self):
return len(self.regions)
def __iand__(self, other):
A = self.regions
B = other.regions
R = []
i = 0
j = 0
while i < len(self) and j < len(other):
a = A[i]
b = B[j]
if a[1] <= b[0]:
# A is completely before B. Skip A
i += 1
elif a[0] <= b[0]:
if a[1] <= b[1]:
# A and B overlap with B not left of A and A not right of B
R.append(Region(b[0], a[1]))
# Advance A because nothing is left
i += 1
if a[1] == b[1]:
# Advance B too
j += 1
else:
# A and B overlap with B completely within the bounds of A
R.append(Region(b[0], b[1]))
# Advance only B because some of A may still be useful
j += 1
elif b[1] <= a[0]:
# B is completely before A. Skip B.
j += 1
else:
assert b[0] < a[0]
if b[1] <= a[1]:
# A and B overlap with A not left of B and B not right of A
R.append(Region(a[0], b[1]))
# Advance B because nothing is left
j += 1
if a[1] == b[1]:
# Advance A too
i += 1
else:
# A and B overlap with A completely within the bounds of B
R.append(Region(a[0], a[1]))
# Advance only A because some of B may still be useful
i += 1
self.regions = R
return self
def __and__(self, other):
result = self.copy()
result &= other
return result
def __repr__(self):
return 'Regions(%s)' % ([(r[0], r[1]) for r in self.regions], )
all_regions = Regions(Region(neg_inf, pos_inf))
if __name__ == '__main__':
x = Regions(*((i, i + 1) for i in xrange(0,30,2)))
y = Regions(*((i, i + 4) for i in xrange(0,30,5)))
z = Region(6,7)
n = Region(9,10)
def test(left, right):
print "%s == %s: %s" % (left, right, left == right)
print "%s != %s: %s" % (left, right, left != right)
print "%s < %s: %s" % (left, right, left < right)
print "%s <= %s: %s" % (left, right, left <= right)
print "%s > %s: %s" % (left, right, left > right)
print "%s >= %s: %s" % (left, right, left >= right)
print
test(neg_inf, neg_inf)
test(neg_inf, pos_inf)
test(pos_inf, neg_inf)
test(pos_inf, pos_inf)
test(neg_inf, 0)
test(neg_inf, -11111)
test(neg_inf, 11111)
test(0, neg_inf)
test(-11111, neg_inf)
test(11111, neg_inf)
test(pos_inf, 0)
test(pos_inf, -11111)
test(pos_inf, 11111)
test(0, pos_inf)
test(-11111, pos_inf)
test(11111, pos_inf)
print x
print y
print x & y
print z
print 4 in x
print 4 in z
print 5 not in x
print 6 not in z
print z in y
print n in y, n not in y
|
rjschof/gem5
|
util/style/region.py
|
Python
|
bsd-3-clause
| 9,612
|
from batch_iv_analysis.ivAnalyzer import ivAnalyzer
import argparse
def runCLI(analyzer,args):
analyzer.setup()
print ("Got args:", args)
def handle_cli():
parser = argparse.ArgumentParser(description='Process some iv data.')
parser.add_argument('-f', '--files', default=None, type=argparse.FileType('r'), help='File(s) to analyze.')
parser.add_argument('-g', '--gui', default=False, action='store_true', help='Run with GUI.')
parser.add_argument('-s', '--no-sloppy', dest='sloppyMath', default=True, action='store_false', help="Don't do sloppy math (slower).")
parser.add_argument('-w', '--workers', default=0, type=int, help='Multiprocessing control. w=0 disables it. w>0 enables it with w workers.')
args = parser.parse_args()
if args.gui == False and args.files == None:
raise(ValueError('Command line interface (cli) mode needs files to process'))
a = ivAnalyzer(beFastAndSloppy=args.sloppyMath, poolWorkers=args.workers)
if args.gui == False:
runCLI(a,args)
else:
import batch_iv_analysis.gui as gui
gui.runGUI(a,args)
if __name__ == "__main__":
handle_cli()
|
greysAcademicCode/batch-iv-analysis
|
batch_iv_analysis/cli.py
|
Python
|
mit
| 1,115
|
#
# This file is part of pySMT.
#
# Copyright 2014 Andrea Micheli and Marco Gario
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import unittest
from pysmt.shortcuts import *
from pysmt.typing import REAL, BOOL, INT
from pysmt.test import (TestCase, skipIfSolverNotAvailable)
from pysmt.exceptions import (SolverReturnedUnknownResultError, \
NoSolverAvailableError)
from pysmt.logics import LRA, LIA, UFLIRA
class TestInterpolation(TestCase):
def test_selection(self):
with self.assertRaises(NoSolverAvailableError):
Interpolator(logic=UFLIRA)
with self.assertRaises(NoSolverAvailableError):
Interpolator(name="nonexistent")
@skipIfSolverNotAvailable('z3')
def test_binary_interpolant_z3(self):
self._test_binary_interpolant('z3')
@skipIfSolverNotAvailable('msat')
def test_binary_interpolant_msat(self):
self._test_binary_interpolant('msat')
@skipIfSolverNotAvailable('z3')
def test_sequence_interpolant_z3(self):
self._test_sequence_interpolant('z3')
@skipIfSolverNotAvailable('msat')
def test_sequence_interpolant_msat(self):
self._test_sequence_interpolant('msat')
def _test_binary_interpolant(self, name):
itp = Interpolator(name=name)
self._bool_example(itp, True)
self._real_example(itp, True)
self._int_example(itp, True)
def _test_sequence_interpolant(self, name):
itp = Interpolator(name=name)
self._bool_example(itp, False)
self._real_example(itp, False)
self._int_example(itp, False)
def _bool_example(self, itp, binary):
# Bool Example
x, y, z = Symbol("bx"), Symbol("by"), Symbol("bz")
a = And(x, Not(y))
b = And(Implies(z, y), z)
if binary:
i = itp.binary_interpolant(a, b)
else:
i = itp.sequence_interpolant([a, b])
self.assertTrue(i is not None)
if not binary:
self.assertTrue(hasattr(i, '__len__'))
self.assertTrue(len(i) == 1)
i = i[0]
self.assertTrue(i.get_free_variables() == set([y]))
self.assertValid(Implies(a, i))
self.assertUnsat(And(i, b))
def _real_example(self, itp, binary):
# Real Example
x, y, z = Symbol("rx", REAL), Symbol("ry", REAL), Symbol("rz", REAL)
a = And(LE(x, Real(0)), LE(y, x))
b = And(GE(y, z), Equals(z, Real(1)))
if binary:
i = itp.binary_interpolant(a, b)
else:
i = itp.sequence_interpolant([a, b])
self.assertTrue(i is not None)
if not binary:
self.assertTrue(hasattr(i, '__len__'))
self.assertTrue(len(i) == 1)
i = i[0]
self.assertTrue(i.get_free_variables() == set([y]))
self.assertValid(Implies(a, i))
self.assertUnsat(And(i, b))
def _int_example(self, itp, binary):
# Int Example
x, y, z = Symbol("ix", INT), Symbol("iy", INT), Symbol("iz", INT)
a = And(LE(x, Int(1)), LT(y, x))
b = And(GE(y, z), GT(z, Int(0)))
if binary:
i = itp.binary_interpolant(a, b)
else:
i = itp.sequence_interpolant([a, b])
self.assertTrue(i is not None)
if not binary:
self.assertTrue(hasattr(i, '__len__'))
self.assertTrue(len(i) == 1)
i = i[0]
self.assertTrue(i.get_free_variables() == set([y]))
self.assertValid(Implies(a, i))
self.assertUnsat(And(i, b))
if __name__ == '__main__':
unittest.main()
|
idkwim/pysmt
|
pysmt/test/test_interpolation.py
|
Python
|
apache-2.0
| 4,190
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from builtins import str
from pyhive import presto
from pyhive.exc import DatabaseError
from airflow.hooks.dbapi_hook import DbApiHook
class PrestoException(Exception):
pass
class PrestoHook(DbApiHook):
"""
Interact with Presto through PyHive!
>>> ph = PrestoHook()
>>> sql = "SELECT count(1) AS num FROM airflow.static_babynames"
>>> ph.get_records(sql)
[[340698]]
"""
conn_name_attr = 'presto_conn_id'
default_conn_name = 'presto_default'
def get_conn(self):
"""Returns a connection object"""
db = self.get_connection(self.presto_conn_id)
return presto.connect(
host=db.host,
port=db.port,
username=db.login,
catalog=db.extra_dejson.get('catalog', 'hive'),
schema=db.schema)
@staticmethod
def _strip_sql(sql):
return sql.strip().rstrip(';')
@staticmethod
def _get_pretty_exception_message(e):
"""
Parses some DatabaseError to provide a better error message
"""
if (hasattr(e, 'message') and
'errorName' in e.message and
'message' in e.message):
return ('{name}: {message}'.format(
name=e.message['errorName'],
message=e.message['message']))
else:
return str(e)
def get_records(self, hql, parameters=None):
"""
Get a set of records from Presto
"""
try:
return super(PrestoHook, self).get_records(
self._strip_sql(hql), parameters)
except DatabaseError as e:
raise PrestoException(self._get_pretty_exception_message(e))
def get_first(self, hql, parameters=None):
"""
Returns only the first row, regardless of how many rows the query
returns.
"""
try:
return super(PrestoHook, self).get_first(
self._strip_sql(hql), parameters)
except DatabaseError as e:
raise PrestoException(self._get_pretty_exception_message(e))
def get_pandas_df(self, hql, parameters=None):
"""
Get a pandas dataframe from a sql query.
"""
import pandas
cursor = self.get_cursor()
try:
cursor.execute(self._strip_sql(hql), parameters)
data = cursor.fetchall()
except DatabaseError as e:
raise PrestoException(self._get_pretty_exception_message(e))
column_descriptions = cursor.description
if data:
df = pandas.DataFrame(data)
df.columns = [c[0] for c in column_descriptions]
else:
df = pandas.DataFrame()
return df
def run(self, hql, parameters=None):
"""
Execute the statement against Presto. Can be used to create views.
"""
return super(PrestoHook, self).run(self._strip_sql(hql), parameters)
# TODO Enable commit_every once PyHive supports transaction.
# Unfortunately, PyHive 0.5.1 doesn't support transaction for now,
# whereas Presto 0.132+ does.
def insert_rows(self, table, rows, target_fields=None):
"""
A generic way to insert a set of tuples into a table.
:param table: Name of the target table
:type table: str
:param rows: The rows to insert into the table
:type rows: iterable of tuples
:param target_fields: The names of the columns to fill in the table
:type target_fields: iterable of strings
"""
super(PrestoHook, self).insert_rows(table, rows, target_fields, 0)
|
sid88in/incubator-airflow
|
airflow/hooks/presto_hook.py
|
Python
|
apache-2.0
| 4,438
|
from django.dispatch import Signal
location_created = Signal(providing_args=['loc'])
location_edited = Signal(providing_args=['loc', 'moved'])
|
puttarajubr/commcare-hq
|
corehq/apps/locations/signals.py
|
Python
|
bsd-3-clause
| 144
|
from corehq.apps.commtrack.models import StockState
from custom.ilsgateway.models import SupplyPointStatus, SupplyPointStatusValues, SupplyPointStatusTypes
from custom.ilsgateway.tanzania.reminders import DELIVERY_PARTIAL_CONFIRM, NOT_DELIVERED_CONFIRM, \
DELIVERY_CONFIRM_DISTRICT, DELIVERY_CONFIRM_CHILDREN
from custom.ilsgateway.tanzania.test.utils import ILSTestScript
class ILSDeliveredTest(ILSTestScript):
def setUp(self):
super(ILSDeliveredTest, self).setUp()
def test_delivery_facility_received_no_quantities_reported(self):
script = """
5551234 > delivered
5551234 < {0}
""".format(DELIVERY_PARTIAL_CONFIRM)
self.run_script(script)
sps = SupplyPointStatus.objects.filter(location_id=self.loc1.get_id,
status_type="del_fac").order_by("-status_date")[0]
self.assertEqual(SupplyPointStatusValues.RECEIVED, sps.status_value)
self.assertEqual(SupplyPointStatusTypes.DELIVERY_FACILITY, sps.status_type)
def test_delivery_facility_received_quantities_reported(self):
script = """
5551234 > delivered jd 400 mc 569
5551234 < {0}
""".format("received stock report for loc1(Test Facility 1) R jd400 mc569")
self.run_script(script)
self.assertEqual(2, StockState.objects.count())
for ps in StockState.objects.all():
self.assertEqual(self.loc1.linked_supply_point().get_id, ps.case_id)
self.assertTrue(0 != ps.stock_on_hand)
def test_delivery_facility_not_received(self):
script = """
5551234 > sijapokea
5551234 < {0}
""".format(NOT_DELIVERED_CONFIRM)
self.run_script(script)
sps = SupplyPointStatus.objects.filter(location_id=self.loc1.get_id,
status_type="del_fac").order_by("-status_date")[0]
self.assertEqual(SupplyPointStatusValues.NOT_RECEIVED, sps.status_value)
self.assertEqual(SupplyPointStatusTypes.DELIVERY_FACILITY, sps.status_type)
def test_delivery_district_received(self):
script = """
555 > nimepokea
555 < {0}
5551234 < {1}
5555678 < {1}
""".format(DELIVERY_CONFIRM_DISTRICT % dict(contact_name="{0} {1}".format(self.user_dis.first_name,
self.user_dis.last_name),
facility_name=self.dis.name),
DELIVERY_CONFIRM_CHILDREN % dict(district_name=self.dis.name))
self.run_script(script)
sps = SupplyPointStatus.objects.filter(location_id=self.dis.get_id,
status_type="del_dist").order_by("-status_date")[0]
self.assertEqual(SupplyPointStatusValues.RECEIVED, sps.status_value)
self.assertEqual(SupplyPointStatusTypes.DELIVERY_DISTRICT, sps.status_type)
def test_delivery_district_not_received(self):
script = """
555 > sijapokea
555 < {0}
""".format(NOT_DELIVERED_CONFIRM)
self.run_script(script)
sps = SupplyPointStatus.objects.filter(location_id=self.dis.get_id,
status_type="del_dist").order_by("-status_date")[0]
self.assertEqual(SupplyPointStatusValues.NOT_RECEIVED, sps.status_value)
self.assertEqual(SupplyPointStatusTypes.DELIVERY_DISTRICT, sps.status_type)
|
puttarajubr/commcare-hq
|
custom/ilsgateway/tanzania/test/delivered.py
|
Python
|
bsd-3-clause
| 3,569
|
"""Raw SNMP SMI module dumps.
As dumped by smidump dump using the python format option.
"""
from __future__ import absolute_import
from itertools import chain
import importlib
from django.utils import six
from nav.config import NAV_CONFIG
from nav.oids import OID
_mib_map = {}
def get_mib(mib_module):
"""Returns the smidumped MIB definition of a named MIB module, if it exists
in NAV.
"""
if not mib_module:
return None
if mib_module not in _mib_map:
for path in get_search_path():
try:
name = '.' + mib_module if path else mib_module # support top namespace
module = importlib.import_module(name, path)
except ImportError:
continue
else:
convert_oids(module.MIB)
_mib_map[mib_module] = module
break
else:
return None
return _mib_map[mib_module].MIB
def get_search_path():
"""Returns the configured smidumps search path"""
return NAV_CONFIG.get("SMIDUMPS", "nav.smidumps").split(':')
def convert_oids(mib):
"""Converts a mib data structure's oid strings to OID objects.
mib is expected to be a data structure as dumped by the smidump utility
(using the -f python option).
"""
for node in chain(
mib.get('nodes', {}).values(),
mib.get('notifications', {}).values()
):
if isinstance(node['oid'], six.string_types):
node['oid'] = OID(node['oid'])
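# ---------------------------------------------------------------------------
# Editor's illustration (hedged sketch, not part of the original module):
# typical use of get_mib. "IF-MIB" and its "ifDescr" node are only examples;
# whether they resolve depends on the configured SMIDUMPS search path, so the
# None check matters.
#
#     mib = get_mib('IF-MIB')
#     if mib is not None:
#         if_descr_oid = mib['nodes']['ifDescr']['oid']   # a nav.oids.OID
# ---------------------------------------------------------------------------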
|
hmpf/nav
|
python/nav/smidumps/__init__.py
|
Python
|
gpl-3.0
| 1,524
|
import pickle
import matplotlib.pyplot as plt
import matplotlib.patches
import matplotlib as mpl
import numpy as np
import sys, argparse
sys.path.append("../")
import Plotting
colors=[
'#d7191c',
'#fdae61',
'#abd9e9',
'#2c7bb6',
]
Names = {
'mini_gb2': 'VC-GB2',
'mini_gb5': 'VC-GB5',
'mini_lin': 'VC-Lin',
'epsall_gb2': '$\epsilon$-GB2',
'epsall_gb5': '$\epsilon$-GB5',
'epsall_lin': '$\epsilon$-Lin',
'lin': 'LinUCB'
}
Styles = {
'mini_gb2': ['k', 'solid'],
'mini_gb5': [colors[1], 'solid'],
'mini_lin': [colors[0], 'solid'],
'epsall_gb2': ['k', 'dashed'],
'epsall_gb5': [colors[1], 'dashed'],
'epsall_lin': [colors[0], 'dashed'],
'lin': [colors[3], 'solid']
}
marker=10
band=False
parser = argparse.ArgumentParser()
parser.add_argument('--save', dest='save', action='store_true')
Args = parser.parse_args(sys.argv[1:])
D1 = Plotting.read_dir("../results/mslr30k_T=36000_L=3_e=0.1/")
fig = plt.figure(figsize=(mpl.rcParams['figure.figsize'][0],mpl.rcParams['figure.figsize'][1]-1))
ax = fig.add_subplot(111)
plt.rc('font', size=18)
plt.rcParams['text.usetex'] = True
plt.rc('font', family='sans-serif')
ticks=ax.get_yticks()
print(ticks)
ax.set_ylim(2.15, 2.35)
print("Setting ylim to %0.2f, %0.2f" % (ticks[3], ticks[len(ticks)-2]))
ticks = ax.get_yticks()
print(ticks)
# ticks = ["", "", "2.2", "", "2.3", ""]
# ax.set_yticklabels(ticks,size=16)
ticks = ['', '', '10000', '', '20000', '', '30000']
ax.set_xlim(1000, 31000)
ax.set_xticklabels(ticks,size=16)
plt.ylabel('Average reward', fontsize=16)
plt.xlabel('Rounds (T)', fontsize=16)
# ax.tick_params(labelsize=16)
plt.gcf().subplots_adjust(bottom=0.25)
plt.savefig('../figs/mslr_blank.pdf', format='pdf')
keys = ['epsall_lin', 'epsall_gb5']
for k in keys:
params = []
mus = []
for (k1,v1) in D1[0].items():
if k1.find(k) == 0 and len(D1[0][k1]) != 0:
x = np.arange(100, 10*len(D1[0][k1][0])+1, 100)
mus.append(np.mean(D1[0][k1],axis=0)[9::10]/x)
params.append(k1.split("_")[-1])
if len(mus) == 0:
continue
A = np.vstack(mus)
ids = np.argmax(A, axis=0)
mu = np.array([A[ids[i], i] for i in range(len(ids))])
if k == 'mini_gb5':
mu = np.mean(D1[0]['mini_gb5_0.008'], axis=0)[9::10]/x
l1 = ax.plot(x,mu,rasterized=True, linewidth=2.0, label=Names[k], color=Styles[k][0], linestyle=Styles[k][1])
plt.savefig('../figs/mslr_noninteractive.pdf', format='pdf')
keys = ['mini_lin', 'mini_gb5', 'lin']
for k in keys:
params = []
mus = []
for (k1,v1) in D1[0].items():
if k1.find(k) == 0 and len(D1[0][k1]) != 0:
x = np.arange(100, 10*len(D1[0][k1][0])+1, 100)
mus.append(np.mean(D1[0][k1],axis=0)[9::10]/x)
params.append(k1.split("_")[-1])
if len(mus) == 0:
continue
A = np.vstack(mus)
ids = np.argmax(A, axis=0)
mu = np.array([A[ids[i], i] for i in range(len(ids))])
if k == 'mini_gb5':
mu = np.mean(D1[0]['mini_gb5_0.008'], axis=0)[9::10]/x
l1 = ax.plot(x,mu,rasterized=True, linewidth=5.0, label=Names[k], color=Styles[k][0], linestyle=Styles[k][1])
else:
l1 = ax.plot(x,mu,rasterized=True, linewidth=2.0, label=Names[k], color=Styles[k][0], linestyle=Styles[k][1])
plt.savefig('../figs/mslr_all.pdf', format='pdf')
|
akshaykr/oracle_cb
|
semibandits/sequential_plot.py
|
Python
|
mit
| 3,374
|
#!/usr/bin/python
import realog.debug as debug
import lutin.tools as tools
import os
def get_type():
return "LIBRARY"
def get_desc():
return "opencv Image processing library"
def get_licence():
return "APAPCHE-2"
def get_maintainer():
return ["Maksim Shabunin <maksim.shabunin@itseez.com>"]
def get_version():
return [3,1,0]
def configure(target, my_module):
my_module.add_src_file([
'opencv/modules/superres/src/btv_l1_cuda.cpp',
'opencv/modules/superres/src/btv_l1.cpp',
'opencv/modules/superres/src/optical_flow.cpp',
'opencv/modules/superres/src/super_resolution.cpp',
#'opencv/modules/superres/src/cuda/btv_l1_gpu.cu',
'opencv/modules/superres/src/input_array_utility.cpp',
#'opencv/modules/superres/src/opencl/superres_btvl1.cl',
'opencv/modules/superres/src/frame_source.cpp',
])
my_module.add_flag('c++', [
"-DCVAPI_EXPORTS",
"-D__OPENCV_BUILD=1",
"-fsigned-char",
"-W",
"-Wall",
"-Werror=return-type",
"-Werror=non-virtual-dtor",
"-Werror=address",
"-Werror=sequence-point",
"-Wformat",
"-Werror=format-security",
"-Wmissing-declarations",
"-Winit-self",
"-Wpointer-arith",
"-Wshadow",
"-Wsign-promo",
"-Wno-narrowing",
"-Wno-delete-non-virtual-dtor",
"-fdiagnostics-show-option",
"-Wno-long-long",
"-fomit-frame-pointer",
"-ffunction-sections",
"-fvisibility=hidden",
"-fvisibility-inlines-hidden",
])
my_module.add_header_file(
"opencv/modules/superres/include/*",
recursive=True)
my_module.add_depend([
'opencv-core',
'opencv-imgproc',
'opencv-video',
])
if "Android" in target.get_type():
my_module.add_flag('c++', "-DANDROID")
my_module.compile_version("C++", 2003)
return True
|
generic-library/opencv-lutin
|
lutin_opencv-superres.py
|
Python
|
apache-2.0
| 1,814
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Nova base exception handling.
Includes decorator for re-raising Nova-type exceptions.
SHOULD include dedicated exception logging.
"""
import functools
import sys
from oslo.config import cfg
import webob.exc
from nova.i18n import _
from nova.openstack.common import excutils
from nova.openstack.common import log as logging
from nova import safe_utils
LOG = logging.getLogger(__name__)
exc_log_opts = [
cfg.BoolOpt('fatal_exception_format_errors',
default=False,
help='Make exception message format errors fatal'),
]
CONF = cfg.CONF
CONF.register_opts(exc_log_opts)
class ConvertedException(webob.exc.WSGIHTTPException):
def __init__(self, code=0, title="", explanation=""):
self.code = code
self.title = title
self.explanation = explanation
super(ConvertedException, self).__init__()
def _cleanse_dict(original):
"""Strip all admin_password, new_pass, rescue_pass keys from a dict."""
return dict((k, v) for k, v in original.iteritems() if "_pass" not in k)
def wrap_exception(notifier=None, get_notifier=None):
"""This decorator wraps a method to catch any exceptions that may
get thrown. It logs the exception as well as optionally sending
it to the notification system.
"""
def inner(f):
def wrapped(self, context, *args, **kw):
# Don't store self or context in the payload, it now seems to
# contain confidential information.
try:
return f(self, context, *args, **kw)
except Exception as e:
with excutils.save_and_reraise_exception():
if notifier or get_notifier:
payload = dict(exception=e)
call_dict = safe_utils.getcallargs(f, context,
*args, **kw)
cleansed = _cleanse_dict(call_dict)
payload.update({'args': cleansed})
# If f has multiple decorators, they must use
# functools.wraps to ensure the name is
# propagated.
event_type = f.__name__
(notifier or get_notifier()).error(context,
event_type,
payload)
return functools.wraps(f)(wrapped)
return inner
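# ---------------------------------------------------------------------------
# Editor's illustration (hedged sketch, not part of the original module): the
# decorator is intended for manager-style methods whose first two positional
# arguments are self and context. "my_notifier" stands in for any object
# exposing error(context, event_type, payload).
#
#     @wrap_exception(notifier=my_notifier)
#     def resize_instance(self, context, instance, flavor_id):
#         ...
# ---------------------------------------------------------------------------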
class NovaException(Exception):
"""Base Nova Exception
To correctly use this class, inherit from it and define
a 'msg_fmt' property. That msg_fmt will get printf'd
with the keyword arguments provided to the constructor.
"""
msg_fmt = _("An unknown exception occurred.")
code = 500
headers = {}
safe = False
def __init__(self, message=None, **kwargs):
self.kwargs = kwargs
if 'code' not in self.kwargs:
try:
self.kwargs['code'] = self.code
except AttributeError:
pass
if not message:
try:
message = self.msg_fmt % kwargs
except Exception:
exc_info = sys.exc_info()
# kwargs doesn't match a variable in the message
# log the issue and the kwargs
LOG.exception(_('Exception in string format operation'))
for name, value in kwargs.iteritems():
LOG.error("%s: %s" % (name, value)) # noqa
if CONF.fatal_exception_format_errors:
raise exc_info[0], exc_info[1], exc_info[2]
else:
# at least get the core message out if something happened
message = self.msg_fmt
super(NovaException, self).__init__(message)
def format_message(self):
# NOTE(mrodden): use the first argument to the python Exception object
# which should be our full NovaException message, (see __init__)
return self.args[0]
class EncryptionFailure(NovaException):
msg_fmt = _("Failed to encrypt text: %(reason)s")
class DecryptionFailure(NovaException):
msg_fmt = _("Failed to decrypt text: %(reason)s")
class RevokeCertFailure(NovaException):
msg_fmt = _("Failed to revoke certificate for %(project_id)s")
class VirtualInterfaceCreateException(NovaException):
msg_fmt = _("Virtual Interface creation failed")
class VirtualInterfaceMacAddressException(NovaException):
msg_fmt = _("Creation of virtual interface with "
"unique mac address failed")
class GlanceConnectionFailed(NovaException):
msg_fmt = _("Connection to glance host %(host)s:%(port)s failed: "
"%(reason)s")
class CinderConnectionFailed(NovaException):
msg_fmt = _("Connection to cinder host failed: %(reason)s")
class Forbidden(NovaException):
ec2_code = 'AuthFailure'
msg_fmt = _("Not authorized.")
code = 403
class AdminRequired(Forbidden):
msg_fmt = _("User does not have admin privileges")
class PolicyNotAuthorized(Forbidden):
msg_fmt = _("Policy doesn't allow %(action)s to be performed.")
class ImageNotActive(NovaException):
# NOTE(jruzicka): IncorrectState is used for volumes only in EC2,
# but it still seems like the most appropriate option.
ec2_code = 'IncorrectState'
msg_fmt = _("Image %(image_id)s is not active.")
class ImageNotAuthorized(NovaException):
msg_fmt = _("Not authorized for image %(image_id)s.")
class Invalid(NovaException):
msg_fmt = _("Unacceptable parameters.")
code = 400
class InvalidBDM(Invalid):
msg_fmt = _("Block Device Mapping is Invalid.")
class InvalidBDMSnapshot(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get snapshot %(id)s.")
class InvalidBDMVolume(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get volume %(id)s.")
class InvalidBDMImage(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"failed to get image %(id)s.")
class InvalidBDMBootSequence(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"Boot sequence for the instance "
"and image/block device mapping "
"combination is not valid.")
class InvalidBDMLocalsLimit(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"You specified more local devices than the "
"limit allows")
class InvalidBDMEphemeralSize(InvalidBDM):
msg_fmt = _("Ephemeral disks requested are larger than "
"the instance type allows.")
class InvalidBDMSwapSize(InvalidBDM):
msg_fmt = _("Swap drive requested is larger than instance type allows.")
class InvalidBDMFormat(InvalidBDM):
msg_fmt = _("Block Device Mapping is Invalid: "
"%(details)s")
class InvalidBDMForLegacy(InvalidBDM):
msg_fmt = _("Block Device Mapping cannot "
"be converted to legacy format. ")
class InvalidBDMVolumeNotBootable(InvalidBDM):
msg_fmt = _("Block Device %(id)s is not bootable.")
class InvalidAttribute(Invalid):
msg_fmt = _("Attribute not supported: %(attr)s")
class ValidationError(Invalid):
msg_fmt = "%(detail)s"
class VolumeUnattached(Invalid):
ec2_code = 'IncorrectState'
msg_fmt = _("Volume %(volume_id)s is not attached to anything")
class VolumeNotCreated(NovaException):
msg_fmt = _("Volume %(volume_id)s did not finish being created"
" even after we waited %(seconds)s seconds or %(attempts)s"
" attempts.")
class InvalidKeypair(Invalid):
ec2_code = 'InvalidKeyPair.Format'
msg_fmt = _("Keypair data is invalid: %(reason)s")
class InvalidRequest(Invalid):
msg_fmt = _("The request is invalid.")
class InvalidInput(Invalid):
msg_fmt = _("Invalid input received: %(reason)s")
class InvalidVolume(Invalid):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("Invalid volume: %(reason)s")
class InvalidVolumeAccessMode(Invalid):
msg_fmt = _("Invalid volume access mode: %(access_mode)s")
class InvalidMetadata(Invalid):
msg_fmt = _("Invalid metadata: %(reason)s")
class InvalidMetadataSize(Invalid):
msg_fmt = _("Invalid metadata size: %(reason)s")
class InvalidPortRange(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("Invalid port range %(from_port)s:%(to_port)s. %(msg)s")
class InvalidIpProtocol(Invalid):
msg_fmt = _("Invalid IP protocol %(protocol)s.")
class InvalidContentType(Invalid):
msg_fmt = _("Invalid content type %(content_type)s.")
class InvalidUnicodeParameter(Invalid):
msg_fmt = _("Invalid Parameter: "
"Unicode is not supported by the current database.")
# Cannot be templated as the error syntax varies.
# msg needs to be constructed when raised.
class InvalidParameterValue(Invalid):
ec2_code = 'InvalidParameterValue'
msg_fmt = _("%(err)s")
class InvalidAggregateAction(Invalid):
msg_fmt = _("Cannot perform action '%(action)s' on aggregate "
"%(aggregate_id)s. Reason: %(reason)s.")
class InvalidGroup(Invalid):
msg_fmt = _("Group not valid. Reason: %(reason)s")
class InvalidSortKey(Invalid):
msg_fmt = _("Sort key supplied was not valid.")
class InvalidStrTime(Invalid):
msg_fmt = _("Invalid datetime string: %(reason)s")
class InstanceInvalidState(Invalid):
msg_fmt = _("Instance %(instance_uuid)s in %(attr)s %(state)s. Cannot "
"%(method)s while the instance is in this state.")
class InstanceNotRunning(Invalid):
msg_fmt = _("Instance %(instance_id)s is not running.")
class InstanceNotInRescueMode(Invalid):
msg_fmt = _("Instance %(instance_id)s is not in rescue mode")
class InstanceNotRescuable(Invalid):
msg_fmt = _("Instance %(instance_id)s cannot be rescued: %(reason)s")
class InstanceNotReady(Invalid):
msg_fmt = _("Instance %(instance_id)s is not ready")
class InstanceSuspendFailure(Invalid):
msg_fmt = _("Failed to suspend instance: %(reason)s")
class InstanceResumeFailure(Invalid):
msg_fmt = _("Failed to resume instance: %(reason)s")
class InstancePowerOnFailure(Invalid):
msg_fmt = _("Failed to power on instance: %(reason)s")
class InstancePowerOffFailure(Invalid):
msg_fmt = _("Failed to power off instance: %(reason)s")
class InstanceRebootFailure(Invalid):
msg_fmt = _("Failed to reboot instance: %(reason)s")
class InstanceTerminationFailure(Invalid):
msg_fmt = _("Failed to terminate instance: %(reason)s")
class InstanceDeployFailure(Invalid):
msg_fmt = _("Failed to deploy instance: %(reason)s")
class MultiplePortsNotApplicable(Invalid):
msg_fmt = _("Failed to launch instances: %(reason)s")
class InvalidFixedIpAndMaxCountRequest(Invalid):
msg_fmt = _("Failed to launch instances: %(reason)s")
class ServiceUnavailable(Invalid):
msg_fmt = _("Service is unavailable at this time.")
class ComputeResourcesUnavailable(ServiceUnavailable):
msg_fmt = _("Insufficient compute resources: %(reason)s.")
class HypervisorUnavailable(NovaException):
msg_fmt = _("Connection to the hypervisor is broken on host: %(host)s")
class ComputeServiceUnavailable(ServiceUnavailable):
msg_fmt = _("Compute service of %(host)s is unavailable at this time.")
class ComputeServiceInUse(NovaException):
msg_fmt = _("Compute service of %(host)s is still in use.")
class UnableToMigrateToSelf(Invalid):
msg_fmt = _("Unable to migrate instance (%(instance_id)s) "
"to current host (%(host)s).")
class InvalidHypervisorType(Invalid):
msg_fmt = _("The supplied hypervisor type of is invalid.")
class DestinationHypervisorTooOld(Invalid):
msg_fmt = _("The instance requires a newer hypervisor version than "
"has been provided.")
class DestinationDiskExists(Invalid):
msg_fmt = _("The supplied disk path (%(path)s) already exists, "
"it is expected not to exist.")
class InvalidDevicePath(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is invalid.")
class DevicePathInUse(Invalid):
msg_fmt = _("The supplied device path (%(path)s) is in use.")
code = 409
class DeviceIsBusy(Invalid):
msg_fmt = _("The supplied device (%(device)s) is busy.")
class InvalidCPUInfo(Invalid):
msg_fmt = _("Unacceptable CPU info: %(reason)s")
class InvalidIpAddressError(Invalid):
msg_fmt = _("%(address)s is not a valid IP v4/6 address.")
class InvalidVLANTag(Invalid):
msg_fmt = _("VLAN tag is not appropriate for the port group "
"%(bridge)s. Expected VLAN tag is %(tag)s, "
"but the one associated with the port group is %(pgroup)s.")
class InvalidVLANPortGroup(Invalid):
msg_fmt = _("vSwitch which contains the port group %(bridge)s is "
"not associated with the desired physical adapter. "
"Expected vSwitch is %(expected)s, but the one associated "
"is %(actual)s.")
class InvalidDiskFormat(Invalid):
msg_fmt = _("Disk format %(disk_format)s is not acceptable")
class InvalidDiskInfo(Invalid):
msg_fmt = _("Disk info file is invalid: %(reason)s")
class DiskInfoReadWriteFail(Invalid):
msg_fmt = _("Failed to read or write disk info file: %(reason)s")
class ImageUnacceptable(Invalid):
msg_fmt = _("Image %(image_id)s is unacceptable: %(reason)s")
class InstanceUnacceptable(Invalid):
msg_fmt = _("Instance %(instance_id)s is unacceptable: %(reason)s")
class InvalidEc2Id(Invalid):
msg_fmt = _("Ec2 id %(ec2_id)s is unacceptable.")
class InvalidUUID(Invalid):
msg_fmt = _("Expected a uuid but received %(uuid)s.")
class InvalidID(Invalid):
msg_fmt = _("Invalid ID received %(id)s.")
class ConstraintNotMet(NovaException):
msg_fmt = _("Constraint not met.")
code = 412
class NotFound(NovaException):
msg_fmt = _("Resource could not be found.")
code = 404
class AgentBuildNotFound(NotFound):
msg_fmt = _("No agent-build associated with id %(id)s.")
class AgentBuildExists(NovaException):
msg_fmt = _("Agent-build with hypervisor %(hypervisor)s os %(os)s "
"architecture %(architecture)s exists.")
class VolumeNotFound(NotFound):
ec2_code = 'InvalidVolume.NotFound'
msg_fmt = _("Volume %(volume_id)s could not be found.")
class VolumeBDMNotFound(NotFound):
msg_fmt = _("No volume Block Device Mapping with id %(volume_id)s.")
class SnapshotNotFound(NotFound):
ec2_code = 'InvalidSnapshot.NotFound'
msg_fmt = _("Snapshot %(snapshot_id)s could not be found.")
class DiskNotFound(NotFound):
msg_fmt = _("No disk at %(location)s")
class VolumeDriverNotFound(NotFound):
msg_fmt = _("Could not find a handler for %(driver_type)s volume.")
class InvalidImageRef(Invalid):
msg_fmt = _("Invalid image href %(image_href)s.")
class AutoDiskConfigDisabledByImage(Invalid):
msg_fmt = _("Requested image %(image)s "
"has automatic disk resize disabled.")
class ImageNotFound(NotFound):
msg_fmt = _("Image %(image_id)s could not be found.")
class PreserveEphemeralNotSupported(Invalid):
msg_fmt = _("The current driver does not support "
"preserving ephemeral partitions.")
# NOTE(jruzicka): ImageNotFound is not a valid EC2 error code.
class ImageNotFoundEC2(ImageNotFound):
msg_fmt = _("Image %(image_id)s could not be found. The nova EC2 API "
"assigns image ids dynamically when they are listed for the "
"first time. Have you listed image ids since adding this "
"image?")
class ProjectNotFound(NotFound):
msg_fmt = _("Project %(project_id)s could not be found.")
class StorageRepositoryNotFound(NotFound):
msg_fmt = _("Cannot find SR to read/write VDI.")
class NetworkDuplicated(Invalid):
msg_fmt = _("Network %(network_id)s is duplicated.")
class NetworkInUse(NovaException):
msg_fmt = _("Network %(network_id)s is still in use.")
class NetworkNotCreated(Invalid):
msg_fmt = _("%(req)s is required to create a network.")
class LabelTooLong(Invalid):
msg_fmt = _("Maximum allowed length for 'label' is 255.")
class InvalidIntValue(Invalid):
msg_fmt = _("%(key)s must be an integer.")
class InvalidCidr(Invalid):
msg_fmt = _("%(cidr)s is not a valid ip network.")
class InvalidAddress(Invalid):
msg_fmt = _("%(address)s is not a valid ip address.")
class AddressOutOfRange(Invalid):
msg_fmt = _("%(address)s is not within %(cidr)s.")
class DuplicateVlan(NovaException):
msg_fmt = _("Detected existing vlan with id %(vlan)d")
code = 409
class CidrConflict(NovaException):
msg_fmt = _('Requested cidr (%(cidr)s) conflicts '
'with existing cidr (%(other)s)')
code = 409
class NetworkHasProject(NetworkInUse):
msg_fmt = _('Network must be disassociated from project '
'%(project_id)s before it can be deleted.')
class NetworkNotFound(NotFound):
msg_fmt = _("Network %(network_id)s could not be found.")
class PortNotFound(NotFound):
msg_fmt = _("Port id %(port_id)s could not be found.")
class NetworkNotFoundForBridge(NetworkNotFound):
msg_fmt = _("Network could not be found for bridge %(bridge)s")
class NetworkNotFoundForUUID(NetworkNotFound):
msg_fmt = _("Network could not be found for uuid %(uuid)s")
class NetworkNotFoundForCidr(NetworkNotFound):
msg_fmt = _("Network could not be found with cidr %(cidr)s.")
class NetworkNotFoundForInstance(NetworkNotFound):
msg_fmt = _("Network could not be found for instance %(instance_id)s.")
class NoNetworksFound(NotFound):
msg_fmt = _("No networks defined.")
class NoMoreNetworks(NovaException):
msg_fmt = _("No more available networks.")
class NetworkNotFoundForProject(NotFound):
msg_fmt = _("Either network uuid %(network_uuid)s is not present or "
"is not assigned to the project %(project_id)s.")
class NetworkAmbiguous(Invalid):
msg_fmt = _("More than one possible network found. Specify "
"network ID(s) to select which one(s) to connect to,")
class NetworkRequiresSubnet(Invalid):
msg_fmt = _("Network %(network_uuid)s requires a subnet in order to boot"
" instances on.")
class ExternalNetworkAttachForbidden(Forbidden):
msg_fmt = _("It is not allowed to create an interface on "
"external network %(network_uuid)s")
class NetworkMissingPhysicalNetwork(NovaException):
msg_fmt = _("Physical network is missing for network %(network_uuid)s")
class DatastoreNotFound(NotFound):
msg_fmt = _("Could not find the datastore reference(s) which the VM uses.")
class PortInUse(Invalid):
msg_fmt = _("Port %(port_id)s is still in use.")
class PortRequiresFixedIP(Invalid):
msg_fmt = _("Port %(port_id)s requires a FixedIP in order to be used.")
class PortNotUsable(Invalid):
msg_fmt = _("Port %(port_id)s not usable for instance %(instance)s.")
class PortNotFree(Invalid):
msg_fmt = _("No free port available for instance %(instance)s.")
class FixedIpExists(NovaException):
msg_fmt = _("Fixed ip %(address)s already exists.")
class FixedIpNotFound(NotFound):
msg_fmt = _("No fixed IP associated with id %(id)s.")
class FixedIpNotFoundForAddress(FixedIpNotFound):
msg_fmt = _("Fixed ip not found for address %(address)s.")
class FixedIpNotFoundForInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s has zero fixed ips.")
class FixedIpNotFoundForNetworkHost(FixedIpNotFound):
msg_fmt = _("Network host %(host)s has zero fixed ips "
"in network %(network_id)s.")
class FixedIpNotFoundForSpecificInstance(FixedIpNotFound):
msg_fmt = _("Instance %(instance_uuid)s doesn't have fixed ip '%(ip)s'.")
class FixedIpNotFoundForNetwork(FixedIpNotFound):
msg_fmt = _("Fixed IP address (%(address)s) does not exist in "
"network (%(network_uuid)s).")
class FixedIpAlreadyInUse(NovaException):
msg_fmt = _("Fixed IP address %(address)s is already in use on instance "
"%(instance_uuid)s.")
class FixedIpAssociatedWithMultipleInstances(NovaException):
msg_fmt = _("More than one instance is associated with fixed ip address "
"'%(address)s'.")
class FixedIpInvalid(Invalid):
msg_fmt = _("Fixed IP address %(address)s is invalid.")
class NoMoreFixedIps(NovaException):
ec2_code = 'UnsupportedOperation'
msg_fmt = _("Zero fixed ips available.")
class NoFixedIpsDefined(NotFound):
msg_fmt = _("Zero fixed ips could be found.")
class FloatingIpExists(NovaException):
msg_fmt = _("Floating ip %(address)s already exists.")
class FloatingIpNotFound(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating ip not found for id %(id)s.")
class FloatingIpDNSExists(Invalid):
msg_fmt = _("The DNS entry %(name)s already exists in domain %(domain)s.")
class FloatingIpNotFoundForAddress(FloatingIpNotFound):
msg_fmt = _("Floating ip not found for address %(address)s.")
class FloatingIpNotFoundForHost(FloatingIpNotFound):
msg_fmt = _("Floating ip not found for host %(host)s.")
class FloatingIpMultipleFoundForAddress(NovaException):
msg_fmt = _("Multiple floating ips are found for address %(address)s.")
class FloatingIpPoolNotFound(NotFound):
msg_fmt = _("Floating ip pool not found.")
safe = True
class NoMoreFloatingIps(FloatingIpNotFound):
msg_fmt = _("Zero floating ips available.")
safe = True
class FloatingIpAssociated(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Floating ip %(address)s is associated.")
class FloatingIpNotAssociated(NovaException):
msg_fmt = _("Floating ip %(address)s is not associated.")
class NoFloatingIpsDefined(NotFound):
msg_fmt = _("Zero floating ips exist.")
class NoFloatingIpInterface(NotFound):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Interface %(interface)s not found.")
class CannotDisassociateAutoAssignedFloatingIP(NovaException):
ec2_code = "UnsupportedOperation"
msg_fmt = _("Cannot disassociate auto assigned floating ip")
class KeypairNotFound(NotFound):
ec2_code = 'InvalidKeyPair.NotFound'
msg_fmt = _("Keypair %(name)s not found for user %(user_id)s")
class ServiceNotFound(NotFound):
msg_fmt = _("Service %(service_id)s could not be found.")
class ServiceBinaryExists(NovaException):
msg_fmt = _("Service with host %(host)s binary %(binary)s exists.")
class ServiceTopicExists(NovaException):
msg_fmt = _("Service with host %(host)s topic %(topic)s exists.")
class HostNotFound(NotFound):
msg_fmt = _("Host %(host)s could not be found.")
class ComputeHostNotFound(HostNotFound):
msg_fmt = _("Compute host %(host)s could not be found.")
class ComputeHostNotCreated(HostNotFound):
msg_fmt = _("Compute host %(name)s needs to be created first"
" before updating.")
class HostBinaryNotFound(NotFound):
msg_fmt = _("Could not find binary %(binary)s on host %(host)s.")
class InvalidReservationExpiration(Invalid):
msg_fmt = _("Invalid reservation expiration %(expire)s.")
class InvalidQuotaValue(Invalid):
msg_fmt = _("Change would make usage less than 0 for the following "
"resources: %(unders)s")
class InvalidQuotaMethodUsage(Invalid):
msg_fmt = _("Wrong quota method %(method)s used on resource %(res)s")
class QuotaNotFound(NotFound):
msg_fmt = _("Quota could not be found")
class QuotaExists(NovaException):
msg_fmt = _("Quota exists for project %(project_id)s, "
"resource %(resource)s")
class QuotaResourceUnknown(QuotaNotFound):
msg_fmt = _("Unknown quota resources %(unknown)s.")
class ProjectUserQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for user %(user_id)s in project %(project_id)s "
"could not be found.")
class ProjectQuotaNotFound(QuotaNotFound):
msg_fmt = _("Quota for project %(project_id)s could not be found.")
class QuotaClassNotFound(QuotaNotFound):
msg_fmt = _("Quota class %(class_name)s could not be found.")
class QuotaUsageNotFound(QuotaNotFound):
msg_fmt = _("Quota usage for project %(project_id)s could not be found.")
class ReservationNotFound(QuotaNotFound):
msg_fmt = _("Quota reservation %(uuid)s could not be found.")
class OverQuota(NovaException):
msg_fmt = _("Quota exceeded for resources: %(overs)s")
class SecurityGroupNotFound(NotFound):
msg_fmt = _("Security group %(security_group_id)s not found.")
class SecurityGroupNotFoundForProject(SecurityGroupNotFound):
msg_fmt = _("Security group %(security_group_id)s not found "
"for project %(project_id)s.")
class SecurityGroupNotFoundForRule(SecurityGroupNotFound):
msg_fmt = _("Security group with rule %(rule_id)s not found.")
class SecurityGroupExists(Invalid):
ec2_code = 'InvalidGroup.Duplicate'
msg_fmt = _("Security group %(security_group_name)s already exists "
"for project %(project_id)s.")
class SecurityGroupExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is already associated"
" with the instance %(instance_id)s")
class SecurityGroupNotExistsForInstance(Invalid):
msg_fmt = _("Security group %(security_group_id)s is not associated with"
" the instance %(instance_id)s")
class SecurityGroupDefaultRuleNotFound(Invalid):
msg_fmt = _("Security group default rule (%rule_id)s not found.")
class SecurityGroupCannotBeApplied(Invalid):
msg_fmt = _("Network requires port_security_enabled and subnet associated"
" in order to apply security groups.")
class SecurityGroupRuleExists(Invalid):
ec2_code = 'InvalidPermission.Duplicate'
msg_fmt = _("Rule already exists in group: %(rule)s")
class NoUniqueMatch(NovaException):
msg_fmt = _("No Unique Match Found.")
code = 409
class MigrationNotFound(NotFound):
msg_fmt = _("Migration %(migration_id)s could not be found.")
class MigrationNotFoundByStatus(MigrationNotFound):
msg_fmt = _("Migration not found for instance %(instance_id)s "
"with status %(status)s.")
class ConsolePoolNotFound(NotFound):
msg_fmt = _("Console pool %(pool_id)s could not be found.")
class ConsolePoolExists(NovaException):
msg_fmt = _("Console pool with host %(host)s, console_type "
"%(console_type)s and compute_host %(compute_host)s "
"already exists.")
class ConsolePoolNotFoundForHostType(NotFound):
msg_fmt = _("Console pool of type %(console_type)s "
"for compute host %(compute_host)s "
"on proxy host %(host)s not found.")
class ConsoleNotFound(NotFound):
msg_fmt = _("Console %(console_id)s could not be found.")
class ConsoleNotFoundForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s could not be found.")
class ConsoleNotFoundInPoolForInstance(ConsoleNotFound):
msg_fmt = _("Console for instance %(instance_uuid)s "
"in pool %(pool_id)s could not be found.")
class ConsoleTypeInvalid(Invalid):
msg_fmt = _("Invalid console type %(console_type)s")
class ConsoleTypeUnavailable(Invalid):
msg_fmt = _("Unavailable console type %(console_type)s.")
class ConsolePortRangeExhausted(NovaException):
msg_fmt = _("The console port range %(min_port)d-%(max_port)d is "
"exhausted.")
class FlavorNotFound(NotFound):
msg_fmt = _("Flavor %(flavor_id)s could not be found.")
class FlavorNotFoundByName(FlavorNotFound):
msg_fmt = _("Flavor with name %(flavor_name)s could not be found.")
class FlavorAccessNotFound(NotFound):
msg_fmt = _("Flavor access not found for %(flavor_id)s / "
"%(project_id)s combination.")
class FlavorExtraSpecUpdateCreateFailed(NovaException):
msg_fmt = _("Flavor %(id)d extra spec cannot be updated or created "
"after %(retries)d retries.")
class CellNotFound(NotFound):
msg_fmt = _("Cell %(cell_name)s doesn't exist.")
class CellExists(NovaException):
msg_fmt = _("Cell with name %(name)s already exists.")
class CellRoutingInconsistency(NovaException):
msg_fmt = _("Inconsistency in cell routing: %(reason)s")
class CellServiceAPIMethodNotFound(NotFound):
msg_fmt = _("Service API method not found: %(detail)s")
class CellTimeout(NotFound):
msg_fmt = _("Timeout waiting for response from cell")
class CellMaxHopCountReached(NovaException):
msg_fmt = _("Cell message has reached maximum hop count: %(hop_count)s")
class NoCellsAvailable(NovaException):
msg_fmt = _("No cells available matching scheduling criteria.")
class CellsUpdateUnsupported(NovaException):
msg_fmt = _("Cannot update cells configuration file.")
class InstanceUnknownCell(NotFound):
msg_fmt = _("Cell is not known for instance %(instance_uuid)s")
class SchedulerHostFilterNotFound(NotFound):
msg_fmt = _("Scheduler Host Filter %(filter_name)s could not be found.")
class FlavorExtraSpecsNotFound(NotFound):
msg_fmt = _("Flavor %(flavor_id)s has no extra specs with "
"key %(extra_specs_key)s.")
class ComputeHostMetricNotFound(NotFound):
msg_fmt = _("Metric %(name)s could not be found on the compute "
"host node %(host)s.%(node)s.")
class FileNotFound(NotFound):
msg_fmt = _("File %(file_path)s could not be found.")
class NoFilesFound(NotFound):
msg_fmt = _("Zero files could be found.")
class SwitchNotFoundForNetworkAdapter(NotFound):
msg_fmt = _("Virtual switch associated with the "
"network adapter %(adapter)s not found.")
class NetworkAdapterNotFound(NotFound):
msg_fmt = _("Network adapter %(adapter)s could not be found.")
class ClassNotFound(NotFound):
msg_fmt = _("Class %(class_name)s could not be found: %(exception)s")
class NotAllowed(NovaException):
msg_fmt = _("Action not allowed.")
class ImageRotationNotAllowed(NovaException):
msg_fmt = _("Rotation is not allowed for snapshots")
class RotationRequiredForBackup(NovaException):
msg_fmt = _("Rotation param is required for backup image_type")
class KeyPairExists(NovaException):
ec2_code = 'InvalidKeyPair.Duplicate'
msg_fmt = _("Key pair '%(key_name)s' already exists.")
class InstanceExists(NovaException):
msg_fmt = _("Instance %(name)s already exists.")
class FlavorExists(NovaException):
msg_fmt = _("Flavor with name %(name)s already exists.")
class FlavorIdExists(NovaException):
msg_fmt = _("Flavor with ID %(flavor_id)s already exists.")
class FlavorAccessExists(NovaException):
msg_fmt = _("Flavor access already exists for flavor %(flavor_id)s "
"and project %(project_id)s combination.")
class InvalidSharedStorage(NovaException):
msg_fmt = _("%(path)s is not on shared storage: %(reason)s")
class InvalidLocalStorage(NovaException):
msg_fmt = _("%(path)s is not on local storage: %(reason)s")
class StorageError(NovaException):
msg_fmt = _("Storage error: %(reason)s")
class MigrationError(NovaException):
msg_fmt = _("Migration error: %(reason)s")
class MigrationPreCheckError(MigrationError):
msg_fmt = _("Migration pre-check error: %(reason)s")
class MalformedRequestBody(NovaException):
msg_fmt = _("Malformed message body: %(reason)s")
# NOTE(johannes): NotFound should only be used when a 404 error is
# appropriate to be returned
class ConfigNotFound(NovaException):
msg_fmt = _("Could not find config at %(path)s")
class PasteAppNotFound(NovaException):
msg_fmt = _("Could not load paste app '%(name)s' from %(path)s")
class CannotResizeToSameFlavor(NovaException):
msg_fmt = _("When resizing, instances must change flavor!")
class ResizeError(NovaException):
msg_fmt = _("Resize error: %(reason)s")
class CannotResizeDisk(NovaException):
msg_fmt = _("Server disk was unable to be resized because: %(reason)s")
class FlavorMemoryTooSmall(NovaException):
msg_fmt = _("Flavor's memory is too small for requested image.")
class FlavorDiskTooSmall(NovaException):
msg_fmt = _("Flavor's disk is too small for requested image.")
class InsufficientFreeMemory(NovaException):
msg_fmt = _("Insufficient free memory on compute node to start %(uuid)s.")
class NoValidHost(NovaException):
msg_fmt = _("No valid host was found. %(reason)s")
class QuotaError(NovaException):
ec2_code = 'ResourceLimitExceeded'
msg_fmt = _("Quota exceeded: code=%(code)s")
# NOTE(cyeoh): 413 should only be used for the ec2 API
# The error status code for out of quota for the nova api should be
# 403 Forbidden.
code = 413
headers = {'Retry-After': 0}
safe = True
class TooManyInstances(QuotaError):
msg_fmt = _("Quota exceeded for %(overs)s: Requested %(req)s,"
" but already used %(used)d of %(allowed)d %(resource)s")
class FloatingIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of floating ips exceeded")
class FixedIpLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of fixed ips exceeded")
class MetadataLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of metadata items exceeds %(allowed)d")
class OnsetFileLimitExceeded(QuotaError):
msg_fmt = _("Personality file limit exceeded")
class OnsetFilePathLimitExceeded(OnsetFileLimitExceeded):
msg_fmt = _("Personality file path too long")
class OnsetFileContentLimitExceeded(OnsetFileLimitExceeded):
msg_fmt = _("Personality file content too long")
class KeypairLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of key pairs exceeded")
class SecurityGroupLimitExceeded(QuotaError):
ec2_code = 'SecurityGroupLimitExceeded'
msg_fmt = _("Maximum number of security groups or rules exceeded")
class PortLimitExceeded(QuotaError):
msg_fmt = _("Maximum number of ports exceeded")
class AggregateError(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s: action '%(action)s' "
"caused an error: %(reason)s.")
class AggregateNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s could not be found.")
class AggregateNameExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_name)s already exists.")
class AggregateHostNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no host %(host)s.")
class AggregateMetadataNotFound(NotFound):
msg_fmt = _("Aggregate %(aggregate_id)s has no metadata with "
"key %(metadata_key)s.")
class AggregateHostExists(NovaException):
msg_fmt = _("Aggregate %(aggregate_id)s already has host %(host)s.")
class FlavorCreateFailed(NovaException):
msg_fmt = _("Unable to create flavor")
class InstancePasswordSetFailed(NovaException):
msg_fmt = _("Failed to set admin password on %(instance)s "
"because %(reason)s")
safe = True
class InstanceNotFound(NotFound):
ec2_code = 'InvalidInstanceID.NotFound'
msg_fmt = _("Instance %(instance_id)s could not be found.")
class InstanceInfoCacheNotFound(NotFound):
msg_fmt = _("Info cache for instance %(instance_uuid)s could not be "
"found.")
class NodeNotFound(NotFound):
msg_fmt = _("Node %(node_id)s could not be found.")
class NodeNotFoundByUUID(NotFound):
msg_fmt = _("Node with UUID %(node_uuid)s could not be found.")
class MarkerNotFound(NotFound):
msg_fmt = _("Marker %(marker)s could not be found.")
class InvalidInstanceIDMalformed(Invalid):
msg_fmt = _("Invalid id: %(instance_id)s (expecting \"i-...\")")
ec2_code = 'InvalidInstanceID.Malformed'
class InvalidVolumeIDMalformed(Invalid):
msg_fmt = _("Invalid id: %(volume_id)s (expecting \"i-...\")")
ec2_code = 'InvalidVolumeID.Malformed'
class CouldNotFetchImage(NovaException):
msg_fmt = _("Could not fetch image %(image_id)s")
class CouldNotUploadImage(NovaException):
msg_fmt = _("Could not upload image %(image_id)s")
class TaskAlreadyRunning(NovaException):
msg_fmt = _("Task %(task_name)s is already running on host %(host)s")
class TaskNotRunning(NovaException):
msg_fmt = _("Task %(task_name)s is not running on host %(host)s")
class InstanceIsLocked(InstanceInvalidState):
msg_fmt = _("Instance %(instance_uuid)s is locked")
class ConfigDriveInvalidValue(Invalid):
msg_fmt = _("Invalid value for Config Drive option: %(option)s")
class ConfigDriveMountFailed(NovaException):
msg_fmt = _("Could not mount vfat config drive. %(operation)s failed. "
"Error: %(error)s")
class ConfigDriveUnknownFormat(NovaException):
msg_fmt = _("Unknown config drive format %(format)s. Select one of "
"iso9660 or vfat.")
class InterfaceAttachFailed(Invalid):
msg_fmt = _("Failed to attach network adapter device to "
"%(instance_uuid)s")
class InterfaceDetachFailed(Invalid):
msg_fmt = _("Failed to detach network adapter device from "
"%(instance_uuid)s")
class InstanceUserDataTooLarge(NovaException):
msg_fmt = _("User data too large. User data must be no larger than "
"%(maxsize)s bytes once base64 encoded. Your data is "
"%(length)d bytes")
class InstanceUserDataMalformed(NovaException):
msg_fmt = _("User data needs to be valid base 64.")
class UnexpectedTaskStateError(NovaException):
msg_fmt = _("Unexpected task state: expecting %(expected)s but "
"the actual state is %(actual)s")
class UnexpectedDeletingTaskStateError(UnexpectedTaskStateError):
pass
class InstanceActionNotFound(NovaException):
msg_fmt = _("Action for request_id %(request_id)s on instance"
" %(instance_uuid)s not found")
class InstanceActionEventNotFound(NovaException):
msg_fmt = _("Event %(event)s not found for action id %(action_id)s")
class UnexpectedVMStateError(NovaException):
msg_fmt = _("Unexpected VM state: expecting %(expected)s but "
"the actual state is %(actual)s")
class CryptoCAFileNotFound(FileNotFound):
msg_fmt = _("The CA file for %(project)s could not be found")
class CryptoCRLFileNotFound(FileNotFound):
msg_fmt = _("The CRL file for %(project)s could not be found")
class InstanceRecreateNotSupported(Invalid):
msg_fmt = _('Instance recreate is not supported.')
class ServiceGroupUnavailable(NovaException):
msg_fmt = _("The service from servicegroup driver %(driver)s is "
"temporarily unavailable.")
class DBNotAllowed(NovaException):
msg_fmt = _('%(binary)s attempted direct database access which is '
'not allowed by policy')
class UnsupportedVirtType(Invalid):
msg_fmt = _("Virtualization type '%(virt)s' is not supported by "
"this compute driver")
class UnsupportedHardware(Invalid):
msg_fmt = _("Requested hardware '%(model)s' is not supported by "
"the '%(virt)s' virt driver")
class Base64Exception(NovaException):
msg_fmt = _("Invalid Base 64 data for file %(path)s")
class BuildAbortException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s aborted: %(reason)s")
class RescheduledException(NovaException):
msg_fmt = _("Build of instance %(instance_uuid)s was re-scheduled: "
"%(reason)s")
class ShadowTableExists(NovaException):
msg_fmt = _("Shadow table with name %(name)s already exists.")
class InstanceFaultRollback(NovaException):
def __init__(self, inner_exception=None):
message = _("Instance rollback performed due to: %s")
self.inner_exception = inner_exception
super(InstanceFaultRollback, self).__init__(message % inner_exception)
class UnsupportedObjectError(NovaException):
msg_fmt = _('Unsupported object type %(objtype)s')
class OrphanedObjectError(NovaException):
msg_fmt = _('Cannot call %(method)s on orphaned %(objtype)s object')
class IncompatibleObjectVersion(NovaException):
msg_fmt = _('Version %(objver)s of %(objname)s is not supported')
class ReadOnlyFieldError(NovaException):
msg_fmt = _('Cannot modify readonly field %(field)s')
class ObjectActionError(NovaException):
msg_fmt = _('Object action %(action)s failed because: %(reason)s')
class ObjectFieldInvalid(NovaException):
msg_fmt = _('Field %(field)s of %(objname)s is not an instance of Field')
class CoreAPIMissing(NovaException):
msg_fmt = _("Core API extensions are missing: %(missing_apis)s")
class AgentError(NovaException):
msg_fmt = _('Error during following call to agent: %(method)s')
class AgentTimeout(AgentError):
msg_fmt = _('Unable to contact guest agent. '
'The following call timed out: %(method)s')
class AgentNotImplemented(AgentError):
msg_fmt = _('Agent does not support the call: %(method)s')
class InstanceGroupNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s could not be found.")
class InstanceGroupIdExists(NovaException):
msg_fmt = _("Instance group %(group_uuid)s already exists.")
class InstanceGroupMetadataNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no metadata with "
"key %(metadata_key)s.")
class InstanceGroupMemberNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no member with "
"id %(instance_id)s.")
class InstanceGroupPolicyNotFound(NotFound):
msg_fmt = _("Instance group %(group_uuid)s has no policy %(policy)s.")
class PluginRetriesExceeded(NovaException):
msg_fmt = _("Number of retries to plugin (%(num_retries)d) exceeded.")
class ImageDownloadModuleError(NovaException):
msg_fmt = _("There was an error with the download module %(module)s. "
"%(reason)s")
class ImageDownloadModuleMetaDataError(ImageDownloadModuleError):
msg_fmt = _("The metadata for this location will not work with this "
"module %(module)s. %(reason)s.")
class ImageDownloadModuleNotImplementedError(ImageDownloadModuleError):
msg_fmt = _("The method %(method_name)s is not implemented.")
class ImageDownloadModuleConfigurationError(ImageDownloadModuleError):
msg_fmt = _("The module %(module)s is misconfigured: %(reason)s.")
class ResourceMonitorError(NovaException):
msg_fmt = _("Error when creating resource monitor: %(monitor)s")
class PciDeviceWrongAddressFormat(NovaException):
msg_fmt = _("The PCI address %(address)s has an incorrect format.")
class PciDeviceInvalidAddressField(NovaException):
msg_fmt = _("Invalid PCI Whitelist: "
"The PCI address %(address)s has an invalid %(field)s.")
class PciDeviceInvalidDeviceName(NovaException):
msg_fmt = _("Invalid PCI Whitelist: "
"The PCI whitelist can specify devname or address,"
" but not both")
class PciDeviceNotFoundById(NotFound):
msg_fmt = _("PCI device %(id)s not found")
class PciDeviceNotFound(NovaException):
msg_fmt = _("PCI Device %(node_id)s:%(address)s not found.")
class PciDeviceInvalidStatus(NovaException):
msg_fmt = _(
"PCI device %(compute_node_id)s:%(address)s is %(status)s "
"instead of %(hopestatus)s")
class PciDeviceInvalidOwner(NovaException):
msg_fmt = _(
"PCI device %(compute_node_id)s:%(address)s is owned by %(owner)s "
"instead of %(hopeowner)s")
class PciDeviceRequestFailed(NovaException):
msg_fmt = _(
"PCI device request (%requests)s failed")
class PciDevicePoolEmpty(NovaException):
msg_fmt = _(
"Attempt to consume PCI device %(compute_node_id)s:%(address)s "
"from empty pool")
class PciInvalidAlias(NovaException):
msg_fmt = _("Invalid PCI alias definition: %(reason)s")
class PciRequestAliasNotDefined(NovaException):
msg_fmt = _("PCI alias %(alias)s is not defined")
class MissingParameter(NovaException):
ec2_code = 'MissingParameter'
msg_fmt = _("Not enough parameters: %(reason)s")
code = 400
class PciConfigInvalidWhitelist(Invalid):
msg_fmt = _("Invalid PCI devices Whitelist config %(reason)s")
class PciTrackerInvalidNodeId(NovaException):
msg_fmt = _("Cannot change %(node_id)s to %(new_node_id)s")
# Cannot be templated, msg needs to be constructed when raised.
class InternalError(NovaException):
ec2_code = 'InternalError'
msg_fmt = "%(err)s"
class PciDevicePrepareFailed(NovaException):
msg_fmt = _("Failed to prepare PCI device %(id)s for instance "
"%(instance_uuid)s: %(reason)s")
class PciDeviceDetachFailed(NovaException):
msg_fmt = _("Failed to detach PCI device %(dev)s: %(reason)s")
class PciDeviceUnsupportedHypervisor(NovaException):
msg_fmt = _("%(type)s hypervisor does not support PCI devices")
class KeyManagerError(NovaException):
msg_fmt = _("Key manager error: %(reason)s")
class VolumesNotRemoved(Invalid):
msg_fmt = _("Failed to remove volume(s): (%(reason)s)")
class InvalidVideoMode(Invalid):
msg_fmt = _("Provided video model (%(model)s) is not supported.")
class RngDeviceNotExist(Invalid):
msg_fmt = _("The provided RNG device path: (%(path)s) is not "
"present on the host.")
class RequestedVRamTooHigh(NovaException):
msg_fmt = _("The requested amount of video memory %(req_vram)d is higher "
"than the maximum allowed by flavor %(max_vram)d.")
class InvalidWatchdogAction(Invalid):
msg_fmt = _("Provided watchdog action (%(action)s) is not supported.")
class LiveMigrationWithOldNovaNotSafe(NovaException):
msg_fmt = _("Host %(server)s is running an old version of Nova, "
"live migrations involving that version may cause data loss. "
"Upgrade Nova on %(server)s and try again.")
class UnshelveException(NovaException):
msg_fmt = _("Error during unshelve instance %(instance_id)s: %(reason)s")
class ImageVCPULimitsRangeExceeded(Invalid):
msg_fmt = _("Image vCPU limits %(sockets)d:%(cores)d:%(threads)d "
"exceeds permitted %(maxsockets)d:%(maxcores)d:%(maxthreads)d")
class ImageVCPUTopologyRangeExceeded(Invalid):
msg_fmt = _("Image vCPU topology %(sockets)d:%(cores)d:%(threads)d "
"exceeds permitted %(maxsockets)d:%(maxcores)d:%(maxthreads)d")
class ImageVCPULimitsRangeImpossible(Invalid):
msg_fmt = _("Requested vCPU limits %(sockets)d:%(cores)d:%(threads)d "
"are impossible to satisfy for vcpus count %(vcpus)d")
class InvalidArchitectureName(Invalid):
msg_fmt = _("Architecture name '%(arch)s' is not recognised")
class ImageNUMATopologyIncomplete(Invalid):
msg_fmt = _("CPU and memory allocation must be provided for all "
"NUMA nodes")
class ImageNUMATopologyForbidden(Invalid):
msg_fmt = _("Image property '%(name)s' is not permitted to override "
"NUMA configuration set against the flavor")
class ImageNUMATopologyAsymmetric(Invalid):
msg_fmt = _("Asymmetric NUMA topologies require explicit assignment "
"of CPUs and memory to nodes in image or flavor")
class ImageNUMATopologyCPUOutOfRange(Invalid):
msg_fmt = _("CPU number %(cpunum)d is larger than max %(cpumax)d")
class ImageNUMATopologyCPUDuplicates(Invalid):
msg_fmt = _("CPU number %(cpunum)d is assigned to two nodes")
class ImageNUMATopologyCPUsUnassigned(Invalid):
msg_fmt = _("CPU number %(cpuset)s is not assigned to any node")
class ImageNUMATopologyMemoryOutOfRange(Invalid):
msg_fmt = _("%(memsize)d MB of memory assigned, but expected "
"%(memtotal)d MB")
class InvalidHostname(Invalid):
msg_fmt = _("Invalid characters in hostname '%(hostname)s'")
class NumaTopologyNotFound(NotFound):
msg_fmt = _("Instance %(instance_uuid)s does not specify a NUMA topology")
class SocketPortRangeExhaustedException(NovaException):
msg_fmt = _("Not able to acquire a free port for %(host)s")
class SocketPortInUseException(NovaException):
msg_fmt = _("Not able to bind %(host)s:%(port)d, %(error)s")
class ImageSerialPortNumberInvalid(Invalid):
msg_fmt = _("Number of serial ports '%(num_ports)s' specified in "
"'%(property)s' isn't valid.")
class ImageSerialPortNumberExceedFlavorValue(Invalid):
msg_fmt = _("Forbidden to exceed flavor value of number of serial "
"ports passed in image meta.")
class InvalidImageConfigDrive(Invalid):
msg_fmt = _("Image's config drive option '%(config_drive)s' is invalid")
jumpstarter-io/nova | nova/exception.py | Python | apache-2.0 | 49,700
# Copyright (c) 2019 Ericsson
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.services.network import base
class QosMinimumBandwidthRulesClient(base.BaseNetworkClient):
def create_minimum_bandwidth_rule(self, qos_policy_id, **kwargs):
"""Creates a minimum bandwidth rule for a QoS policy.
For full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/network/v2/index.html#create-minimum-bandwidth-rule
"""
uri = '/qos/policies/%s/minimum_bandwidth_rules' % qos_policy_id
post_data = {'minimum_bandwidth_rule': kwargs}
return self.create_resource(uri, post_data)
def update_minimum_bandwidth_rule(self, qos_policy_id, rule_id, **kwargs):
"""Updates a minimum bandwidth rule.
For full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/network/v2/index.html#update-minimum-bandwidth-rule
"""
uri = '/qos/policies/%s/minimum_bandwidth_rules/%s' % (
qos_policy_id, rule_id)
post_data = {'minimum_bandwidth_rule': kwargs}
return self.update_resource(uri, post_data)
def show_minimum_bandwidth_rule(self, qos_policy_id, rule_id, **fields):
"""Show details of a minimum bandwidth rule.
For full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/network/v2/index.html#show-minimum-bandwidth-rule-details
"""
uri = '/qos/policies/%s/minimum_bandwidth_rules/%s' % (
qos_policy_id, rule_id)
return self.show_resource(uri, **fields)
def delete_minimum_bandwidth_rule(self, qos_policy_id, rule_id):
"""Deletes a minimum bandwidth rule for a QoS policy.
For full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/network/v2/index.html#delete-minimum-bandwidth-rule
"""
uri = '/qos/policies/%s/minimum_bandwidth_rules/%s' % (
qos_policy_id, rule_id)
return self.delete_resource(uri)
def list_minimum_bandwidth_rules(self, qos_policy_id, **filters):
"""Lists all minimum bandwidth rules for a QoS policy.
For full list of available parameters, please refer to the official
API reference:
https://docs.openstack.org/api-ref/network/v2/index.html#list-minimum-bandwidth-rules-for-qos-policy
"""
uri = '/qos/policies/%s/minimum_bandwidth_rules' % qos_policy_id
return self.list_resources(uri, **filters)
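# Illustrative usage sketch (not part of the original module): exercising the
# client against an existing QoS policy. The `client` object and the rule
# parameters (min_kbps, direction) are assumptions based on the Neutron QoS
# API; consult the API reference linked in the docstrings for the full list.
#
#     body = client.create_minimum_bandwidth_rule(
#         policy_id, min_kbps=1000, direction='egress')
#     rule_id = body['minimum_bandwidth_rule']['id']
#     client.update_minimum_bandwidth_rule(policy_id, rule_id, min_kbps=2000)
#     client.show_minimum_bandwidth_rule(policy_id, rule_id)
#     client.list_minimum_bandwidth_rules(policy_id)
#     client.delete_minimum_bandwidth_rule(policy_id, rule_id)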
masayukig/tempest | tempest/lib/services/network/qos_minimum_bandwidth_rules_client.py | Python | apache-2.0 | 3,226