code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
"""
connects database and establishes Models
"""
import secrets
import os
from flask import Flask
from peewee import *
from playhouse.flask_utils import FlaskDB
from playhouse.db_url import connect
# Build the MySQL connection URL from environment variables.
# NOTE(review): os.environ[...] raises KeyError at import time when any of
# these five variables is unset — fail-fast by design, presumably; confirm
# the deployment always provides all of them.
dbUrl = 'mysql://{0}:{1}@{2}:{3}/{4}'.format(
    os.environ['DB_USER'],
    os.environ['DB_PW'],
    os.environ['DB_HOST'],
    os.environ['DB_PORT'],
    os.environ['DB_NAME']
)
# Open the database connection once at import time; shared by all models below.
db = connect(dbUrl)
app = Flask(__name__)
# Pull Flask config from this module's own top-level attributes.
app.config.from_object(__name__)
#db_wrapper = FlaskDB(app)
class BaseModel(Model):
    """Base peewee model binding every subclass to the shared connection."""
    class Meta:
        # All models inheriting from BaseModel use the module-level `db`.
        database = db
class Tea(BaseModel):
    """
    tea table: one row per tea entry.
    """
    # Display name of the tea.
    name = CharField()
    # Presumably a URL pointing at the tea — confirm against writers.
    link = CharField()
    # Free-form text payload; contents not constrained here.
    data = TextField()
# Create the table at import time; safe=True makes this a no-op when the
# table already exists.
db.create_tables([Tea], safe=True)
|
niole/gotea
|
dbConnection.py
|
Python
|
unlicense
| 705
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
from debug import oprint
from utils import doc_number_normalize, name_clean
from suscriber import Suscriber
class IngresosMember(models.Model, Suscriber):
    """Mirror link between infocoop_ingresos rows and res.partner records."""
    _name = "infocoop.ingresos_member"
    master_id = fields.Many2one('infocoop_ingresos', ondelete='cascade')
    slave_id = fields.Many2one('res.partner')
    mirror_dependencies = ["infocoop_ingresos", "infocoop_tablas"]

    def prepare_row_fields(self, row):
        """Build the partner field values mirrored from an ingresos row.

        Delegates to the matching socios_member record when one exists;
        otherwise assembles the values directly from *row*.
        """
        socios = self.env["infocoop_socios_member"].search(
            [("master_id", "=", row.id), ], limit=1)
        if socios:
            return socios.prepare_row_fields(row)

        doc_type, doc_number = doc_number_normalize(
            row.tipo_doc, row.nro_doc)
        # TODO: This could be more efficient
        doc_type_rec = self.env["afip.document_type"].search(
            (["active", "=", True], ["code", "=", doc_type]), limit=1)
        doc_type = doc_type_rec[0].id if doc_type_rec else None

        minutes_id = None
        if row.acta:
            minutes_id = self.env["minutes"].search(
                (["number", "=", row.acta],), limit=1).id
            if not minutes_id:
                minutes_id = self.env["minutes"].create(
                    {"number": row.acta, "date": row.fec_acta}).id

        # Lookup of the city name in the infocoop code table (tema T / subtema L).
        location = self.env["infocoop_tablas"].search(
            (["tema", "=", "T"],
             ["subtema", "=", "L"],
             ["codigo", "=", row.localidad]), limit=1)
        city = location["concepto"].title() if location else None

        # TODO: This could be more efficient
        responsability_id = self.env["afip.responsability"].search(
            (["name", "=", "Consumidor Final"],
             ["active", "=", 1]), limit=1).id
        return {
            "name": name_clean(row.nombre),
            "document_number": doc_number,
            "document_type_id": doc_type,
            "birthdate": row.fec_nacim,
            "affiliation_date": row.fec_ingr,
            "membership_number": row.socio,
            "phone": row.telefono,
            "comment": row.observacio,
            "admission_minutes_id": minutes_id,
            "city": city,
            "street": row.domicilio,
            "zip": row.codpostal,
            "responsability_id": responsability_id,
        }

    def get_slave_from_row(self, row):
        """Return the partner matching the row's membership number, or None."""
        if row.socio <= 0:
            return None
        return self.env[self.slave_id._name].search(
            [("membership_number", "=", row.socio), ], limit=1)
|
barct/odoo-coop
|
infocoop/models/ingresos_member.py
|
Python
|
gpl-3.0
| 3,065
|
# -*- coding: utf-8 -*-
import base64
import csv
import functools
import glob
import itertools
import jinja2
import logging
import operator
import datetime
import hashlib
import os
import re
import json
import sys
import time
import urllib2
import zlib
from xml.etree import ElementTree
from cStringIO import StringIO
import babel.messages.pofile
import werkzeug.utils
import werkzeug.wrappers
from openerp.api import Environment
try:
import xlwt
except ImportError:
xlwt = None
import openerp
import openerp.modules.registry
from openerp.addons.base.ir.ir_qweb import AssetsBundle, QWebTemplateNotFound
from openerp.modules import get_module_resource
from openerp.tools import topological_sort
from openerp.tools.translate import _
from openerp.tools import ustr
from openerp import http
import mimetypes
from openerp.http import request, serialize_exception as _serialize_exception, STATIC_CACHE
from openerp.exceptions import AccessError
_logger = logging.getLogger(__name__)

if hasattr(sys, 'frozen'):
    # When running on compiled windows binary, we don't have access to package loader.
    path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..', 'views'))
    loader = jinja2.FileSystemLoader(path)
else:
    loader = jinja2.PackageLoader('openerp.addons.web', "views")

# Jinja2 environment used to render the database-manager pages below.
env = jinja2.Environment(loader=loader, autoescape=True)
env.filters["json"] = json.dumps

# 1 week cache for asset bundles as advised by Google Page Speed
BUNDLE_MAXAGE = 60 * 60 * 24 * 7

#----------------------------------------------------------
# OpenERP Web helpers
#----------------------------------------------------------

# Shorthand aliases for the http layer's database helpers.
db_list = http.db_list
db_monodb = http.db_monodb
def serialize_exception(f):
    """Decorator: turn any exception raised by *f* into a JSON-encoded
    HTTP 500 response instead of letting it propagate.

    The payload mirrors the JSON-RPC error shape expected by the web
    client: ``{'code': 200, 'message': ..., 'data': <serialized exc>}``.
    """
    @functools.wraps(f)
    def wrap(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        # `except ... as e` (not the old `except Exception, e`) stays valid
        # on Python 2.6+ and is the only form Python 3 accepts.
        except Exception as e:
            _logger.exception("An exception occured during an http request")
            se = _serialize_exception(e)
            error = {
                'code': 200,
                'message': "Odoo Server Error",
                'data': se
            }
            return werkzeug.exceptions.InternalServerError(json.dumps(error))
    return wrap
def redirect_with_hash(*args, **kw):
    """
    .. deprecated:: 8.0
        Use the ``http.redirect_with_hash()`` function instead.
    """
    # Thin compatibility shim kept for old callers; delegates unchanged.
    return http.redirect_with_hash(*args, **kw)
def abort_and_redirect(url):
    """Abort the current request with a 302 redirect to *url*.

    The redirect is run through the WSGI app's get_response with
    explicit_session=False so cookie handling matches a normal response,
    then raised via werkzeug.exceptions.abort (does not return normally).
    """
    r = request.httprequest
    response = werkzeug.utils.redirect(url, 302)
    response = r.app.get_response(r, response, explicit_session=False)
    werkzeug.exceptions.abort(response)
def ensure_db(redirect='/web/database/selector'):
    """Make sure the current request has a usable database, redirecting to
    the database selector (or *redirect*) when none can be determined.

    Resolution order: explicit ``db`` query parameter (validated against
    http.db_filter), then the session's database, then the mono-db
    heuristic. Aborts with a redirect whenever the session must be
    switched, so callers may assume a valid ``request.session.db`` on
    normal return.
    """
    # This helper should be used in web client auth="none" routes
    # if those routes needs a db to work with.
    # If the heuristics does not find any database, then the users will be
    # redirected to db selector or any url specified by `redirect` argument.
    # If the db is taken out of a query parameter, it will be checked against
    # `http.db_filter()` in order to ensure it's legit and thus avoid db
    # forgering that could lead to xss attacks.
    db = request.params.get('db')

    # Ensure db is legit
    if db and db not in http.db_filter([db]):
        db = None

    if db and not request.session.db:
        # User asked a specific database on a new session.
        # That mean the nodb router has been used to find the route
        # Depending on installed module in the database, the rendering of the page
        # may depend on data injected by the database route dispatcher.
        # Thus, we redirect the user to the same page but with the session cookie set.
        # This will force using the database route dispatcher...
        r = request.httprequest
        url_redirect = r.base_url
        if r.query_string:
            # Can't use werkzeug.wrappers.BaseRequest.url with encoded hashes:
            # https://github.com/amigrave/werkzeug/commit/b4a62433f2f7678c234cdcac6247a869f90a7eb7
            url_redirect += '?' + r.query_string
        # NOTE(review): `response` is built but never used —
        # abort_and_redirect() constructs its own redirect response.
        response = werkzeug.utils.redirect(url_redirect, 302)
        request.session.db = db
        abort_and_redirect(url_redirect)

    # if db not provided, use the session one
    if not db and request.session.db and http.db_filter([request.session.db]):
        db = request.session.db

    # if no database provided and no database in session, use monodb
    if not db:
        db = db_monodb(request.httprequest)

    # if no db can be found til here, send to the database selector
    # the database selector will redirect to database manager if needed
    if not db:
        werkzeug.exceptions.abort(werkzeug.utils.redirect(redirect, 303))

    # always switch the session to the computed db
    if db != request.session.db:
        request.session.logout()
        abort_and_redirect(request.httprequest.url)

    request.session.db = db
def module_installed(environment):
    """Return the installed modules that ship web assets, topologically
    sorted so every module appears after its dependencies."""
    # Candidates module the current heuristic is the /static dir
    candidates = http.addons_manifest.keys()
    # Retrieve database installed modules
    # TODO The following code should move to ir.module.module.list_installed_modules()
    module_model = environment['ir.module.module']
    installed = module_model.search(
        [('state', '=', 'installed'), ('name', 'in', candidates)])
    dependency_map = {}
    for module in installed:
        dependency_map[module.name] = module.dependencies_id.mapped('name')
    return topological_sort(dependency_map)
def module_installed_bypass_session(dbname):
    """Like module_installed() but opens its own registry cursor on
    *dbname*, for use when no authenticated session is available.

    Returns an empty list when the database cannot be reached — matching
    the list type module_installed() returns on success (the original
    returned an empty dict here, an inconsistent type for callers).
    """
    try:
        registry = openerp.modules.registry.RegistryManager.get(dbname)
        with registry.cursor() as cr:
            return module_installed(
                environment=Environment(cr, openerp.SUPERUSER_ID, {}))
    except Exception:
        # Deliberate best-effort: any failure (bad db name, connection
        # error) degrades to "no modules" rather than breaking the page.
        pass
    return []
def module_boot(db=None):
    """Return the module names to load at web-client boot: server-wide
    modules first, then the database's installed modules."""
    server_wide_modules = openerp.conf.server_wide_modules or ['web']
    serverside = [name for name in server_wide_modules
                  if name in http.addons_manifest]
    monodb = db or db_monodb()
    dbside = module_installed_bypass_session(monodb) if monodb else []
    # Keep only database modules not already loaded server-wide.
    dbside = [name for name in dbside if name not in serverside]
    return serverside + dbside
def concat_xml(file_list):
    """Concatenate xml files

    Each file's root element is discarded; its children are appended to a
    new root element that reuses the first file's root tag.

    :param list(str) file_list: list of files to check
    :returns: (concatenation_result, checksum)
    :rtype: (str, str)
    """
    checksum = hashlib.new('sha1')
    if not file_list:
        return '', checksum.hexdigest()

    root = None
    for fname in file_list:
        with open(fname, 'rb') as fp:
            contents = fp.read()
            # Checksum covers the raw bytes of every input file, in order.
            checksum.update(contents)
            fp.seek(0)
            xml = ElementTree.parse(fp).getroot()

        if root is None:
            root = ElementTree.Element(xml.tag)
        #elif root.tag != xml.tag:
        #    raise ValueError("Root tags missmatch: %r != %r" % (root.tag, xml.tag))

        # Iterate the element directly: Element.getchildren() was
        # deprecated and is removed in Python 3.9.
        for child in xml:
            root.append(child)
    return ElementTree.tostring(root, 'utf-8'), checksum.hexdigest()
def fs2web(path):
    """Translate a filesystem path into a web path (forward slashes)."""
    return path.replace(os.path.sep, '/')
def manifest_glob(extension, addons=None, db=None, include_remotes=False):
    """Expand the asset glob patterns declared under *extension* ('css',
    'js', 'qweb', ...) in each addon's manifest.

    :param str extension: manifest key whose glob patterns are expanded
    :param addons: comma-separated addon names, or None for module_boot(db)
    :param db: database used when *addons* is None
    :param bool include_remotes: also keep http(s):// and // URLs
    :returns: list of (filesystem_path, web_path) pairs; filesystem_path
        is None for remote URLs
    """
    if addons is None:
        addons = module_boot(db=db)
    else:
        addons = addons.split(',')

    r = []
    for addon in addons:
        manifest = http.addons_manifest.get(addon, None)
        if not manifest:
            continue
        # ensure does not ends with /
        addons_path = os.path.join(manifest['addons_path'], '')[:-1]
        globlist = manifest.get(extension, [])
        for pattern in globlist:
            if pattern.startswith(('http://', 'https://', '//')):
                if include_remotes:
                    r.append((None, pattern))
            else:
                for path in glob.glob(os.path.normpath(os.path.join(addons_path, addon, pattern))):
                    # Strip the addons dir prefix to get the web-visible path.
                    r.append((path, fs2web(path[len(addons_path):])))
    return r
def manifest_list(extension, mods=None, db=None, debug=None):
    """ list resources to load specifying either:
    mods: a comma separated string listing modules
    db: a database name (return all installed modules in that database)
    """
    if debug is not None:
        _logger.warning("openerp.addons.web.main.manifest_list(): debug parameter is deprecated")
    return [web_path for _fs_path, web_path in
            manifest_glob(extension, addons=mods, db=db, include_remotes=True)]
def get_last_modified(files):
    """ Returns the modification time of the most recently modified
    file provided

    :param list(str) files: names of files to check
    :return: most recent modification time amongst the fileset
    :rtype: datetime.datetime
    """
    names = list(files)
    if not names:
        # Epoch fallback so callers always get a comparable datetime.
        return datetime.datetime(1970, 1, 1)
    return max(datetime.datetime.fromtimestamp(os.path.getmtime(name))
               for name in names)
def make_conditional(response, last_modified=None, etag=None, max_age=0):
    """ Makes the provided response conditional based upon the request,
    and mandates revalidation from clients

    Uses Werkzeug's own :meth:`ETagResponseMixin.make_conditional`, after
    setting ``last_modified`` and ``etag`` correctly on the response object

    :param response: Werkzeug response
    :type response: werkzeug.wrappers.Response
    :param datetime.datetime last_modified: last modification date of the response content
    :param str etag: some sort of checksum of the content (deep etag)
    :return: the response object provided
    :rtype: werkzeug.wrappers.Response
    """
    cache = response.cache_control
    cache.must_revalidate = True
    cache.max_age = max_age
    if last_modified:
        response.last_modified = last_modified
    if etag:
        response.set_etag(etag)
    return response.make_conditional(request.httprequest)
def login_and_redirect(db, login, key, redirect_url='/web'):
    """Authenticate the session against *db* and redirect to *redirect_url*."""
    request.session.authenticate(db, login, key)
    return set_cookie_and_redirect(redirect_url)
def set_cookie_and_redirect(redirect_url):
    """Return a 303 redirect to *redirect_url*, keeping the Location
    header exactly as given (werkzeug's autocorrection disabled)."""
    response = werkzeug.utils.redirect(redirect_url, 303)
    response.autocorrect_location_header = False
    return response
def load_actions_from_ir_values(key, key2, models, meta):
    """Fetch actions bound through ir.values and normalize each one.

    Returns (id, name, cleaned_action) triples; every action goes through
    clean_action() before being handed to the client.
    """
    Values = request.session.model('ir.values')
    actions = Values.get(key, key2, models, meta, request.context)
    return [(id, name, clean_action(action))
            for id, name, action in actions]
def clean_action(action):
    """Ensure an action descriptor has 'flags' and 'type' keys, fixing a
    window action's view modes on the way through."""
    action.setdefault('flags', {})
    if action.setdefault('type', 'ir.actions.act_window_close') == 'ir.actions.act_window':
        return fix_view_modes(action)
    return action
# I think generate_views,fix_view_modes should go into js ActionManager
def generate_views(action):
    """Derive ``action['views']`` from ``view_mode`` and ``view_id`` for
    hand-built action dictionaries (e.g. returned by buttons) that lack
    the server-generated ``views`` sequence.

    Handles two cases: no view_id with one or more view modes, or a
    single view_id with a single view mode.

    :param dict action: action descriptor dictionary to generate a views key for
    :raises ValueError: when several view modes are combined with a view id
    """
    view_id = action.get('view_id') or False
    if isinstance(view_id, (list, tuple)):
        # Database pairs look like (id, name); keep only the id.
        view_id = view_id[0]

    # providing at least one view mode is a requirement, not an option
    modes = action['view_mode'].split(',')

    if len(modes) == 1:
        action['views'] = [(view_id, modes[0])]
        return
    if view_id:
        raise ValueError('Non-db action dictionaries should provide '
                         'either multiple view modes or a single view '
                         'mode and an optional view id.\n\n Got view '
                         'modes %r and view id %r for action %r' % (
                             modes, view_id, action))
    action['views'] = [(False, mode) for mode in modes]
def fix_view_modes(action):
    """Fold the legacy ``view_type`` attribute into ``view_mode``.

    Historically the ``tree`` view mode stands for both list and tree
    views, disambiguated by ``view_type``: ``form`` means a list view,
    ``tree`` an actual tree view. This rewrites ``tree`` + view_type
    ``form`` into the explicit ``list`` mode.

    TODO: this should go into the doc, some kind of "peculiarities" section

    :param dict action: an action descriptor
    :returns: the action, modified in place
    """
    if not action.get('views'):
        generate_views(action)

    # A non-'form' view_type means an actual tree view: nothing to fold.
    if action.pop('view_type', 'form') != 'form':
        return action

    def as_list(mode):
        # 'tree' combined with view_type 'form' is really a list view.
        return 'list' if mode == 'tree' else mode

    if 'view_mode' in action:
        action['view_mode'] = ','.join(
            as_list(mode) for mode in action['view_mode'].split(','))

    action['views'] = [[view, as_list(mode)] for view, mode in action['views']]
    return action
def _local_web_translations(trans_file):
messages = []
try:
with open(trans_file) as t_file:
po = babel.messages.pofile.read_po(t_file)
except Exception:
return
for x in po:
if x.id and x.string and "openerp-web" in x.auto_comments:
messages.append({'id': x.id, 'string': x.string})
return messages
def xml2json_from_elementtree(el, preserve_whitespaces=False):
    """ xml2json-direct
    Simple and straightforward XML-to-JSON converter in Python
    New BSD Licensed
    http://code.google.com/p/xml2json-direct/
    """
    node = {}
    tag = el.tag
    if tag[0] == "{":
        # Qualified tag: '{namespace}name' — split the two pieces apart.
        ns, local_name = tag.rsplit("}", 1)
        node["tag"] = local_name
        node["namespace"] = ns[1:]
    else:
        node["tag"] = tag
    node["attrs"] = dict(el.items())

    def keep(text):
        # Whitespace-only text is dropped unless explicitly preserved.
        return text and (preserve_whitespaces or text.strip() != '')

    children = []
    if keep(el.text):
        children.append(el.text)
    for sub in el:
        children.append(xml2json_from_elementtree(sub, preserve_whitespaces))
        if keep(sub.tail):
            children.append(sub.tail)
    node["children"] = children
    return node
def content_disposition(filename):
    """Build a Content-Disposition 'attachment' header value for
    *filename*, working around old-browser quirks.

    - MSIE < 9: plain percent-encoded filename (no RFC 5987 support)
    - Safari < 537: ascii fallback with non-ascii characters replaced
    - everyone else: RFC 5987 ``filename*=UTF-8''...`` form
    """
    filename = ustr(filename)
    escaped = urllib2.quote(filename.encode('utf8'))
    browser = request.httprequest.user_agent.browser
    # Major version only; '0' when the UA string carries no version.
    version = int((request.httprequest.user_agent.version or '0').split('.')[0])
    if browser == 'msie' and version < 9:
        return "attachment; filename=%s" % escaped
    elif browser == 'safari' and version < 537:
        return u"attachment; filename=%s" % filename.encode('ascii', 'replace')
    else:
        return "attachment; filename*=UTF-8''%s" % escaped
def binary_content(xmlid=None, model='ir.attachment', id=None, field='datas', unique=False, filename=None, filename_field='datas_fname', download=False, mimetype=None, default_mimetype='application/octet-stream', env=None):
    """ Get file, attachment or downloadable content

    If the ``xmlid`` and ``id`` parameter is omitted, fetches the default value for the
    binary field (via ``default_get``), otherwise fetches the field for
    that precise record.

    :param str xmlid: xmlid of the record
    :param str model: name of the model to fetch the binary from
    :param int id: id of the record from which to fetch the binary
    :param str field: binary field
    :param bool unique: add a max-age for the cache control
    :param str filename: choose a filename
    :param str filename_field: if not create an filename with model-id-field
    :param bool download: apply headers to download the file
    :param str mimetype: mintype of the field (for headers)
    :param str default_mimetype: default mintype if no mintype found
    :param Environment env: by default use request.env
    :returns: (status, headers, content) — 404 when the record or field is
        missing, 403 on access error, 304 when the client's ETag matches,
        301 for URL-type attachments, otherwise 200
    """
    env = env or request.env
    # get object and content
    obj = None
    if xmlid:
        obj = env.ref(xmlid, False)
    elif id and model in env.registry:
        obj = env[model].browse(int(id))

    # obj exists
    if not obj or not obj.exists() or field not in obj:
        return (404, [], None)

    # check read access
    try:
        # Reading __last_update both checks access rights and feeds the ETag.
        last_update = obj['__last_update']
    except AccessError:
        return (403, [], None)

    status = 200

    # filename
    if not filename:
        if filename_field in obj:
            filename = obj[filename_field]
        else:
            filename = "%s-%s-%s" % (obj._model._name, obj.id, field)

    # mimetype
    if not mimetype:
        if 'mimetype' in obj and obj.mimetype and obj.mimetype != 'application/octet-stream':
            mimetype = obj.mimetype
        elif filename:
            # Fall back to guessing from the filename extension.
            mimetype = mimetypes.guess_type(filename)[0]
        if not mimetype:
            mimetype = default_mimetype
    headers = [('Content-Type', mimetype)]

    # cache
    etag = hasattr(request, 'httprequest') and request.httprequest.headers.get('If-None-Match')
    # ETag derives from the record's write date, not the payload bytes.
    retag = hashlib.md5(last_update).hexdigest()
    if etag == retag:
        status = 304
    headers.append(('ETag', retag))
    if unique:
        headers.append(('Cache-Control', 'max-age=%s' % STATIC_CACHE))
    else:
        headers.append(('Cache-Control', 'max-age=0'))

    # content-disposition default name
    if download:
        headers.append(('Content-Disposition', content_disposition(filename)))

    # get content after cache control
    if model == 'ir.attachment' and obj.type == 'url' and obj.url:
        # URL attachments redirect instead of streaming content.
        status = 301
        content = obj.url
    else:
        content = obj[field] or ''
    return (status, headers, content)
#----------------------------------------------------------
# OpenERP Web web Controllers
#----------------------------------------------------------
class Home(http.Controller):
    """Root web-client routes: landing page, /web bootstrap and login."""

    @http.route('/', type='http', auth="none")
    def index(self, s_action=None, db=None, **kw):
        # Everything at the root is forwarded to the web client.
        return http.local_redirect('/web', query=request.params, keep_hash=True)

    # ideally, this route should be `auth="user"` but that don't work in non-monodb mode.
    @http.route('/web', type='http', auth="none")
    def web_client(self, s_action=None, **kw):
        """Render the web client, bouncing to /web/login when no user is
        authenticated."""
        ensure_db()
        if not request.session.uid:
            return werkzeug.utils.redirect('/web/login', 303)
        if kw.get('redirect'):
            return werkzeug.utils.redirect(kw.get('redirect'), 303)

        request.uid = request.session.uid
        menu_data = request.registry['ir.ui.menu'].load_menus(request.cr, request.uid, request.debug, context=request.context)
        return request.render('web.webclient_bootstrap', qcontext={'menu_data': menu_data})

    @http.route('/web/dbredirect', type='http', auth="none")
    def web_db_redirect(self, redirect='/', **kw):
        """Redirect helper that first makes sure a database is selected."""
        ensure_db()
        return werkzeug.utils.redirect(redirect, 303)

    @http.route('/web/login', type='http', auth="none")
    def web_login(self, redirect=None, **kw):
        """Show the login form (GET) or attempt authentication (POST)."""
        ensure_db()
        request.params['login_success'] = False
        if request.httprequest.method == 'GET' and redirect and request.session.uid:
            # Already logged in: skip the form entirely.
            return http.redirect_with_hash(redirect)

        if not request.uid:
            request.uid = openerp.SUPERUSER_ID

        values = request.params.copy()
        try:
            values['databases'] = http.db_list()
        except openerp.exceptions.AccessDenied:
            values['databases'] = None

        if request.httprequest.method == 'POST':
            old_uid = request.uid
            uid = request.session.authenticate(request.session.db, request.params['login'], request.params['password'])
            if uid is not False:
                request.params['login_success'] = True
                if not redirect:
                    redirect = '/web'
                return http.redirect_with_hash(redirect)
            # Authentication failed: restore the previous uid and re-render.
            request.uid = old_uid
            values['error'] = "Wrong login/password"
        return request.render('web.login', values)
class WebClient(http.Controller):
    """Web-client support endpoints: asset lists, locales, qweb templates
    and translations."""

    @http.route('/web/webclient/csslist', type='json', auth="none")
    def csslist(self, mods=None):
        """Return the css asset paths for *mods* (comma-separated)."""
        return manifest_list('css', mods=mods)

    @http.route('/web/webclient/jslist', type='json', auth="none")
    def jslist(self, mods=None):
        """Return the js asset paths for *mods* (comma-separated)."""
        return manifest_list('js', mods=mods)

    @http.route('/web/webclient/locale/<string:lang>', type='http', auth="none")
    def load_locale(self, lang):
        """Serve the moment.js locale file for *lang*, trying the full
        ll-cc code first, then the bare language code."""
        magic_file_finding = [lang.replace("_",'-').lower(), lang.split('_')[0]]
        addons_path = http.addons_manifest['web']['addons_path']
        #load momentjs locale
        # NOTE(review): momentjs_locale_file is assigned but never used.
        momentjs_locale_file = False
        momentjs_locale = ""
        for code in magic_file_finding:
            try:
                with open(os.path.join(addons_path, 'web', 'static', 'lib', 'moment', 'locale', code + '.js'), 'r') as f:
                    momentjs_locale = f.read()
                #we found a locale matching so we can exit
                break
            except IOError:
                continue

        #return the content of the locale
        headers = [('Content-Type', 'application/javascript'), ('Cache-Control', 'max-age=%s' % (36000))]
        return request.make_response(momentjs_locale, headers)

    @http.route('/web/webclient/qweb', type='http', auth="none")
    def qweb(self, mods=None, db=None):
        """Serve the concatenated qweb templates, honoring
        If-Modified-Since for client-side caching."""
        files = [f[0] for f in manifest_glob('qweb', addons=mods, db=db)]
        last_modified = get_last_modified(files)
        if request.httprequest.if_modified_since and request.httprequest.if_modified_since >= last_modified:
            return werkzeug.wrappers.Response(status=304)

        content, checksum = concat_xml(files)
        return make_conditional(
            request.make_response(content, [('Content-Type', 'text/xml')]),
            last_modified, checksum)

    @http.route('/web/webclient/bootstrap_translations', type='json', auth="none")
    def bootstrap_translations(self, mods):
        """ Load local translations from *.po files, as a temporary solution
        until we have established a valid session. This is meant only
        for translating the login page and db management chrome, using
        the browser's language. """
        # For performance reasons we only load a single translation, so for
        # sub-languages (that should only be partially translated) we load the
        # main language PO instead - that should be enough for the login screen.
        lang = request.lang.split('_')[0]

        translations_per_module = {}
        for addon_name in mods:
            if http.addons_manifest[addon_name].get('bootstrap'):
                addons_path = http.addons_manifest[addon_name]['addons_path']
                f_name = os.path.join(addons_path, addon_name, "i18n", lang + ".po")
                if not os.path.exists(f_name):
                    continue
                translations_per_module[addon_name] = {'messages': _local_web_translations(f_name)}

        return {"modules": translations_per_module,
                "lang_parameters": None}

    @http.route('/web/webclient/translations', type='json', auth="none")
    def translations(self, mods=None, lang=None):
        """Load the openerp-web translations for *mods* (default: all
        installed modules) in *lang* (default: context language)."""
        request.disable_db = False
        uid = openerp.SUPERUSER_ID
        if mods is None:
            m = request.registry.get('ir.module.module')
            mods = [x['name'] for x in m.search_read(request.cr, uid,
                [('state','=','installed')], ['name'])]
        if lang is None:
            lang = request.context["lang"]
        res_lang = request.registry.get('res.lang')
        ids = res_lang.search(request.cr, uid, [("code", "=", lang)])
        lang_params = None
        if ids:
            lang_params = res_lang.read(request.cr, uid, ids[0],
                ["name", "direction", "date_format", "time_format", "grouping", "decimal_point", "thousands_sep"])

        # Regional languages (ll_CC) must inherit/override their parent lang (ll), but this is
        # done server-side when the language is loaded, so we only need to load the user's lang.
        ir_translation = request.registry.get('ir.translation')
        translations_per_module = {}
        messages = ir_translation.search_read(request.cr, uid, [('module','in',mods),('lang','=',lang),
            ('comments','like','openerp-web'),('value','!=',False),
            ('value','!=','')],
            ['module','src','value','lang'], order='module')
        for mod, msg_group in itertools.groupby(messages, key=operator.itemgetter('module')):
            translations_per_module.setdefault(mod,{'messages':[]})
            translations_per_module[mod]['messages'].extend({'id': m['src'],
                                                             'string': m['value']} \
                for m in msg_group)
        return {
            'lang_parameters': lang_params,
            'modules': translations_per_module,
            'multi_lang': len(res_lang.get_installed(request.cr, uid)) > 1,
        }

    @http.route('/web/webclient/version_info', type='json', auth="none")
    def version_info(self):
        """Return the server's version information dict."""
        return openerp.service.common.exp_version()

    @http.route('/web/tests', type='http', auth="none")
    def index(self, mod=None, **kwargs):
        """Render the qunit javascript test-suite page."""
        return request.render('web.qunit_suite')
class Proxy(http.Controller):
    """Routes that replay requests through the WSGI stack itself."""

    @http.route('/web/proxy/load', type='json', auth="none")
    def load(self, path):
        """ Proxies an HTTP request through a JSON request.

        It is strongly recommended to not request binary files through this,
        as the result will be a binary data blob as well.

        :param path: actual request path
        :return: file content
        """
        from werkzeug.test import Client
        from werkzeug.wrappers import BaseResponse

        base_url = request.httprequest.base_url
        return Client(request.httprequest.app, BaseResponse).get(path, base_url=base_url).data

    @http.route('/web/proxy/post/<path:path>', type='http', auth='user', methods=['GET'])
    def post(self, path):
        """Effectively execute a POST request that was hooked through user login"""
        with request.session.load_request_data() as data:
            if not data:
                raise werkzeug.exceptions.BadRequest()
            from werkzeug.test import Client
            from werkzeug.wrappers import BaseResponse
            base_url = request.httprequest.base_url
            query_string = request.httprequest.query_string
            client = Client(request.httprequest.app, BaseResponse)
            # Replay the saved payload as a POST within the current session.
            headers = {'X-Openerp-Session-Id': request.session.sid}
            return client.post('/' + path, base_url=base_url, query_string=query_string,
                               headers=headers, data=data)
class Database(http.Controller):
    """Database-manager endpoints: create, duplicate, drop, backup,
    restore and master-password change.

    Every handler catches failures and re-renders the manager page with
    an ``error`` message. The only code change from the original is
    ``except Exception as e`` (valid on Python 2.6+ and required by
    Python 3) in place of the removed ``except Exception, e`` form.
    """

    def _render_template(self, **d):
        """Render database_manager.html with the shared context values."""
        d.setdefault('manage', True)
        d['insecure'] = openerp.tools.config['admin_passwd'] == 'admin'
        d['list_db'] = openerp.tools.config['list_db']
        d['langs'] = openerp.service.db.exp_list_lang()
        # databases list
        d['databases'] = []
        try:
            d['databases'] = http.db_list()
        except openerp.exceptions.AccessDenied:
            monodb = db_monodb()
            if monodb:
                d['databases'] = [monodb]
        return env.get_template("database_manager.html").render(d)

    @http.route('/web/database/selector', type='http', auth="none")
    def selector(self, **kw):
        return self._render_template(manage=False)

    @http.route('/web/database/manager', type='http', auth="none")
    def manager(self, **kw):
        return self._render_template()

    @http.route('/web/database/create', type='http', auth="none", methods=['POST'], csrf=False)
    def create(self, master_pwd, name, lang, password, **post):
        """Create a new database and log straight in as its admin user."""
        try:
            request.session.proxy("db").create_database(master_pwd, name, bool(post.get('demo')), lang, password)
            request.session.authenticate(name, 'admin', password)
            return http.local_redirect('/web/')
        except Exception as e:
            error = "Database creation error: %s" % e
            return self._render_template(error=error)

    @http.route('/web/database/duplicate', type='http', auth="none", methods=['POST'], csrf=False)
    def duplicate(self, master_pwd, name, new_name):
        try:
            request.session.proxy("db").duplicate_database(master_pwd, name, new_name)
            return http.local_redirect('/web/database/manager')
        except Exception as e:
            error = "Database duplication error: %s" % e
            return self._render_template(error=error)

    @http.route('/web/database/drop', type='http', auth="none", methods=['POST'], csrf=False)
    def drop(self, master_pwd, name):
        try:
            request.session.proxy("db").drop(master_pwd, name)
            return http.local_redirect('/web/database/manager')
        except Exception as e:
            error = "Database deletion error: %s" % e
            return self._render_template(error=error)

    @http.route('/web/database/backup', type='http', auth="none", methods=['POST'], csrf=False)
    def backup(self, master_pwd, name, backup_format='zip'):
        """Stream a dump of database *name* as a timestamped download."""
        try:
            openerp.service.db.check_super(master_pwd)
            ts = datetime.datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%S")
            filename = "%s_%s.%s" % (name, ts, backup_format)
            headers = [
                ('Content-Type', 'application/octet-stream; charset=binary'),
                ('Content-Disposition', content_disposition(filename)),
            ]
            dump_stream = openerp.service.db.dump_db(name, None, backup_format)
            response = werkzeug.wrappers.Response(dump_stream, headers=headers, direct_passthrough=True)
            return response
        except Exception as e:
            _logger.exception('Database.backup')
            error = "Database backup error: %s" % e
            return self._render_template(error=error)

    @http.route('/web/database/restore', type='http', auth="none", methods=['POST'], csrf=False)
    def restore(self, master_pwd, backup_file, name, copy=False):
        try:
            data = base64.b64encode(backup_file.read())
            request.session.proxy("db").restore(master_pwd, name, data, copy)
            return http.local_redirect('/web/database/manager')
        except Exception as e:
            error = "Database restore error: %s" % e
            return self._render_template(error=error)

    @http.route('/web/database/change_password', type='http', auth="none", methods=['POST'], csrf=False)
    def change_password(self, master_pwd, master_pwd_new):
        try:
            request.session.proxy("db").change_admin_password(master_pwd, master_pwd_new)
            return http.local_redirect('/web/database/manager')
        except Exception as e:
            error = "Master password update error: %s" % e
            return self._render_template(error=error)
class Session(http.Controller):
    """Session lifecycle endpoints: info, authentication, password
    change, saved actions and logout.

    Only code change from the original: ``except Exception as e``
    (Python 2.6+/3 compatible) replaces the Python-2-only
    ``except Exception, e`` form in get_lang_list().
    """

    def session_info(self):
        """Return the serializable description of the current session."""
        request.session.ensure_valid()
        return {
            "session_id": request.session_id,
            "uid": request.session.uid,
            "user_context": request.session.get_context() if request.session.uid else {},
            "db": request.session.db,
            "username": request.session.login,
            "company_id": request.env.user.company_id.id if request.session.uid else None,
            "partner_id": request.env.user.partner_id.id if request.session.uid and request.env.user.partner_id else None,
        }

    @http.route('/web/session/get_session_info', type='json', auth="none")
    def get_session_info(self):
        request.uid = request.session.uid
        request.disable_db = False
        return self.session_info()

    @http.route('/web/session/authenticate', type='json', auth="none")
    def authenticate(self, db, login, password, base_location=None):
        request.session.authenticate(db, login, password)
        return self.session_info()

    @http.route('/web/session/change_password', type='json', auth="user")
    def change_password(self, fields):
        """Change the current user's password; *fields* is the client
        form's list of {'name': ..., 'value': ...} dicts."""
        old_password, new_password, confirm_password = operator.itemgetter('old_pwd', 'new_password', 'confirm_pwd')(
            dict(map(operator.itemgetter('name', 'value'), fields)))
        if not (old_password.strip() and new_password.strip() and confirm_password.strip()):
            return {'error': _('You cannot leave any password empty.'), 'title': _('Change Password')}
        if new_password != confirm_password:
            return {'error': _('The new password and its confirmation must be identical.'), 'title': _('Change Password')}
        try:
            if request.session.model('res.users').change_password(
                    old_password, new_password):
                return {'new_password': new_password}
        except Exception:
            return {'error': _('The old password you provided is incorrect, your password was not changed.'), 'title': _('Change Password')}
        return {'error': _('Error, password not changed !'), 'title': _('Change Password')}

    @http.route('/web/session/get_lang_list', type='json', auth="none")
    def get_lang_list(self):
        try:
            return request.session.proxy("db").list_lang() or []
        except Exception as e:
            return {"error": e, "title": _("Languages")}

    @http.route('/web/session/modules', type='json', auth="user")
    def modules(self):
        # return all installed modules. Web client is smart enough to not load a module twice
        return module_installed(environment=request.env(user=openerp.SUPERUSER_ID))

    @http.route('/web/session/save_session_action', type='json', auth="user")
    def save_session_action(self, the_action):
        """
        This method store an action object in the session object and returns an integer
        identifying that action. The method get_session_action() can be used to get
        back the action.

        :param the_action: The action to save in the session.
        :type the_action: anything
        :return: A key identifying the saved action.
        :rtype: integer
        """
        return request.httpsession.save_action(the_action)

    @http.route('/web/session/get_session_action', type='json', auth="user")
    def get_session_action(self, key):
        """
        Gets back a previously saved action. This method can return None if the action
        was saved since too much time (this case should be handled in a smart way).

        :param key: The key given by save_session_action()
        :type key: integer
        :return: The saved action or None.
        :rtype: anything
        """
        return request.httpsession.get_action(key)

    @http.route('/web/session/check', type='json', auth="user")
    def check(self):
        request.session.assert_valid()
        return None

    @http.route('/web/session/destroy', type='json', auth="user")
    def destroy(self):
        request.session.logout()

    @http.route('/web/session/logout', type='http', auth="none")
    def logout(self, redirect='/web'):
        # keep_db=True so the user lands back on the same database's login.
        request.session.logout(keep_db=True)
        return werkzeug.utils.redirect(redirect, 303)
class Menu(http.Controller):
    @http.route('/web/menu/load_needaction', type='json', auth="user")
    def load_needaction(self, menu_ids):
        """Fetch the "needaction" counters for the given menu ids.

        :return: mapping of menu_id to
                 {'needaction_enabled': bool, 'needaction_counter': int}
        """
        Menus = request.session.model('ir.ui.menu')
        return Menus.get_needaction_data(menu_ids, request.context)
class DataSet(http.Controller):
    """Generic JSON-RPC endpoints used by the web client to search, read,
    call methods and trigger workflows on arbitrary models."""
    @http.route('/web/dataset/search_read', type='json', auth="user")
    def search_read(self, model, fields=False, offset=0, limit=False, domain=None, sort=None):
        return self.do_search_read(model, fields, offset, limit, domain, sort)
    def do_search_read(self, model, fields=False, offset=0, limit=False, domain=None
                       , sort=None):
        """ Performs a search() followed by a read() (if needed) using the
        provided search criteria

        :param str model: the name of the model to search on
        :param fields: a list of the fields to return in the result records
        :type fields: [str]
        :param int offset: from which index should the results start being returned
        :param int limit: the maximum number of records to return
        :param list domain: the search domain for the query
        :param list sort: sorting directives
        :returns: A structure (dict) with two keys: length (total number of
                  records matching the domain) and records (the paginated
                  records with the requested fields)
        :rtype: dict
        """
        Model = request.session.model(model)
        records = Model.search_read(domain, fields, offset or 0, limit or False, sort or False,
                                    request.context)
        if not records:
            return {
                'length': 0,
                'records': []
            }
        if limit and len(records) == limit:
            # A full page came back: the total may be larger, count it
            # explicitly so the client can paginate.
            length = Model.search_count(domain, request.context)
        else:
            length = len(records) + (offset or 0)
        return {
            'length': length,
            'records': records
        }
    @http.route('/web/dataset/load', type='json', auth="user")
    def load(self, model, id, fields):
        # Read one record; returns {'value': {}} when the id does not exist.
        m = request.session.model(model)
        value = {}
        r = m.read([id], False, request.context)
        if r:
            value = r[0]
        return {'value': value}
    def call_common(self, model, method, args, domain_id=None, context_id=None):
        return self._call_kw(model, method, args, {})
    def _call_kw(self, model, method, args, kwargs):
        # Underscore-prefixed methods are private: refuse remote invocation.
        if method.startswith('_'):
            raise AccessError(_("Underscore prefixed methods cannot be remotely called"))
        return getattr(request.registry.get(model), method)(request.cr, request.uid, *args, **kwargs)
    @http.route('/web/dataset/call', type='json', auth="user")
    def call(self, model, method, args, domain_id=None, context_id=None):
        return self._call_kw(model, method, args, {})
    @http.route(['/web/dataset/call_kw', '/web/dataset/call_kw/<path:path>'], type='json', auth="user")
    def call_kw(self, model, method, args, kwargs, path=None):
        return self._call_kw(model, method, args, kwargs)
    @http.route('/web/dataset/call_button', type='json', auth="user")
    def call_button(self, model, method, args, domain_id=None, context_id=None):
        # Buttons may return an action dict for the client to execute.
        action = self._call_kw(model, method, args, {})
        if isinstance(action, dict) and action.get('type') != '':
            return clean_action(action)
        return False
    @http.route('/web/dataset/exec_workflow', type='json', auth="user")
    def exec_workflow(self, model, id, signal):
        return request.session.exec_workflow(model, id, signal)
    @http.route('/web/dataset/resequence', type='json', auth="user")
    def resequence(self, model, ids, field='sequence', offset=0):
        """ Re-sequences a number of records in the model, by their ids

        The re-sequencing starts at the first model of ``ids``, the sequence
        number is incremented by one after each record and starts at ``offset``

        :param ids: identifiers of the records to resequence, in the new sequence order
        :type ids: list(id)
        :param str field: field used for sequence specification, defaults to
                          "sequence"
        :param int offset: sequence number for first record in ``ids``, allows
                           starting the resequencing from an arbitrary number,
                           defaults to ``0``
        """
        m = request.session.model(model)
        if not m.fields_get([field]):
            # The model has no such field: nothing to resequence.
            return False
        # python 2.6 has no start parameter
        for i, id in enumerate(ids):
            m.write(id, { field: i + offset })
        return True
class View(http.Controller):
    @http.route('/web/view/add_custom', type='json', auth="user")
    def add_custom(self, view_id, arch):
        """Persist a user-customized view arch for the current user."""
        values = {
            'user_id': request.session.uid,
            'ref_id': view_id,
            'arch': arch,
        }
        request.session.model('ir.ui.view.custom').create(values, request.context)
        return {'result': True}
class TreeView(View):
    @http.route('/web/treeview/action', type='json', auth="user")
    def action(self, model, id):
        """Return the ir.values 'tree_but_open' actions bound to a record."""
        bindings = [(model, id)]
        return load_actions_from_ir_values('action', 'tree_but_open', bindings, False)
class Binary(http.Controller):
    """HTTP endpoints serving binary data: attachment/field content, images
    (optionally resized), file uploads and the company logo."""
    def placeholder(self, image='placeholder.png'):
        # Raw bytes of a static placeholder image shipped with the web addon.
        addons_path = http.addons_manifest['web']['addons_path']
        return open(os.path.join(addons_path, 'web', 'static', 'src', 'img', image), 'rb').read()
    @http.route(['/web/content',
        '/web/content/<string:xmlid>',
        '/web/content/<string:xmlid>/<string:filename>',
        '/web/content/<int:id>',
        '/web/content/<int:id>/<string:filename>',
        '/web/content/<int:id>-<string:unique>',
        '/web/content/<int:id>-<string:unique>/<string:filename>',
        '/web/content/<string:model>/<int:id>/<string:field>',
        '/web/content/<string:model>/<int:id>/<string:field>/<string:filename>'], type='http', auth="public")
    def content_common(self, xmlid=None, model='ir.attachment', id=None, field='datas', filename=None, filename_field='datas_fname', unique=None, mimetype=None, download=None, data=None, token=None):
        # binary_content() handles record lookup, access control and cache
        # validation; translate its (status, headers, content) triple into an
        # HTTP response. ``content`` is base64 on 200, a URL on 301.
        status, headers, content = binary_content(xmlid=xmlid, model=model, id=id, field=field, unique=unique, filename=filename, filename_field=filename_field, download=download, mimetype=mimetype)
        if status == 304:
            # Not modified: the client's cached copy is still valid.
            response = werkzeug.wrappers.Response(status=status, headers=headers)
        elif status == 301:
            return werkzeug.utils.redirect(content, code=301)
        elif status != 200:
            response = request.not_found()
        else:
            content_base64 = base64.b64decode(content)
            headers.append(('Content-Length', len(content_base64)))
            response = request.make_response(content_base64, headers)
        if token:
            # The fileToken cookie lets the web client detect the download end.
            response.set_cookie('fileToken', token)
        return response
    @http.route(['/web/image',
        '/web/image/<string:xmlid>',
        '/web/image/<string:xmlid>/<string:filename>',
        '/web/image/<string:xmlid>/<int:width>x<int:height>',
        '/web/image/<string:xmlid>/<int:width>x<int:height>/<string:filename>',
        '/web/image/<string:model>/<int:id>/<string:field>',
        '/web/image/<string:model>/<int:id>/<string:field>/<string:filename>',
        '/web/image/<string:model>/<int:id>/<string:field>/<int:width>x<int:height>',
        '/web/image/<string:model>/<int:id>/<string:field>/<int:width>x<int:height>/<string:filename>',
        '/web/image/<int:id>',
        '/web/image/<int:id>/<string:filename>',
        '/web/image/<int:id>/<int:width>x<int:height>',
        '/web/image/<int:id>/<int:width>x<int:height>/<string:filename>',
        '/web/image/<int:id>-<string:unique>',
        '/web/image/<int:id>-<string:unique>/<string:filename>',
        '/web/image/<int:id>-<string:unique>/<int:width>x<int:height>',
        '/web/image/<int:id>-<string:unique>/<int:width>x<int:height>/<string:filename>'], type='http', auth="public")
    def content_image(self, xmlid=None, model='ir.attachment', id=None, field='datas', filename_field='datas_fname', unique=None, filename=None, mimetype=None, download=None, width=0, height=0):
        # Like content_common but image-aware: optional server-side resize
        # and a placeholder fallback when the record has no image data.
        status, headers, content = binary_content(xmlid=xmlid, model=model, id=id, field=field, unique=unique, filename=filename, filename_field=filename_field, download=download, mimetype=mimetype, default_mimetype='image/png')
        if status == 304:
            return werkzeug.wrappers.Response(status=304, headers=headers)
        elif status == 301:
            return werkzeug.utils.redirect(content, code=301)
        elif status != 200 and download:
            return request.not_found()
        if content and width and height:
            # resize maximum 500*500
            if width > 500:
                width = 500
            if height > 500:
                height = 500
            content = openerp.tools.image_resize_image(base64_source=content, size=(width, height), encoding='base64', filetype='PNG')
        # Fall back to the static placeholder when the field is empty.
        image_base64 = content and base64.b64decode(content) or self.placeholder()
        headers.append(('Content-Length', len(image_base64)))
        response = request.make_response(image_base64, headers)
        response.status_code = status
        return response
    # backward compatibility
    @http.route(['/web/binary/image'], type='http', auth="public")
    def content_image_backward_compatibility(self, model, id, field, resize=None, **kw):
        # NOTE(review): ``resize`` is split but not converted to int here —
        # width/height reach content_image as strings; confirm intended.
        width = None
        height = None
        if resize:
            width, height = resize.split(",")
        return self.content_image(model=model, id=id, field=field, width=width, height=height)
    @http.route('/web/binary/upload', type='http', auth="user")
    @serialize_exception
    def upload(self, callback, ufile):
        # Returns a small HTML page that notifies the opener window via a
        # jQuery event carrying (size, name, mimetype, base64 data).
        # TODO: might be useful to have a configuration flag for max-length file uploads
        out = """<script language="javascript" type="text/javascript">
                    var win = window.top.window;
                    win.jQuery(win).trigger(%s, %s);
                </script>"""
        try:
            data = ufile.read()
            args = [len(data), ufile.filename,
                    ufile.content_type, base64.b64encode(data)]
        except Exception, e:
            args = [False, e.message]
        return out % (json.dumps(callback), json.dumps(args))
    @http.route('/web/binary/upload_attachment', type='http', auth="user")
    @serialize_exception
    def upload_attachment(self, callback, model, id, ufile):
        # Create an ir.attachment linked to (model, id) from the uploaded
        # file and notify the opener window like upload() does.
        Model = request.session.model('ir.attachment')
        out = """<script language="javascript" type="text/javascript">
                    var win = window.top.window;
                    win.jQuery(win).trigger(%s, %s);
                </script>"""
        try:
            attachment_id = Model.create({
                'name': ufile.filename,
                'datas': base64.encodestring(ufile.read()),
                'datas_fname': ufile.filename,
                'res_model': model,
                'res_id': int(id)
            }, request.context)
            args = {
                'filename': ufile.filename,
                'mimetype': ufile.content_type,
                'id': attachment_id
            }
        except Exception:
            args = {'error': "Something horrible happened"}
            _logger.exception("Fail to upload attachment %s" % ufile.filename)
        return out % (json.dumps(callback), json.dumps(args))
    @http.route([
        '/web/binary/company_logo',
        '/logo',
        '/logo.png',
    ], type='http', auth="none", cors="*")
    def company_logo(self, dbname=None, **kw):
        # Serve the logo of the current user's company, or a static default
        # when no database/user/logo is available.
        imgname = 'logo.png'
        placeholder = functools.partial(get_module_resource, 'web', 'static', 'src', 'img')
        uid = None
        if request.session.db:
            dbname = request.session.db
            uid = request.session.uid
        elif dbname is None:
            dbname = db_monodb()
        if not uid:
            uid = openerp.SUPERUSER_ID
        if not dbname:
            response = http.send_file(placeholder(imgname))
        else:
            try:
                # create an empty registry
                registry = openerp.modules.registry.Registry(dbname)
                with registry.cursor() as cr:
                    cr.execute("""SELECT c.logo_web, c.write_date
                                    FROM res_users u
                               LEFT JOIN res_company c
                                      ON c.id = u.company_id
                                   WHERE u.id = %s
                               """, (uid,))
                    row = cr.fetchone()
                    if row and row[0]:
                        image_data = StringIO(str(row[0]).decode('base64'))
                        # write_date is used as mtime so browsers can cache.
                        response = http.send_file(image_data, filename=imgname, mtime=row[1])
                    else:
                        response = http.send_file(placeholder('nologo.png'))
            except Exception:
                # Any failure (bad db, missing table, ...) degrades to the
                # default logo instead of erroring out.
                response = http.send_file(placeholder(imgname))
        return response
class Action(http.Controller):
    """Endpoints to load and execute ir.actions records."""
    @http.route('/web/action/load', type='json', auth="user")
    def load(self, action_id, do_not_eval=False, additional_context=None):
        # ``action_id`` may be a numeric id or an XML id "module.name";
        # resolution failures force a failed read and a False result.
        Actions = request.session.model('ir.actions.actions')
        value = False
        try:
            action_id = int(action_id)
        except ValueError:
            try:
                module, xmlid = action_id.split('.', 1)
                model, action_id = request.session.model('ir.model.data').get_object_reference(module, xmlid)
                assert model.startswith('ir.actions.')
            except Exception:
                action_id = 0   # force failed read
        base_action = Actions.read([action_id], ['type'], request.context)
        if base_action:
            ctx = request.context
            action_type = base_action[0]['type']
            if action_type == 'ir.actions.report.xml':
                # Avoid shipping the report binary with the action definition.
                ctx.update({'bin_size': True})
            if additional_context:
                ctx.update(additional_context)
            # Re-read through the concrete action model to get all fields.
            action = request.session.model(action_type).read([action_id], False, ctx)
            if action:
                value = clean_action(action[0])
        return value
    @http.route('/web/action/run', type='json', auth="user")
    def run(self, action_id):
        # Server actions may return a follow-up action for the client.
        return_action = request.session.model('ir.actions.server').run(
            [action_id], request.context)
        if return_action:
            return clean_action(return_action)
        else:
            return False
class Export(http.Controller):
    """Endpoints backing the export dialog: available formats, exportable
    field discovery and saved export lists."""
    @http.route('/web/export/formats', type='json', auth="user")
    def formats(self):
        """ Returns all valid export formats

        :returns: for each export format, a pair of identifier and printable name
        :rtype: [(str, str)]
        """
        return [
            {'tag': 'csv', 'label': 'CSV'},
            {'tag': 'xls', 'label': 'Excel', 'error': None if xlwt else "XLWT required"},
        ]
    def fields_get(self, model):
        # Full fields description of ``model`` in the request context.
        Model = request.session.model(model)
        fields = Model.fields_get(False, request.context)
        return fields
    @http.route('/web/export/get_fields', type='json', auth="user")
    def get_fields(self, model, prefix='', parent_name= '',
                   import_compat=True, parent_field_type=None,
                   exclude=None):
        # Describe one level of the exportable-fields tree for ``model``.
        # In import-compatible mode: m2o sub-fields are not expanded,
        # readonly and excluded fields are skipped, and 'id' is hidden.
        if import_compat and parent_field_type == "many2one":
            fields = {}
        else:
            fields = self.fields_get(model)
        if import_compat:
            fields.pop('id', None)
        else:
            fields['.id'] = fields.pop('id', {'string': 'ID'})
        fields_sequence = sorted(fields.iteritems(),
            key=lambda field: openerp.tools.ustr(field[1].get('string', '')))
        records = []
        for field_name, field in fields_sequence:
            if import_compat:
                if exclude and field_name in exclude:
                    continue
                if field.get('readonly'):
                    # If none of the field's states unsets readonly, skip the field
                    if all(dict(attrs).get('readonly', True)
                           for attrs in field.get('states', {}).values()):
                        continue
            if not field.get('exportable', True):
                continue
            id = prefix + (prefix and '/'or '') + field_name
            name = parent_name + (parent_name and '/' or '') + field['string']
            record = {'id': id, 'string': name,
                      'value': id, 'children': False,
                      'field_type': field.get('type'),
                      'required': field.get('required'),
                      'relation_field': field.get('relation_field')}
            records.append(record)
            if len(name.split('/')) < 3 and 'relation' in field:
                # Relational field: the client can expand it one level deeper.
                ref = field.pop('relation')
                record['value'] += '/id'
                record['params'] = {'model': ref, 'prefix': id, 'name': name}
                if not import_compat or field['type'] == 'one2many':
                    # m2m field in import_compat is childless
                    record['children'] = True
        return records
    @http.route('/web/export/namelist', type='json', auth="user")
    def namelist(self, model, export_id):
        # TODO: namelist really has no reason to be in Python (although itertools.groupby helps)
        export = request.session.model("ir.exports").read([export_id])[0]
        export_fields_list = request.session.model("ir.exports.line").read(
            export['export_fields'])
        fields_data = self.fields_info(
            model, map(operator.itemgetter('name'), export_fields_list))
        return [
            {'name': field['name'], 'label': fields_data[field['name']]}
            for field in export_fields_list
        ]
    def fields_info(self, model, export_fields):
        # Map each exported field path to its human-readable label.
        info = {}
        fields = self.fields_get(model)
        if ".id" in export_fields:
            fields['.id'] = fields.pop('id', {'string': 'ID'})
        # To make fields retrieval more efficient, fetch all sub-fields of a
        # given field at the same time. Because the order in the export list is
        # arbitrary, this requires ordering all sub-fields of a given field
        # together so they can be fetched at the same time
        #
        # Works the following way:
        # * sort the list of fields to export, the default sorting order will
        #   put the field itself (if present, for xmlid) and all of its
        #   sub-fields right after it
        # * then, group on: the first field of the path (which is the same for
        #   a field and for its subfields and the length of splitting on the
        #   first '/', which basically means grouping the field on one side and
        #   all of the subfields on the other. This way, we have the field (for
        #   the xmlid) with length 1, and all of the subfields with the same
        #   base but a length "flag" of 2
        # * if we have a normal field (length 1), just add it to the info
        #   mapping (with its string) as-is
        # * otherwise, recursively call fields_info via graft_subfields.
        #   all graft_subfields does is take the result of fields_info (on the
        #   field's model) and prepend the current base (current field), which
        #   rebuilds the whole sub-tree for the field
        #
        # result: because we're not fetching the fields_get for half the
        # database models, fetching a namelist with a dozen fields (including
        # relational data) falls from ~6s to ~300ms (on the leads model).
        # export lists with no sub-fields (e.g. import_compatible lists with
        # no o2m) are even more efficient (from the same 6s to ~170ms, as
        # there's a single fields_get to execute)
        for (base, length), subfields in itertools.groupby(
                sorted(export_fields),
                lambda field: (field.split('/', 1)[0], len(field.split('/', 1)))):
            subfields = list(subfields)
            if length == 2:
                # subfields is a seq of $base/*rest, and not loaded yet
                info.update(self.graft_subfields(
                    fields[base]['relation'], base, fields[base]['string'],
                    subfields
                ))
            elif base in fields:
                info[base] = fields[base]['string']
        return info
    def graft_subfields(self, model, prefix, prefix_string, fields):
        # Recurse into the related model and re-prefix the resulting paths.
        export_fields = [field.split('/', 1)[1] for field in fields]
        return (
            (prefix + '/' + k, prefix_string + '/' + v)
            for k, v in self.fields_info(model, export_fields).iteritems())
class ExportFormat(object):
    """Shared export pipeline; subclasses supply content_type, filename()
    and from_data() for a concrete output format."""
    # When True, export_data returns raw (typed) values instead of strings.
    raw_data = False
    @property
    def content_type(self):
        """ Provides the format's content type """
        raise NotImplementedError()
    def filename(self, base):
        """ Creates a valid filename for the format (with extension) from the
         provided base name (exension-less)
        """
        raise NotImplementedError()
    def from_data(self, fields, rows):
        """ Conversion method from OpenERP's export data to whatever the
        current export class outputs

        :params list fields: a list of fields to export
        :params list rows: a list of records to export
        :returns:
        :rtype: bytes
        """
        raise NotImplementedError()
    def base(self, data, token):
        # ``data`` is the JSON-encoded export request built by the client:
        # model, field list, ids (or domain when ids is empty), import flag.
        params = json.loads(data)
        model, fields, ids, domain, import_compat = \
            operator.itemgetter('model', 'fields', 'ids', 'domain',
                                'import_compat')(
                params)
        Model = request.session.model(model)
        context = dict(request.context or {}, **params.get('context', {}))
        ids = ids or Model.search(domain, 0, False, False, context)
        if not request.env[model]._is_an_ordinary_table():
            # Models backed by SQL views have no usable 'id' column.
            fields = [field for field in fields if field['name'] != 'id']
        field_names = map(operator.itemgetter('name'), fields)
        import_data = Model.export_data(ids, field_names, self.raw_data, context=context).get('datas',[])
        if import_compat:
            # Import-compatible exports use technical names as headers.
            columns_headers = field_names
        else:
            columns_headers = [val['label'].strip() for val in fields]
        return request.make_response(self.from_data(columns_headers, import_data),
            headers=[('Content-Disposition',
                            content_disposition(self.filename(model))),
                     ('Content-Type', self.content_type)],
            cookies={'fileToken': token})
class CSVExport(ExportFormat, http.Controller):
    """Export backend producing UTF-8 encoded CSV files."""
    @http.route('/web/export/csv', type='http', auth="user")
    @serialize_exception
    def index(self, data, token):
        return self.base(data, token)
    @property
    def content_type(self):
        return 'text/csv;charset=utf8'
    def filename(self, base):
        return base + '.csv'
    def from_data(self, fields, rows):
        """Serialize ``rows`` (with ``fields`` as header line) to CSV text."""
        buf = StringIO()
        writer = csv.writer(buf, quoting=csv.QUOTE_ALL)
        writer.writerow([name.encode('utf-8') for name in fields])
        for record in rows:
            encoded_row = []
            for cell in record:
                if isinstance(cell, basestring):
                    # Newlines/tabs would break single-line CSV cells.
                    cell = cell.replace('\n', ' ').replace('\t', ' ')
                    try:
                        cell = cell.encode('utf-8')
                    except UnicodeError:
                        pass
                if cell is False:
                    cell = None
                encoded_row.append(cell)
            writer.writerow(encoded_row)
        buf.seek(0)
        output = buf.read()
        buf.close()
        return output
class ExcelExport(ExportFormat, http.Controller):
    """Export backend producing .xls workbooks via xlwt."""
    # Excel needs raw data to correctly handle numbers and date values
    raw_data = True
    @http.route('/web/export/xls', type='http', auth="user")
    @serialize_exception
    def index(self, data, token):
        return self.base(data, token)
    @property
    def content_type(self):
        return 'application/vnd.ms-excel'
    def filename(self, base):
        return base + '.xls'
    def from_data(self, fields, rows):
        # Build a single-sheet workbook: header row, then one row per
        # record; date/datetime cells get a dedicated number format.
        workbook = xlwt.Workbook()
        worksheet = workbook.add_sheet('Sheet 1')
        for i, fieldname in enumerate(fields):
            worksheet.write(0, i, fieldname)
            worksheet.col(i).width = 8000 # around 220 pixels
        base_style = xlwt.easyxf('align: wrap yes')
        date_style = xlwt.easyxf('align: wrap yes', num_format_str='YYYY-MM-DD')
        datetime_style = xlwt.easyxf('align: wrap yes', num_format_str='YYYY-MM-DD HH:mm:SS')
        for row_index, row in enumerate(rows):
            for cell_index, cell_value in enumerate(row):
                cell_style = base_style
                if isinstance(cell_value, basestring):
                    # Bare carriage returns are not valid in xls cell text.
                    cell_value = re.sub("\r", " ", cell_value)
                elif isinstance(cell_value, datetime.datetime):
                    cell_style = datetime_style
                elif isinstance(cell_value, datetime.date):
                    cell_style = date_style
                worksheet.write(row_index + 1, cell_index, cell_value, cell_style)
        fp = StringIO()
        workbook.save(fp)
        fp.seek(0)
        data = fp.read()
        fp.close()
        return data
class Reports(http.Controller):
    """Run a report through the legacy ``report`` RPC service and stream
    the result back as a file download."""
    # Seconds to wait between two report_get polls.
    POLLING_DELAY = 0.25
    # Report output format -> HTTP Content-Type.
    TYPES_MAPPING = {
        'doc': 'application/vnd.ms-word',
        'html': 'text/html',
        'odt': 'application/vnd.oasis.opendocument.text',
        'pdf': 'application/pdf',
        'sxw': 'application/vnd.sun.xml.writer',
        'xls': 'application/vnd.ms-excel',
    }
    @http.route('/web/report', type='http', auth="user")
    @serialize_exception
    def index(self, action, token):
        # ``action`` is the JSON-serialized report action from the client.
        action = json.loads(action)
        report_srv = request.session.proxy("report")
        context = dict(request.context)
        context.update(action["context"])
        report_data = {}
        report_ids = context.get("active_ids", None)
        if 'report_type' in action:
            report_data['report_type'] = action['report_type']
        if 'datas' in action:
            if 'ids' in action['datas']:
                report_ids = action['datas'].pop('ids')
            report_data.update(action['datas'])
        # Schedule the report, then poll the service until it is rendered.
        report_id = report_srv.report(
            request.session.db, request.session.uid, request.session.password,
            action["report_name"], report_ids,
            report_data, context)
        report_struct = None
        while True:
            report_struct = report_srv.report_get(
                request.session.db, request.session.uid, request.session.password, report_id)
            if report_struct["state"]:
                break
            time.sleep(self.POLLING_DELAY)
        report = base64.b64decode(report_struct['result'])
        if report_struct.get('code') == 'zlib':
            # The service may compress large results.
            report = zlib.decompress(report)
        report_mimetype = self.TYPES_MAPPING.get(
            report_struct['format'], 'octet-stream')
        file_name = action.get('name', 'report')
        if 'name' not in action:
            # Derive a file name from the report definition when possible.
            reports = request.session.model('ir.actions.report.xml')
            res_id = reports.search([('report_name', '=', action['report_name']),],
                                    0, False, False, context)
            if len(res_id) > 0:
                file_name = reports.read(res_id[0], ['name'], context)['name']
            else:
                file_name = action['report_name']
        file_name = '%s.%s' % (file_name, report_struct['format'])
        return request.make_response(report,
             headers=[
                 ('Content-Disposition', content_disposition(file_name)),
                 ('Content-Type', report_mimetype),
                 ('Content-Length', len(report))],
             cookies={'fileToken': token})
class Apps(http.Controller):
    """Redirect /apps/<app> to the module form view of the matching
    'base' module, via a saved session action."""
    @http.route('/apps/<app>', auth='user')
    def get_app_url(self, req, app):
        act_window_obj = request.session.model('ir.actions.act_window')
        ir_model_data = request.session.model('ir.model.data')
        try:
            # Base action listing modules; fall back to False if missing.
            action_id = ir_model_data.get_object_reference('base', 'open_module_tree')[1]
            action = act_window_obj.read(action_id, ['name', 'type', 'res_model', 'view_mode', 'view_type', 'context', 'views', 'domain'])
            action['target'] = 'current'
        except ValueError:
            action = False
        try:
            # Modules installed by base have xml ids of the form module_<name>.
            app_id = ir_model_data.get_object_reference('base', 'module_%s' % app)[1]
        except ValueError:
            app_id = False
        if action and app_id:
            # Narrow the list action down to the single module's form view.
            action['res_id'] = app_id
            action['view_mode'] = 'form'
            action['views'] = [(False, u'form')]
        sakey = Session().save_session_action(action)
        debug = '?debug' if req.debug else ''
        return werkzeug.utils.redirect('/web{0}#sa={1}'.format(debug, sakey))
|
LiveZenLK/CeygateERP
|
addons/web/controllers/main.py
|
Python
|
gpl-3.0
| 65,553
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import werkzeug.urls
from odoo import fields
from odoo import http
from odoo.http import request
from odoo.addons.website.models.website import unslug
from odoo.tools.translate import _
class WebsiteMembership(http.Controller):
    """Public website pages listing published members, filterable by
    membership product and country, with pagination and an optional
    Google-map view."""
    # Number of members displayed per page.
    _references_per_page = 20

    @http.route([
        '/members',
        '/members/page/<int:page>',
        '/members/association/<membership_id>',
        '/members/association/<membership_id>/page/<int:page>',
        '/members/country/<int:country_id>',
        '/members/country/<country_name>-<int:country_id>',
        '/members/country/<int:country_id>/page/<int:page>',
        '/members/country/<country_name>-<int:country_id>/page/<int:page>',
        '/members/association/<membership_id>/country/<country_name>-<int:country_id>',
        '/members/association/<membership_id>/country/<int:country_id>',
        '/members/association/<membership_id>/country/<country_name>-<int:country_id>/page/<int:page>',
        '/members/association/<membership_id>/country/<int:country_id>/page/<int:page>',
    ], type='http', auth="public", website=True)
    def members(self, membership_id=None, country_name=None, country_id=0, page=1, **post):
        """Render the member directory.

        :param membership_id: membership product id, the string 'free', or
                              None for all memberships
        :param country_id: restrict the listing to one country when truthy
        :param page: 1-based pager page
        :param post: may carry a 'search'/'name' text filter
        """
        Product = request.env['product.product']
        Country = request.env['res.country']
        MembershipLine = request.env['membership.membership_line']
        Partner = request.env['res.partner']
        post_name = post.get('search') or post.get('name', '')
        current_country = None
        today = fields.Date.today()
        # base domain for groupby / searches: published partners with a
        # currently-valid, paid membership line
        base_line_domain = [
            ("partner.website_published", "=", True), ('state', '=', 'paid'),
            ('date_to', '>=', today), ('date_from', '<=', today)
        ]
        if membership_id and membership_id != 'free':
            membership_id = int(membership_id)
            base_line_domain.append(('membership_id', '=', membership_id))
        if post_name:
            base_line_domain += ['|', ('partner.name', 'ilike', post_name), ('partner.website_description', 'ilike', post_name)]
        # group by country, based on all customers (base domain)
        if membership_id != 'free':
            membership_lines = MembershipLine.sudo().search(base_line_domain)
            country_domain = [('member_lines', 'in', membership_lines.ids)]
            if not membership_id:
                # No specific membership: also count free members per country.
                country_domain = ['|', country_domain[0], ('membership_state', '=', 'free')]
        else:
            country_domain = [('membership_state', '=', 'free')]
        if post_name:
            country_domain += ['|', ('name', 'ilike', post_name), ('website_description', 'ilike', post_name)]
        countries = Partner.sudo().read_group(country_domain + [("website_published", "=", True)], ["id", "country_id"], groupby="country_id", orderby="country_id")
        countries_total = sum(country_dict['country_id_count'] for country_dict in countries)
        line_domain = list(base_line_domain)
        if country_id:
            line_domain.append(('partner.country_id', '=', country_id))
            current_country = Country.browse(country_id).read(['id', 'name'])[0]
            # Make sure the selected country shows up in the sidebar even
            # when it currently has no matching member.
            if not any(x['country_id'][0] == country_id for x in countries if x['country_id']):
                countries.append({
                    'country_id_count': 0,
                    'country_id': (country_id, current_country["name"])
                })
        # Drop the partners-without-country group, sort by country name and
        # prepend the "All Countries" pseudo entry.
        countries = filter(lambda d: d['country_id'], countries)
        countries.sort(key=lambda d: d['country_id'][1])
        countries.insert(0, {
            'country_id_count': countries_total,
            'country_id': (0, _("All Countries"))
        })
        # format domain for group_by and memberships
        memberships = Product.search([('membership', '=', True)], order="website_sequence")
        # make sure we don't access lines of unpublished memberships
        line_domain.append(('membership_id', 'in', memberships.ids))
        limit = self._references_per_page
        offset = limit * (page - 1)
        count_members = 0
        membership_lines = MembershipLine.sudo()
        # displayed non-free membership lines
        if membership_id != 'free':
            count_members = MembershipLine.sudo().search_count(line_domain)
            if offset <= count_members:
                membership_lines = MembershipLine.sudo().search(line_domain, offset, limit)
        page_partner_ids = set(m.partner.id for m in membership_lines)
        # get google maps localization of partners
        google_map_partner_ids = []
        if request.env.ref('website_membership.opt_index_google_map').customize_show:
            google_map_partner_ids = MembershipLine.search(line_domain).get_published_companies(limit=2000)
        # published partners in the 'free' membership state, with the same
        # text/country filters as the paid lines
        search_domain = [('membership_state', '=', 'free'), ('website_published', '=', True)]
        if post_name:
            search_domain += ['|', ('name', 'ilike', post_name), ('website_description', 'ilike', post_name)]
        if country_id:
            search_domain += [('country_id', '=', country_id)]
        free_partners = Partner.sudo().search(search_domain)
        memberships_data = []
        for membership_record in memberships:
            memberships_data.append({'id': membership_record.id, 'name': membership_record.name})
        memberships_partner_ids = {}
        for line in membership_lines:
            memberships_partner_ids.setdefault(line.membership_id.id, []).append(line.partner.id)
        if free_partners:
            memberships_data.append({'id': 'free', 'name': _('Free Members')})
            if not membership_id or membership_id == 'free':
                # Free members are appended after the paid ones in the pager.
                if count_members < offset + limit:
                    free_start = max(offset - count_members, 0)
                    free_end = max(offset + limit - count_members, 0)
                    memberships_partner_ids['free'] = free_partners.ids[free_start:free_end]
                    page_partner_ids |= set(memberships_partner_ids['free'])
                # BUGFIX: previously used an always-empty local list
                # (free_partner_ids), so free members were excluded from the
                # map and from the total count; use the actual search result.
                google_map_partner_ids += free_partners.ids[:2000 - len(google_map_partner_ids)]
                count_members += len(free_partners)
        google_map_partner_ids = ",".join(map(str, google_map_partner_ids))
        google_maps_api_key = request.env['ir.config_parameter'].sudo().get_param('google_maps_api_key')
        partners = {p.id: p for p in Partner.sudo().browse(list(page_partner_ids))}
        base_url = '/members%s%s' % ('/association/%s' % membership_id if membership_id else '',
                                     '/country/%s' % country_id if country_id else '')
        # request pager for lines
        pager = request.website.pager(url=base_url, total=count_members, page=page, step=limit, scope=7, url_args=post)
        values = {
            'partners': partners,
            'memberships_data': memberships_data,
            'memberships_partner_ids': memberships_partner_ids,
            'membership_id': membership_id,
            'countries': countries,
            'current_country': current_country and [current_country['id'], current_country['name']] or None,
            'current_country_id': current_country and current_country['id'] or 0,
            'google_map_partner_ids': google_map_partner_ids,
            'pager': pager,
            'post': post,
            'search': "?%s" % werkzeug.url_encode(post),
            'search_count': count_members,
            'google_maps_api_key': google_maps_api_key,
        }
        return request.render("website_membership.index", values)

    # Do not use semantic controller due to SUPERUSER_ID
    @http.route(['/members/<partner_id>'], type='http', auth="public", website=True)
    def partners_detail(self, partner_id, **post):
        """Render a single member's page; falls back to the member index
        when the slug does not resolve to a published partner."""
        _, partner_id = unslug(partner_id)
        if partner_id:
            partner = request.env['res.partner'].sudo().browse(partner_id)
            if partner.exists() and partner.website_published:  # TODO should be done with access rules
                values = {}
                values['main_object'] = values['partner'] = partner
                return request.render("website_membership.partner", values)
        return self.members(**post)
|
chienlieu2017/it_management
|
odoo/addons/website_membership/controllers/main.py
|
Python
|
gpl-3.0
| 8,355
|
#!/usr/bin/env python
# coding:utf-8
# sname + birth rule
"""
Copyright (c) 2016-2017 LandGrey (https://github.com/LandGrey/pydictor)
License: GNU GENERAL PUBLIC LICENSE Version 3
"""
from __future__ import unicode_literals
from rules.SDrule import SDrule
from lib.data.data import pyoptions
def SB(sname, birth):
    """Yield short-name + birthday password candidates.

    First re-emits everything the plain SDrule combination produces, then adds
    the {lower-name}{birth-variant}{UPPER-NAME} pattern, each variant also
    repeated with every "trick" suffix from the social-engineering database.
    """
    for candidate in SDrule(sname, birth):
        yield candidate
    for name in sname:
        low, up = name.lower(), name.upper()
        for date in birth:
            # Three spellings of the date: full, without the century, and with
            # zeros stripped from the month/day part.
            variants = (date, date[2:], date[:4] + date[4:].replace('0', ''))
            for variant in variants:
                yield low + variant + up
            for suffix in pyoptions.sedb_trick_suf:
                for variant in variants:
                    yield low + variant + up + suffix
# You can continue to add new and useful rules
#
|
LandGrey/pydictor
|
rules/SB.py
|
Python
|
gpl-3.0
| 936
|
# Mapping of CTP (Comprehensive Transaction Platform) field-type names to the
# primitive kind ("string", "char", "int" or "double") used when generating
# the Python API binding code.  Each entry is a module-level constant read by
# the generator scripts in this package.
# NOTE(review): this table appears to be auto-generated from the CTP C++
# headers (assumption — TODO confirm); prefer regenerating over hand-editing.

# --- identifiers, names and basic exchange/broker fields ---
TThostFtdcTraderIDType = "string"
TThostFtdcInvestorIDType = "string"
TThostFtdcBrokerIDType = "string"
TThostFtdcBrokerAbbrType = "string"
TThostFtdcBrokerNameType = "string"
TThostFtdcExchangeInstIDType = "string"
TThostFtdcOrderRefType = "string"
TThostFtdcParticipantIDType = "string"
TThostFtdcUserIDType = "string"
TThostFtdcPasswordType = "string"
TThostFtdcClientIDType = "string"
TThostFtdcInstrumentIDType = "string"
TThostFtdcInstrumentCodeType = "string"
TThostFtdcMarketIDType = "string"
TThostFtdcProductNameType = "string"
TThostFtdcExchangeIDType = "string"
TThostFtdcExchangeNameType = "string"
TThostFtdcExchangeAbbrType = "string"
TThostFtdcExchangeFlagType = "string"
TThostFtdcMacAddressType = "string"
TThostFtdcSystemIDType = "string"
TThostFtdcExchangePropertyType = "char"
TThostFtdcDateType = "string"
TThostFtdcTimeType = "string"
TThostFtdcLongTimeType = "string"
TThostFtdcInstrumentNameType = "string"
TThostFtdcSettlementGroupIDType = "string"
TThostFtdcOrderSysIDType = "string"
TThostFtdcTradeIDType = "string"
TThostFtdcCommandTypeType = "string"
TThostFtdcIPAddressType = "string"
TThostFtdcIPPortType = "int"
TThostFtdcProductInfoType = "string"
TThostFtdcProtocolInfoType = "string"
TThostFtdcBusinessUnitType = "string"
TThostFtdcDepositSeqNoType = "string"
TThostFtdcIdentifiedCardNoType = "string"
TThostFtdcIdCardTypeType = "char"
TThostFtdcOrderLocalIDType = "string"
TThostFtdcUserNameType = "string"
TThostFtdcPartyNameType = "string"
TThostFtdcErrorMsgType = "string"
TThostFtdcFieldNameType = "string"
TThostFtdcFieldContentType = "string"
TThostFtdcSystemNameType = "string"
TThostFtdcContentType = "string"
# --- single-char enumeration codes (status flags, order attributes, ...) ---
TThostFtdcInvestorRangeType = "char"
TThostFtdcDepartmentRangeType = "char"
TThostFtdcDataSyncStatusType = "char"
TThostFtdcBrokerDataSyncStatusType = "char"
TThostFtdcExchangeConnectStatusType = "char"
TThostFtdcTraderConnectStatusType = "char"
TThostFtdcFunctionCodeType = "char"
TThostFtdcBrokerFunctionCodeType = "char"
TThostFtdcOrderActionStatusType = "char"
TThostFtdcOrderStatusType = "char"
TThostFtdcOrderSubmitStatusType = "char"
TThostFtdcPositionDateType = "char"
TThostFtdcPositionDateTypeType = "char"
TThostFtdcTradingRoleType = "char"
TThostFtdcProductClassType = "char"
TThostFtdcInstLifePhaseType = "char"
TThostFtdcDirectionType = "char"
TThostFtdcPositionTypeType = "char"
TThostFtdcPosiDirectionType = "char"
TThostFtdcSysSettlementStatusType = "char"
TThostFtdcRatioAttrType = "char"
TThostFtdcHedgeFlagType = "char"
TThostFtdcBillHedgeFlagType = "char"
TThostFtdcClientIDTypeType = "char"
TThostFtdcOrderPriceTypeType = "char"
TThostFtdcOffsetFlagType = "char"
TThostFtdcForceCloseReasonType = "char"
TThostFtdcOrderTypeType = "char"
TThostFtdcTimeConditionType = "char"
TThostFtdcVolumeConditionType = "char"
TThostFtdcContingentConditionType = "char"
TThostFtdcActionFlagType = "char"
TThostFtdcTradingRightType = "char"
TThostFtdcOrderSourceType = "char"
TThostFtdcTradeTypeType = "char"
TThostFtdcPriceSourceType = "char"
TThostFtdcInstrumentStatusType = "char"
TThostFtdcInstStatusEnterReasonType = "char"
# --- integral counters, sequence numbers and sizes ---
TThostFtdcOrderActionRefType = "int"
TThostFtdcInstallCountType = "int"
TThostFtdcInstallIDType = "int"
TThostFtdcErrorIDType = "int"
TThostFtdcSettlementIDType = "int"
TThostFtdcVolumeType = "int"
TThostFtdcFrontIDType = "int"
TThostFtdcSessionIDType = "int"
TThostFtdcSequenceNoType = "int"
TThostFtdcCommandNoType = "int"
TThostFtdcMillisecType = "int"
TThostFtdcVolumeMultipleType = "int"
TThostFtdcTradingSegmentSNType = "int"
TThostFtdcRequestIDType = "int"
TThostFtdcYearType = "int"
TThostFtdcMonthType = "int"
TThostFtdcBoolType = "int"
# --- floating-point money/price/ratio fields ---
TThostFtdcPriceType = "double"
TThostFtdcCombOffsetFlagType = "string"
TThostFtdcCombHedgeFlagType = "string"
TThostFtdcRatioType = "double"
TThostFtdcMoneyType = "double"
TThostFtdcLargeVolumeType = "double"
TThostFtdcSequenceSeriesType = "int"
TThostFtdcCommPhaseNoType = "int"
TThostFtdcSequenceLabelType = "string"
TThostFtdcUnderlyingMultipleType = "double"
TThostFtdcPriorityType = "int"
# --- investor contact / registration data ---
TThostFtdcContractCodeType = "string"
TThostFtdcCityType = "string"
TThostFtdcIsStockType = "string"
TThostFtdcChannelType = "string"
TThostFtdcAddressType = "string"
TThostFtdcZipCodeType = "string"
TThostFtdcTelephoneType = "string"
TThostFtdcFaxType = "string"
TThostFtdcMobileType = "string"
TThostFtdcEMailType = "string"
TThostFtdcMemoType = "string"
TThostFtdcCompanyCodeType = "string"
TThostFtdcWebsiteType = "string"
TThostFtdcTaxNoType = "string"
TThostFtdcBatchStatusType = "char"
TThostFtdcPropertyIDType = "string"
TThostFtdcPropertyNameType = "string"
TThostFtdcLicenseNoType = "string"
TThostFtdcAgentIDType = "string"
TThostFtdcAgentNameType = "string"
TThostFtdcAgentGroupIDType = "string"
TThostFtdcAgentGroupNameType = "string"
TThostFtdcReturnStyleType = "char"
TThostFtdcReturnPatternType = "char"
TThostFtdcReturnLevelType = "char"
TThostFtdcReturnStandardType = "char"
TThostFtdcMortgageTypeType = "char"
TThostFtdcInvestorSettlementParamIDType = "char"
TThostFtdcExchangeSettlementParamIDType = "char"
TThostFtdcSystemParamIDType = "char"
TThostFtdcTradeParamIDType = "char"
TThostFtdcSettlementParamValueType = "string"
TThostFtdcCounterIDType = "string"
TThostFtdcInvestorGroupNameType = "string"
TThostFtdcBrandCodeType = "string"
TThostFtdcWarehouseType = "string"
TThostFtdcProductDateType = "string"
TThostFtdcGradeType = "string"
TThostFtdcClassifyType = "string"
TThostFtdcPositionType = "string"
TThostFtdcYieldlyType = "string"
TThostFtdcWeightType = "string"
TThostFtdcSubEntryFundNoType = "int"
TThostFtdcFileIDType = "char"
TThostFtdcFileNameType = "string"
TThostFtdcFileTypeType = "char"
TThostFtdcFileFormatType = "char"
TThostFtdcFileUploadStatusType = "char"
TThostFtdcTransferDirectionType = "char"
TThostFtdcUploadModeType = "string"
# --- banking / fund-transfer fields ---
TThostFtdcAccountIDType = "string"
TThostFtdcBankFlagType = "string"
TThostFtdcBankAccountType = "string"
TThostFtdcOpenNameType = "string"
TThostFtdcOpenBankType = "string"
TThostFtdcBankNameType = "string"
TThostFtdcPublishPathType = "string"
TThostFtdcOperatorIDType = "string"
TThostFtdcMonthCountType = "int"
TThostFtdcAdvanceMonthArrayType = "string"
TThostFtdcDateExprType = "string"
TThostFtdcInstrumentIDExprType = "string"
TThostFtdcInstrumentNameExprType = "string"
TThostFtdcSpecialCreateRuleType = "char"
TThostFtdcBasisPriceTypeType = "char"
TThostFtdcProductLifePhaseType = "char"
TThostFtdcDeliveryModeType = "char"
TThostFtdcLogLevelType = "string"
TThostFtdcProcessNameType = "string"
TThostFtdcOperationMemoType = "string"
TThostFtdcFundIOTypeType = "char"
TThostFtdcFundTypeType = "char"
TThostFtdcFundDirectionType = "char"
TThostFtdcFundStatusType = "char"
TThostFtdcBillNoType = "string"
TThostFtdcBillNameType = "string"
TThostFtdcPublishStatusType = "char"
TThostFtdcEnumValueIDType = "string"
TThostFtdcEnumValueTypeType = "string"
TThostFtdcEnumValueLabelType = "string"
TThostFtdcEnumValueResultType = "string"
TThostFtdcSystemStatusType = "char"
TThostFtdcSettlementStatusType = "char"
TThostFtdcRangeIntTypeType = "string"
TThostFtdcRangeIntFromType = "string"
TThostFtdcRangeIntToType = "string"
TThostFtdcFunctionIDType = "string"
TThostFtdcFunctionValueCodeType = "string"
TThostFtdcFunctionNameType = "string"
TThostFtdcRoleIDType = "string"
TThostFtdcRoleNameType = "string"
TThostFtdcDescriptionType = "string"
TThostFtdcCombineIDType = "string"
TThostFtdcCombineTypeType = "string"
TThostFtdcInvestorTypeType = "char"
TThostFtdcBrokerTypeType = "char"
TThostFtdcRiskLevelType = "char"
TThostFtdcFeeAcceptStyleType = "char"
TThostFtdcPasswordTypeType = "char"
TThostFtdcAlgorithmType = "char"
TThostFtdcIncludeCloseProfitType = "char"
TThostFtdcAllWithoutTradeType = "char"
TThostFtdcCommentType = "string"
TThostFtdcVersionType = "string"
TThostFtdcTradeCodeType = "string"
TThostFtdcTradeDateType = "string"
TThostFtdcTradeTimeType = "string"
TThostFtdcTradeSerialType = "string"
TThostFtdcTradeSerialNoType = "int"
TThostFtdcFutureIDType = "string"
TThostFtdcBankIDType = "string"
TThostFtdcBankBrchIDType = "string"
TThostFtdcBankBranchIDType = "string"
TThostFtdcOperNoType = "string"
TThostFtdcDeviceIDType = "string"
TThostFtdcRecordNumType = "string"
TThostFtdcFutureAccountType = "string"
TThostFtdcFuturePwdFlagType = "char"
TThostFtdcTransferTypeType = "char"
TThostFtdcFutureAccPwdType = "string"
TThostFtdcCurrencyCodeType = "string"
TThostFtdcRetCodeType = "string"
TThostFtdcRetInfoType = "string"
TThostFtdcTradeAmtType = "string"
TThostFtdcUseAmtType = "string"
TThostFtdcFetchAmtType = "string"
TThostFtdcTransferValidFlagType = "char"
TThostFtdcCertCodeType = "string"
TThostFtdcReasonType = "char"
TThostFtdcFundProjectIDType = "string"
TThostFtdcSexType = "char"
TThostFtdcProfessionType = "string"
TThostFtdcNationalType = "string"
TThostFtdcProvinceType = "string"
TThostFtdcRegionType = "string"
TThostFtdcCountryType = "string"
TThostFtdcLicenseNOType = "string"
TThostFtdcCompanyTypeType = "string"
TThostFtdcBusinessScopeType = "string"
TThostFtdcCapitalCurrencyType = "string"
TThostFtdcUserTypeType = "char"
TThostFtdcBranchIDType = "string"
TThostFtdcRateTypeType = "char"
TThostFtdcNoteTypeType = "char"
TThostFtdcSettlementStyleType = "char"
TThostFtdcBrokerDNSType = "string"
TThostFtdcSentenceType = "string"
TThostFtdcSettlementBillTypeType = "char"
TThostFtdcUserRightTypeType = "char"
TThostFtdcMarginPriceTypeType = "char"
TThostFtdcBillGenStatusType = "char"
TThostFtdcAlgoTypeType = "char"
TThostFtdcHandlePositionAlgoIDType = "char"
TThostFtdcFindMarginRateAlgoIDType = "char"
TThostFtdcHandleTradingAccountAlgoIDType = "char"
TThostFtdcPersonTypeType = "char"
TThostFtdcQueryInvestorRangeType = "char"
TThostFtdcInvestorRiskStatusType = "char"
TThostFtdcLegIDType = "int"
TThostFtdcLegMultipleType = "int"
TThostFtdcImplyLevelType = "int"
TThostFtdcClearAccountType = "string"
TThostFtdcOrganNOType = "string"
TThostFtdcClearbarchIDType = "string"
TThostFtdcUserEventTypeType = "char"
TThostFtdcUserEventInfoType = "string"
TThostFtdcCloseStyleType = "char"
TThostFtdcStatModeType = "char"
TThostFtdcParkedOrderStatusType = "char"
TThostFtdcParkedOrderIDType = "string"
TThostFtdcParkedOrderActionIDType = "string"
TThostFtdcVirDealStatusType = "char"
TThostFtdcOrgSystemIDType = "char"
TThostFtdcVirTradeStatusType = "char"
TThostFtdcVirBankAccTypeType = "char"
TThostFtdcVirementStatusType = "char"
TThostFtdcVirementAvailAbilityType = "char"
TThostFtdcVirementTradeCodeType = "char"
TThostFtdcPhotoTypeNameType = "string"
TThostFtdcPhotoTypeIDType = "string"
TThostFtdcPhotoNameType = "string"
TThostFtdcTopicIDType = "int"
TThostFtdcReportTypeIDType = "string"
TThostFtdcCharacterIDType = "string"
# --- AML (anti-money-laundering) reporting fields ---
TThostFtdcAMLParamIDType = "string"
TThostFtdcAMLInvestorTypeType = "string"
TThostFtdcAMLIdCardTypeType = "string"
TThostFtdcAMLTradeDirectType = "string"
TThostFtdcAMLTradeModelType = "string"
# NOTE(review): duplicate of the TThostFtdcAMLParamIDType assignment a few
# lines above (same value) — harmless but redundant; likely a generator
# artifact.
TThostFtdcAMLParamIDType = "string"
TThostFtdcAMLOpParamValueType = "double"
TThostFtdcAMLCustomerCardTypeType = "string"
TThostFtdcAMLInstitutionNameType = "string"
TThostFtdcAMLDistrictIDType = "string"
TThostFtdcAMLRelationShipType = "string"
TThostFtdcAMLInstitutionTypeType = "string"
TThostFtdcAMLInstitutionIDType = "string"
TThostFtdcAMLAccountTypeType = "string"
TThostFtdcAMLTradingTypeType = "string"
TThostFtdcAMLTransactClassType = "string"
TThostFtdcAMLCapitalIOType = "string"
TThostFtdcAMLSiteType = "string"
TThostFtdcAMLCapitalPurposeType = "string"
TThostFtdcAMLReportTypeType = "string"
TThostFtdcAMLSerialNoType = "string"
TThostFtdcAMLStatusType = "string"
TThostFtdcAMLGenStatusType = "char"
TThostFtdcAMLSeqCodeType = "string"
TThostFtdcAMLFileNameType = "string"
TThostFtdcAMLMoneyType = "double"
TThostFtdcAMLFileAmountType = "int"
TThostFtdcCFMMCKeyType = "string"
TThostFtdcCFMMCTokenType = "string"
TThostFtdcCFMMCKeyKindType = "char"
TThostFtdcAMLReportNameType = "string"
TThostFtdcIndividualNameType = "string"
TThostFtdcCurrencyIDType = "string"
TThostFtdcCustNumberType = "string"
TThostFtdcOrganCodeType = "string"
TThostFtdcOrganNameType = "string"
TThostFtdcSuperOrganCodeType = "string"
TThostFtdcSubBranchIDType = "string"
TThostFtdcSubBranchNameType = "string"
TThostFtdcBranchNetCodeType = "string"
TThostFtdcBranchNetNameType = "string"
TThostFtdcOrganFlagType = "string"
TThostFtdcBankCodingForFutureType = "string"
TThostFtdcBankReturnCodeType = "string"
TThostFtdcPlateReturnCodeType = "string"
TThostFtdcBankSubBranchIDType = "string"
TThostFtdcFutureBranchIDType = "string"
TThostFtdcReturnCodeType = "string"
TThostFtdcOperatorCodeType = "string"
TThostFtdcClearDepIDType = "string"
TThostFtdcClearBrchIDType = "string"
TThostFtdcClearNameType = "string"
TThostFtdcBankAccountNameType = "string"
TThostFtdcInvDepIDType = "string"
TThostFtdcInvBrchIDType = "string"
TThostFtdcMessageFormatVersionType = "string"
TThostFtdcDigestType = "string"
TThostFtdcAuthenticDataType = "string"
TThostFtdcPasswordKeyType = "string"
TThostFtdcFutureAccountNameType = "string"
TThostFtdcMobilePhoneType = "string"
TThostFtdcFutureMainKeyType = "string"
TThostFtdcFutureWorkKeyType = "string"
TThostFtdcFutureTransKeyType = "string"
TThostFtdcBankMainKeyType = "string"
TThostFtdcBankWorkKeyType = "string"
TThostFtdcBankTransKeyType = "string"
TThostFtdcBankServerDescriptionType = "string"
TThostFtdcAddInfoType = "string"
TThostFtdcDescrInfoForReturnCodeType = "string"
TThostFtdcCountryCodeType = "string"
TThostFtdcSerialType = "int"
TThostFtdcPlateSerialType = "int"
TThostFtdcBankSerialType = "string"
TThostFtdcCorrectSerialType = "int"
TThostFtdcFutureSerialType = "int"
TThostFtdcApplicationIDType = "int"
TThostFtdcBankProxyIDType = "int"
TThostFtdcFBTCoreIDType = "int"
TThostFtdcServerPortType = "int"
TThostFtdcRepealedTimesType = "int"
TThostFtdcRepealTimeIntervalType = "int"
TThostFtdcTotalTimesType = "int"
TThostFtdcFBTRequestIDType = "int"
TThostFtdcTIDType = "int"
TThostFtdcTradeAmountType = "double"
TThostFtdcCustFeeType = "double"
TThostFtdcFutureFeeType = "double"
TThostFtdcSingleMaxAmtType = "double"
TThostFtdcSingleMinAmtType = "double"
TThostFtdcTotalAmtType = "double"
TThostFtdcCertificationTypeType = "char"
TThostFtdcFileBusinessCodeType = "char"
TThostFtdcCashExchangeCodeType = "char"
TThostFtdcYesNoIndicatorType = "char"
TThostFtdcBanlanceTypeType = "char"
TThostFtdcGenderType = "char"
TThostFtdcFeePayFlagType = "char"
TThostFtdcPassWordKeyTypeType = "char"
TThostFtdcFBTPassWordTypeType = "char"
TThostFtdcFBTEncryModeType = "char"
TThostFtdcBankRepealFlagType = "char"
TThostFtdcBrokerRepealFlagType = "char"
TThostFtdcInstitutionTypeType = "char"
TThostFtdcLastFragmentType = "char"
TThostFtdcBankAccStatusType = "char"
TThostFtdcMoneyAccountStatusType = "char"
TThostFtdcManageStatusType = "char"
TThostFtdcSystemTypeType = "char"
TThostFtdcTxnEndFlagType = "char"
TThostFtdcProcessStatusType = "char"
TThostFtdcCustTypeType = "char"
TThostFtdcFBTTransferDirectionType = "char"
TThostFtdcOpenOrDestroyType = "char"
TThostFtdcAvailabilityFlagType = "char"
TThostFtdcOrganTypeType = "char"
TThostFtdcOrganLevelType = "char"
TThostFtdcProtocalIDType = "char"
TThostFtdcConnectModeType = "char"
TThostFtdcSyncModeType = "char"
TThostFtdcBankAccTypeType = "char"
TThostFtdcFutureAccTypeType = "char"
TThostFtdcOrganStatusType = "char"
TThostFtdcCCBFeeModeType = "char"
TThostFtdcCommApiTypeType = "char"
TThostFtdcServiceIDType = "int"
TThostFtdcServiceLineNoType = "int"
TThostFtdcServiceNameType = "string"
TThostFtdcLinkStatusType = "char"
TThostFtdcCommApiPointerType = "int"
TThostFtdcPwdFlagType = "char"
TThostFtdcSecuAccTypeType = "char"
TThostFtdcTransferStatusType = "char"
TThostFtdcSponsorTypeType = "char"
TThostFtdcReqRspTypeType = "char"
TThostFtdcFBTUserEventTypeType = "char"
TThostFtdcBankIDByBankType = "string"
TThostFtdcBankOperNoType = "string"
TThostFtdcBankCustNoType = "string"
TThostFtdcDBOPSeqNoType = "int"
TThostFtdcTableNameType = "string"
TThostFtdcPKNameType = "string"
TThostFtdcPKValueType = "string"
TThostFtdcDBOperationType = "char"
TThostFtdcSyncFlagType = "char"
TThostFtdcTargetIDType = "string"
TThostFtdcSyncTypeType = "char"
# --- FBE (bank/futures currency-exchange) fields ---
TThostFtdcFBETimeType = "string"
TThostFtdcFBEBankNoType = "string"
TThostFtdcFBECertNoType = "string"
TThostFtdcExDirectionType = "char"
TThostFtdcFBEBankAccountType = "string"
TThostFtdcFBEBankAccountNameType = "string"
TThostFtdcFBEAmtType = "double"
TThostFtdcFBEBusinessTypeType = "string"
TThostFtdcFBEPostScriptType = "string"
TThostFtdcFBERemarkType = "string"
TThostFtdcExRateType = "double"
TThostFtdcFBEResultFlagType = "char"
TThostFtdcFBERtnMsgType = "string"
TThostFtdcFBEExtendMsgType = "string"
TThostFtdcFBEBusinessSerialType = "string"
TThostFtdcFBESystemSerialType = "string"
TThostFtdcFBETotalExCntType = "int"
TThostFtdcFBEExchStatusType = "char"
TThostFtdcFBEFileFlagType = "char"
TThostFtdcFBEAlreadyTradeType = "char"
TThostFtdcFBEOpenBankType = "string"
TThostFtdcFBEUserEventTypeType = "char"
TThostFtdcFBEFileNameType = "string"
TThostFtdcFBEBatchSerialType = "string"
TThostFtdcFBEReqFlagType = "char"
TThostFtdcNotifyClassType = "char"
TThostFtdcRiskNofityInfoType = "string"
TThostFtdcForceCloseSceneIdType = "string"
TThostFtdcForceCloseTypeType = "char"
TThostFtdcInstrumentIDsType = "string"
TThostFtdcRiskNotifyMethodType = "char"
TThostFtdcRiskNotifyStatusType = "char"
TThostFtdcRiskUserEventType = "char"
TThostFtdcParamIDType = "int"
TThostFtdcParamNameType = "string"
TThostFtdcParamValueType = "string"
TThostFtdcConditionalOrderSortTypeType = "char"
TThostFtdcSendTypeType = "char"
TThostFtdcClientIDStatusType = "char"
TThostFtdcIndustryIDType = "string"
TThostFtdcQuestionIDType = "string"
TThostFtdcQuestionContentType = "string"
TThostFtdcOptionIDType = "string"
TThostFtdcOptionContentType = "string"
TThostFtdcQuestionTypeType = "char"
TThostFtdcProcessIDType = "string"
TThostFtdcSeqNoType = "int"
TThostFtdcUOAProcessStatusType = "string"
TThostFtdcProcessTypeType = "string"
TThostFtdcBusinessTypeType = "char"
TThostFtdcCfmmcReturnCodeType = "char"
TThostFtdcExReturnCodeType = "int"
TThostFtdcClientTypeType = "char"
TThostFtdcExchangeIDTypeType = "char"
TThostFtdcExClientIDTypeType = "char"
TThostFtdcClientClassifyType = "string"
TThostFtdcUOAOrganTypeType = "string"
TThostFtdcUOACountryCodeType = "string"
TThostFtdcAreaCodeType = "string"
TThostFtdcFuturesIDType = "string"
TThostFtdcCffmcDateType = "string"
TThostFtdcCffmcTimeType = "string"
TThostFtdcNocIDType = "string"
TThostFtdcUpdateFlagType = "char"
TThostFtdcApplyOperateIDType = "char"
TThostFtdcApplyStatusIDType = "char"
TThostFtdcSendMethodType = "char"
TThostFtdcEventTypeType = "string"
TThostFtdcEventModeType = "char"
TThostFtdcUOAAutoSendType = "char"
TThostFtdcQueryDepthType = "int"
TThostFtdcDataCenterIDType = "int"
TThostFtdcFlowIDType = "char"
TThostFtdcCheckLevelType = "char"
TThostFtdcCheckNoType = "int"
TThostFtdcCheckStatusType = "char"
TThostFtdcUsedStatusType = "char"
TThostFtdcRateTemplateNameType = "string"
TThostFtdcPropertyStringType = "string"
TThostFtdcBankAcountOriginType = "char"
TThostFtdcMonthBillTradeSumType = "char"
TThostFtdcFBTTradeCodeEnumType = "char"
TThostFtdcRateTemplateIDType = "string"
TThostFtdcRiskRateType = "string"
TThostFtdcTimestampType = "int"
TThostFtdcInvestorIDRuleNameType = "string"
TThostFtdcInvestorIDRuleExprType = "string"
TThostFtdcLastDriftType = "int"
TThostFtdcLastSuccessType = "int"
TThostFtdcAuthKeyType = "string"
TThostFtdcSerialNumberType = "string"
TThostFtdcOTPTypeType = "char"
TThostFtdcOTPVendorsIDType = "string"
TThostFtdcOTPVendorsNameType = "string"
TThostFtdcOTPStatusType = "char"
TThostFtdcBrokerUserTypeType = "char"
TThostFtdcFutureTypeType = "char"
TThostFtdcFundEventTypeType = "char"
TThostFtdcAccountSourceTypeType = "char"
TThostFtdcCodeSourceTypeType = "char"
TThostFtdcUserRangeType = "char"
TThostFtdcTimeSpanType = "string"
TThostFtdcImportSequenceIDType = "string"
TThostFtdcByGroupType = "char"
TThostFtdcTradeSumStatModeType = "char"
TThostFtdcComTypeType = "int"
TThostFtdcUserProductIDType = "string"
TThostFtdcUserProductNameType = "string"
TThostFtdcUserProductMemoType = "string"
# --- CSRC (regulator) reporting fields ---
TThostFtdcCSRCCancelFlagType = "string"
TThostFtdcCSRCDateType = "string"
TThostFtdcCSRCInvestorNameType = "string"
TThostFtdcCSRCOpenInvestorNameType = "string"
TThostFtdcCSRCInvestorIDType = "string"
TThostFtdcCSRCIdentifiedCardNoType = "string"
TThostFtdcCSRCClientIDType = "string"
TThostFtdcCSRCBankFlagType = "string"
TThostFtdcCSRCBankAccountType = "string"
TThostFtdcCSRCOpenNameType = "string"
TThostFtdcCSRCMemoType = "string"
TThostFtdcCSRCTimeType = "string"
TThostFtdcCSRCTradeIDType = "string"
TThostFtdcCSRCExchangeInstIDType = "string"
TThostFtdcCSRCMortgageNameType = "string"
TThostFtdcCSRCReasonType = "string"
TThostFtdcIsSettlementType = "string"
TThostFtdcCSRCMoneyType = "double"
TThostFtdcCSRCPriceType = "double"
TThostFtdcCSRCOptionsTypeType = "string"
TThostFtdcCSRCStrikePriceType = "double"
TThostFtdcCSRCTargetProductIDType = "string"
TThostFtdcCSRCTargetInstrIDType = "string"
TThostFtdcCommModelNameType = "string"
TThostFtdcCommModelMemoType = "string"
TThostFtdcExprSetModeType = "char"
TThostFtdcRateInvestorRangeType = "char"
TThostFtdcAgentBrokerIDType = "string"
TThostFtdcDRIdentityIDType = "int"
TThostFtdcDRIdentityNameType = "string"
TThostFtdcDBLinkIDType = "string"
TThostFtdcSyncDataStatusType = "char"
TThostFtdcTradeSourceType = "char"
TThostFtdcFlexStatModeType = "char"
TThostFtdcByInvestorRangeType = "char"
TThostFtdcSRiskRateType = "string"
TThostFtdcSequenceNo12Type = "int"
TThostFtdcPropertyInvestorRangeType = "char"
TThostFtdcFileStatusType = "char"
TThostFtdcFileGenStyleType = "char"
TThostFtdcSysOperModeType = "char"
TThostFtdcSysOperTypeType = "char"
TThostFtdcCSRCDataQueyTypeType = "char"
TThostFtdcFreezeStatusType = "char"
TThostFtdcStandardStatusType = "char"
TThostFtdcCSRCFreezeStatusType = "string"
TThostFtdcRightParamTypeType = "char"
TThostFtdcRightTemplateIDType = "string"
TThostFtdcRightTemplateNameType = "string"
TThostFtdcDataStatusType = "char"
TThostFtdcAMLCheckStatusType = "char"
TThostFtdcAmlDateTypeType = "char"
TThostFtdcAmlCheckLevelType = "char"
TThostFtdcAmlCheckFlowType = "string"
TThostFtdcDataTypeType = "string"
TThostFtdcExportFileTypeType = "char"
TThostFtdcSettleManagerTypeType = "char"
TThostFtdcSettleManagerIDType = "string"
TThostFtdcSettleManagerNameType = "string"
TThostFtdcSettleManagerLevelType = "char"
TThostFtdcSettleManagerGroupType = "char"
TThostFtdcCheckResultMemoType = "string"
TThostFtdcFunctionUrlType = "string"
TThostFtdcAuthInfoType = "string"
TThostFtdcAuthCodeType = "string"
TThostFtdcLimitUseTypeType = "char"
TThostFtdcDataResourceType = "char"
TThostFtdcMarginTypeType = "char"
TThostFtdcActiveTypeType = "char"
TThostFtdcMarginRateTypeType = "char"
TThostFtdcBackUpStatusType = "char"
TThostFtdcInitSettlementType = "char"
TThostFtdcReportStatusType = "char"
TThostFtdcSaveStatusType = "char"
TThostFtdcSettArchiveStatusType = "char"
TThostFtdcCTPTypeType = "char"
TThostFtdcToolIDType = "string"
TThostFtdcToolNameType = "string"
TThostFtdcCloseDealTypeType = "char"
TThostFtdcMortgageFundUseRangeType = "char"
TThostFtdcCurrencyUnitType = "double"
TThostFtdcExchangeRateType = "double"
TThostFtdcSpecProductTypeType = "char"
TThostFtdcFundMortgageTypeType = "char"
TThostFtdcAccountSettlementParamIDType = "char"
TThostFtdcCurrencyNameType = "string"
TThostFtdcCurrencySignType = "string"
TThostFtdcFundMortDirectionType = "char"
TThostFtdcBusinessClassType = "char"
TThostFtdcSwapSourceTypeType = "char"
TThostFtdcCurrExDirectionType = "char"
TThostFtdcCurrencySwapStatusType = "char"
TThostFtdcCurrExchCertNoType = "string"
TThostFtdcBatchSerialNoType = "string"
TThostFtdcReqFlagType = "char"
TThostFtdcResFlagType = "char"
TThostFtdcPageControlType = "string"
TThostFtdcRecordCountType = "int"
TThostFtdcCurrencySwapMemoType = "string"
TThostFtdcExStatusType = "char"
TThostFtdcClientRegionType = "char"
TThostFtdcWorkPlaceType = "string"
TThostFtdcBusinessPeriodType = "string"
TThostFtdcWebSiteType = "string"
TThostFtdcUOAIdCardTypeType = "string"
TThostFtdcClientModeType = "string"
TThostFtdcInvestorFullNameType = "string"
TThostFtdcUOABrokerIDType = "string"
TThostFtdcUOAZipCodeType = "string"
TThostFtdcUOAEMailType = "string"
TThostFtdcOldCityType = "string"
TThostFtdcCorporateIdentifiedCardNoType = "string"
TThostFtdcHasBoardType = "char"
TThostFtdcStartModeType = "char"
TThostFtdcTemplateTypeType = "char"
TThostFtdcLoginModeType = "char"
TThostFtdcPromptTypeType = "char"
TThostFtdcLedgerManageIDType = "string"
TThostFtdcInvestVarietyType = "string"
TThostFtdcBankAccountTypeType = "string"
TThostFtdcLedgerManageBankType = "string"
TThostFtdcCffexDepartmentNameType = "string"
TThostFtdcCffexDepartmentCodeType = "string"
TThostFtdcHasTrusteeType = "char"
TThostFtdcCSRCMemo1Type = "string"
TThostFtdcAssetmgrCFullNameType = "string"
TThostFtdcAssetmgrApprovalNOType = "string"
TThostFtdcAssetmgrMgrNameType = "string"
TThostFtdcAmTypeType = "char"
TThostFtdcCSRCAmTypeType = "string"
TThostFtdcCSRCFundIOTypeType = "char"
TThostFtdcCusAccountTypeType = "char"
TThostFtdcCSRCNationalType = "string"
TThostFtdcCSRCSecAgentIDType = "string"
TThostFtdcLanguageTypeType = "char"
TThostFtdcAmAccountType = "string"
TThostFtdcAssetmgrClientTypeType = "char"
TThostFtdcAssetmgrTypeType = "char"
TThostFtdcUOMType = "string"
TThostFtdcSHFEInstLifePhaseType = "string"
TThostFtdcSHFEProductClassType = "string"
TThostFtdcPriceDecimalType = "string"
TThostFtdcInTheMoneyFlagType = "string"
TThostFtdcCheckInstrTypeType = "char"
TThostFtdcDeliveryTypeType = "char"
TThostFtdcBigMoneyType = "double"
TThostFtdcMaxMarginSideAlgorithmType = "char"
TThostFtdcDAClientTypeType = "char"
TThostFtdcCombinInstrIDType = "string"
TThostFtdcCombinSettlePriceType = "string"
TThostFtdcDCEPriorityType = "int"
TThostFtdcTradeGroupIDType = "int"
TThostFtdcIsCheckPrepaType = "int"
TThostFtdcUOAAssetmgrTypeType = "char"
TThostFtdcDirectionEnType = "char"
TThostFtdcOffsetFlagEnType = "char"
TThostFtdcHedgeFlagEnType = "char"
TThostFtdcFundIOTypeEnType = "char"
TThostFtdcFundTypeEnType = "char"
TThostFtdcFundDirectionEnType = "char"
TThostFtdcFundMortDirectionEnType = "char"
TThostFtdcSwapBusinessTypeType = "string"
# --- options / execution-order fields ---
TThostFtdcOptionsTypeType = "char"
TThostFtdcStrikeModeType = "char"
TThostFtdcStrikeTypeType = "char"
TThostFtdcApplyTypeType = "char"
TThostFtdcGiveUpDataSourceType = "char"
TThostFtdcExecOrderSysIDType = "string"
TThostFtdcExecResultType = "char"
TThostFtdcStrikeSequenceType = "int"
TThostFtdcStrikeTimeType = "string"
TThostFtdcCombinationTypeType = "char"
TThostFtdcOptionRoyaltyPriceTypeType = "char"
TThostFtdcBalanceAlgorithmType = "char"
TThostFtdcActionTypeType = "char"
TThostFtdcForQuoteStatusType = "char"
TThostFtdcValueMethodType = "char"
TThostFtdcExecOrderPositionFlagType = "char"
TThostFtdcExecOrderCloseFlagType = "char"
TThostFtdcProductTypeType = "char"
TThostFtdcCZCEUploadFileNameType = "char"
TThostFtdcDCEUploadFileNameType = "char"
TThostFtdcSHFEUploadFileNameType = "char"
TThostFtdcCFFEXUploadFileNameType = "char"
TThostFtdcCombDirectionType = "char"
TThostFtdcStrikeOffsetTypeType = "char"
TThostFtdcReserveOpenAccStasType = "char"
TThostFtdcLoginRemarkType = "string"
TThostFtdcInvestUnitIDType = "string"
TThostFtdcBulletinIDType = "int"
TThostFtdcNewsTypeType = "string"
TThostFtdcNewsUrgencyType = "char"
TThostFtdcAbstractType = "string"
TThostFtdcComeFromType = "string"
TThostFtdcURLLinkType = "string"
TThostFtdcLongIndividualNameType = "string"
TThostFtdcLongFBEBankAccountNameType = "string"
TThostFtdcDateTimeType = "string"
TThostFtdcWeakPasswordSourceType = "char"
TThostFtdcRandomStringType = "string"
TThostFtdcOptSelfCloseFlagType = "char"
TThostFtdcBizTypeType = "char"
# --- client-app authentication / handshake fields ---
TThostFtdcAppTypeType = "char"
TThostFtdcAppIDType = "string"
TThostFtdcSystemInfoLenType = "int"
TThostFtdcAdditionalInfoLenType = "int"
TThostFtdcClientSystemInfoType = "string"
TThostFtdcAdditionalInfoType = "string"
TThostFtdcBase64ClientSystemInfoType = "string"
TThostFtdcBase64AdditionalInfoType = "string"
TThostFtdcCurrentAuthMethodType = "int"
TThostFtdcCaptchaInfoLenType = "int"
TThostFtdcCaptchaInfoType = "string"
TThostFtdcUserTextSeqType = "int"
TThostFtdcHandshakeDataType = "string"
TThostFtdcHandshakeDataLenType = "int"
TThostFtdcCryptoKeyVersionType = "string"
TThostFtdcRsaKeyVersionType = "int"
TThostFtdcSoftwareProviderIDType = "string"
TThostFtdcCollectTimeType = "string"
TThostFtdcQueryFreqType = "int"
TThostFtdcResponseValueType = "char"
TThostFtdcOTCTradeTypeType = "char"
TThostFtdcMatchTypeType = "char"
TThostFtdcOTCTraderIDType = "string"
TThostFtdcRiskValueType = "double"
TThostFtdcIDBNameType = "string"
|
andrewchenshx/vnpy
|
vnpy/api/ctp/generator/ctp_typedef.py
|
Python
|
mit
| 28,738
|
#!/usr/bin/env python
"""
moveit_ik_demo.py - Version 0.1 2014-01-14
Use inverse kinemtatics to move the end effector to a specified pose
Created for the Pi Robot Project: http://www.pirobot.org
Copyright (c) 2014 Patrick Goebel. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.5
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details at:
http://www.gnu.org/licenses/gpl.html
"""
import rospy, sys
import moveit_commander
from moveit_msgs.msg import RobotTrajectory
from trajectory_msgs.msg import JointTrajectoryPoint
from geometry_msgs.msg import PoseStamped, Pose
from tf.transformations import euler_from_quaternion, quaternion_from_euler
class MoveItDemo:
    """Drive the right arm to a hard-coded Cartesian pose via inverse
    kinematics, then return it to the 'resting' pose and shut down.

    All work happens in __init__: constructing the object runs the whole
    demo sequence. Requires a running ROS master and a MoveIt configuration
    that defines a 'right_arm' planning group and a 'resting' named target.
    """
    def __init__(self):
        # Initialize the move_group API
        moveit_commander.roscpp_initialize(sys.argv)
        rospy.init_node('moveit_demo')
        # Initialize the move group for the right arm
        right_arm = moveit_commander.MoveGroupCommander('right_arm')
        # Get the name of the end-effector link
        end_effector_link = right_arm.get_end_effector_link()
        # Set the reference frame for pose targets
        reference_frame = 'base_footprint'
        # Set the right arm reference frame accordingly
        right_arm.set_pose_reference_frame(reference_frame)
        # Allow replanning to increase the odds of a solution
        right_arm.allow_replanning(True)
        # Allow some leeway in position (meters) and orientation (radians)
        right_arm.set_goal_position_tolerance(0.05)
        right_arm.set_goal_orientation_tolerance(0.1)
        # Start the arm in the "resting" pose stored in the SRDF file
        right_arm.set_named_target('resting')
        right_arm.go()
        rospy.sleep(2)
        # Set the target pose.  This particular pose has the gripper oriented horizontally
        # 0.85 meters above the ground, 0.10 meters to the right and 0.20 meters ahead of
        # the center of the robot base.
        # NOTE(review): the hard-coded numbers below do not match the comment
        # above (z is 0.66, x/y are negative) -- confirm which is intended.
        target_pose = PoseStamped()
        target_pose.header.frame_id = reference_frame
        target_pose.header.stamp = rospy.Time.now()
        target_pose.pose.position.x = -0.176282601392
        target_pose.pose.position.y = -0.382988793888
        target_pose.pose.position.z = 0.659669497255
        target_pose.pose.orientation.x = -0.1956855391
        target_pose.pose.orientation.y = -0.0342473638064
        target_pose.pose.orientation.z = -0.640620609783
        target_pose.pose.orientation.w = 0.74171390858
        # Set the start state to the current state
        right_arm.set_start_state_to_current_state()
        # Set the goal pose of the end effector to the stored pose
        right_arm.set_pose_target(target_pose, end_effector_link)
        # Plan the trajectory to the goal
        traj = right_arm.plan()
        # Execute the planned trajectory
        right_arm.execute(traj)
        # Pause for a second
        rospy.sleep(1)
        # Finish up in the resting position
        right_arm.set_named_target('resting')
        right_arm.go()
        # Shut down MoveIt cleanly
        moveit_commander.roscpp_shutdown()
        # Exit MoveIt
        # NOTE(review): reaches os through the moveit_commander module; this
        # only works because moveit_commander happens to import os. A direct
        # 'import os; os._exit(0)' would be clearer.
        moveit_commander.os._exit(0)
if __name__ == "__main__":
    # Constructing the demo object runs the entire motion sequence (see
    # MoveItDemo.__init__).
    MoveItDemo()
|
peterheim1/robbie_ros
|
robbie_moveit/nodes/get_box.py
|
Python
|
bsd-3-clause
| 3,864
|
from django.conf.urls import patterns, url
from .views import EmailAlternativeView
# Route for viewing a stored email's alternative (HTML) body by primary key.
# NOTE(review): patterns() is legacy Django (removed in 1.10); this URLconf
# is tied to the old-style API.
urlpatterns = patterns(
    '',
    url(r'^email_alternative/(?P<pk>\d+)/$',
        EmailAlternativeView.as_view(),
        name='email_alternative'),
)
|
bigmassa/django_mail_save
|
mail_save/urls.py
|
Python
|
mit
| 238
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Standard Ansible module metadata consumed by ansible-doc and CI tooling.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_eth_trunk
version_added: "2.4"
short_description: Manages Eth-Trunk interfaces on HUAWEI CloudEngine switches.
description:
- Manages Eth-Trunk specific configuration parameters on HUAWEI CloudEngine switches.
author: QijunPan (@QijunPan)
notes:
- C(state=absent) removes the Eth-Trunk config and interface if it
already exists. If members to be removed are not explicitly
passed, all existing members (if any), are removed,
and Eth-Trunk removed.
- Members must be a list.
- This module requires the netconf system service be enabled on the remote device being managed.
- Recommended connection is C(netconf).
- This module also works with C(local) connections for legacy playbooks.
options:
trunk_id:
description:
- Eth-Trunk interface number.
The value is an integer.
The value range depends on the assign forward eth-trunk mode command.
When 256 is specified, the value ranges from 0 to 255.
When 512 is specified, the value ranges from 0 to 511.
When 1024 is specified, the value ranges from 0 to 1023.
required: true
mode:
description:
- Specifies the working mode of an Eth-Trunk interface.
choices: ['manual','lacp-dynamic','lacp-static']
min_links:
description:
- Specifies the minimum number of Eth-Trunk member links in the Up state.
The value is an integer ranging from 1 to the maximum number of interfaces
that can be added to a Eth-Trunk interface.
hash_type:
description:
- Hash algorithm used for load balancing among Eth-Trunk member interfaces.
choices: ['src-dst-ip', 'src-dst-mac', 'enhanced', 'dst-ip', 'dst-mac', 'src-ip', 'src-mac']
members:
description:
- List of interfaces that will be managed in a given Eth-Trunk.
The interface name must be full name.
force:
description:
- When true it forces Eth-Trunk members to match what is
declared in the members param. This can be used to remove
members.
type: bool
default: 'no'
state:
description:
- Manage the state of the resource.
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: eth_trunk module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Ensure Eth-Trunk100 is created, add two members, and set to mode lacp-static
ce_eth_trunk:
trunk_id: 100
members: ['10GE1/0/24','10GE1/0/25']
mode: 'lacp-static'
state: present
provider: '{{ cli }}'
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"trunk_id": "100", "members": ['10GE1/0/24','10GE1/0/25'], "mode": "lacp-static"}
existing:
description: k/v pairs of existing Eth-Trunk
returned: always
type: dict
sample: {"trunk_id": "100", "hash_type": "mac", "members_detail": [
{"memberIfName": "10GE1/0/25", "memberIfState": "Down"}],
"min_links": "1", "mode": "manual"}
end_state:
description: k/v pairs of Eth-Trunk info after module execution
returned: always
type: dict
sample: {"trunk_id": "100", "hash_type": "mac", "members_detail": [
{"memberIfName": "10GE1/0/24", "memberIfState": "Down"},
{"memberIfName": "10GE1/0/25", "memberIfState": "Down"}],
"min_links": "1", "mode": "lacp-static"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["interface Eth-Trunk 100",
"mode lacp-static",
"interface 10GE1/0/25",
"eth-trunk 100"]
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_TRUNK = """
<filter type="subtree">
<ifmtrunk xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<TrunkIfs>
<TrunkIf>
<ifName>Eth-Trunk%s</ifName>
<minUpNum></minUpNum>
<maxUpNum></maxUpNum>
<trunkType></trunkType>
<hashType></hashType>
<workMode></workMode>
<upMemberIfNum></upMemberIfNum>
<memberIfNum></memberIfNum>
<TrunkMemberIfs>
<TrunkMemberIf>
<memberIfName></memberIfName>
<memberIfState></memberIfState>
</TrunkMemberIf>
</TrunkMemberIfs>
</TrunkIf>
</TrunkIfs>
</ifmtrunk>
</filter>
"""
CE_NC_XML_BUILD_TRUNK_CFG = """
<config>
<ifmtrunk xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<TrunkIfs>%s</TrunkIfs>
</ifmtrunk>
</config>
"""
CE_NC_XML_DELETE_TRUNK = """
<TrunkIf operation="delete">
<ifName>Eth-Trunk%s</ifName>
</TrunkIf>
"""
CE_NC_XML_CREATE_TRUNK = """
<TrunkIf operation="merge">
<ifName>Eth-Trunk%s</ifName>
</TrunkIf>
"""
CE_NC_XML_MERGE_MINUPNUM = """
<TrunkIf operation="merge">
<ifName>Eth-Trunk%s</ifName>
<minUpNum>%s</minUpNum>
</TrunkIf>
"""
CE_NC_XML_MERGE_HASHTYPE = """
<TrunkIf operation="merge">
<ifName>Eth-Trunk%s</ifName>
<hashType>%s</hashType>
</TrunkIf>
"""
CE_NC_XML_MERGE_WORKMODE = """
<TrunkIf operation="merge">
<ifName>Eth-Trunk%s</ifName>
<workMode>%s</workMode>
</TrunkIf>
"""
CE_NC_XML_BUILD_MEMBER_CFG = """
<TrunkIf>
<ifName>Eth-Trunk%s</ifName>
<TrunkMemberIfs>%s</TrunkMemberIfs>
</TrunkIf>
"""
CE_NC_XML_MERGE_MEMBER = """
<TrunkMemberIf operation="merge">
<memberIfName>%s</memberIfName>
</TrunkMemberIf>
"""
CE_NC_XML_DELETE_MEMBER = """
<TrunkMemberIf operation="delete">
<memberIfName>%s</memberIfName>
</TrunkMemberIf>
"""
MODE_XML2CLI = {"Manual": "manual", "Dynamic": "lacp-dynamic", "Static": "lacp-static"}
MODE_CLI2XML = {"manual": "Manual", "lacp-dynamic": "Dynamic", "lacp-static": "Static"}
HASH_XML2CLI = {"IP": "src-dst-ip", "MAC": "src-dst-mac", "Enhanced": "enhanced",
"Desip": "dst-ip", "Desmac": "dst-mac", "Sourceip": "src-ip", "Sourcemac": "src-mac"}
HASH_CLI2XML = {"src-dst-ip": "IP", "src-dst-mac": "MAC", "enhanced": "Enhanced",
"dst-ip": "Desip", "dst-mac": "Desmac", "src-ip": "Sourceip", "src-mac": "Sourcemac"}
def get_interface_type(interface):
    """Gets the type of interface, such as 10GE, ETH-TRUNK, VLANIF...

    Returns the lower-case type token for a full interface name, or None
    when the name is None or matches no known prefix.
    """
    if interface is None:
        return None
    # (prefix, type) pairs checked in order; order mirrors the original
    # if/elif chain, so first match wins.
    prefix_map = (
        ('GE', 'ge'),
        ('10GE', '10ge'),
        ('25GE', '25ge'),
        ('4X10GE', '4x10ge'),
        ('40GE', '40ge'),
        ('100GE', '100ge'),
        ('VLANIF', 'vlanif'),
        ('LOOPBACK', 'loopback'),
        ('METH', 'meth'),
        ('ETH-TRUNK', 'eth-trunk'),
        ('VBDIF', 'vbdif'),
        ('NVE', 'nve'),
        ('TUNNEL', 'tunnel'),
        ('ETHERNET', 'ethernet'),
        ('FCOE-PORT', 'fcoe-port'),
        ('FABRIC-PORT', 'fabric-port'),
        ('STACK-PORT', 'stack-port'),
        ('NULL', 'null'),
    )
    name = interface.upper()
    for prefix, iftype in prefix_map:
        if name.startswith(prefix):
            return iftype
    return None
def mode_xml_to_cli_str(mode):
    """Translate a netconf trunk work-mode value to its CLI keyword.

    Falsy input yields ""; an unknown value yields None (dict.get default).
    """
    return MODE_XML2CLI.get(mode) if mode else ""
def hash_type_xml_to_cli_str(hash_type):
    """Translate a netconf trunk hash-type value to its CLI keyword.

    Falsy input yields ""; an unknown value yields None (dict.get default).
    """
    return HASH_XML2CLI.get(hash_type) if hash_type else ""
class EthTrunk(object):
    """
    Manages Eth-Trunk interfaces.

    Workflow (see work()): read the current trunk state over netconf,
    compare it with the requested parameters, push merge/delete payloads as
    needed, and report proposed/existing/end_state plus the equivalent CLI
    commands in 'updates'.
    """

    def __init__(self, argument_spec):
        self.spec = argument_spec
        self.module = None
        self.__init_module__()

        # module input info
        self.trunk_id = self.module.params['trunk_id']
        self.mode = self.module.params['mode']
        self.min_links = self.module.params['min_links']
        self.hash_type = self.module.params['hash_type']
        self.members = self.module.params['members']
        self.state = self.module.params['state']
        self.force = self.module.params['force']

        # state
        self.changed = False
        self.updates_cmd = list()
        self.results = dict()
        self.proposed = dict()
        self.existing = dict()
        self.end_state = dict()

        # interface info (filled by get_trunk_dict in work())
        self.trunk_info = dict()

    def __init_module__(self):
        """ init module """
        self.module = AnsibleModule(
            argument_spec=self.spec, supports_check_mode=True)

    def netconf_set_config(self, xml_str, xml_name):
        """ netconf set config; fails the module unless the reply contains <ok/> """
        recv_xml = set_nc_config(self.module, xml_str)
        if "<ok/>" not in recv_xml:
            self.module.fail_json(msg='Error: %s failed.' % xml_name)

    def get_trunk_dict(self, trunk_id):
        """ get one interface attributes dict."""
        trunk_info = dict()
        conf_str = CE_NC_GET_TRUNK % trunk_id
        recv_xml = get_nc_config(self.module, conf_str)

        # empty <data/> reply means the trunk does not exist
        if "<data/>" in recv_xml:
            return trunk_info

        # get trunk base info (parsed from the reply with regexes rather
        # than an XML parser)
        base = re.findall(
            r'.*<ifName>(.*)</ifName>.*\s*'
            r'<minUpNum>(.*)</minUpNum>.*\s*'
            r'<maxUpNum>(.*)</maxUpNum>.*\s*'
            r'<trunkType>(.*)</trunkType>.*\s*'
            r'<hashType>(.*)</hashType>.*\s*'
            r'<workMode>(.*)</workMode>.*\s*'
            r'<upMemberIfNum>(.*)</upMemberIfNum>.*\s*'
            r'<memberIfNum>(.*)</memberIfNum>.*', recv_xml)

        if base:
            # trunkId: numeric part of "Eth-TrunkNN"; all values kept as strings
            trunk_info = dict(ifName=base[0][0],
                              trunkId=base[0][0].lower().replace("eth-trunk", "").replace(" ", ""),
                              minUpNum=base[0][1],
                              maxUpNum=base[0][2],
                              trunkType=base[0][3],
                              hashType=base[0][4],
                              workMode=base[0][5],
                              upMemberIfNum=base[0][6],
                              memberIfNum=base[0][7])

        # get trunk member interface info
        member = re.findall(
            r'.*<memberIfName>(.*)</memberIfName>.*\s*'
            r'<memberIfState>(.*)</memberIfState>.*', recv_xml)
        trunk_info["TrunkMemberIfs"] = list()

        for mem in member:
            trunk_info["TrunkMemberIfs"].append(
                dict(memberIfName=mem[0], memberIfState=mem[1]))

        return trunk_info

    def is_member_exist(self, ifname):
        """is trunk member exist (name compared ignoring spaces and case)"""
        if not self.trunk_info["TrunkMemberIfs"]:
            return False

        for mem in self.trunk_info["TrunkMemberIfs"]:
            if ifname.replace(" ", "").upper() == mem["memberIfName"].replace(" ", "").upper():
                return True

        return False

    def get_mode_xml_str(self):
        """trunk mode netconf xml format string"""
        return MODE_CLI2XML.get(self.mode)

    def get_hash_type_xml_str(self):
        """trunk hash type netconf xml format string"""
        return HASH_CLI2XML.get(self.hash_type)

    def create_eth_trunk(self):
        """Create Eth-Trunk interface with the requested attributes/members"""
        xml_str = CE_NC_XML_CREATE_TRUNK % self.trunk_id
        self.updates_cmd.append("interface Eth-Trunk %s" % self.trunk_id)

        if self.hash_type:
            self.updates_cmd.append("load-balance %s" % self.hash_type)
            xml_str += CE_NC_XML_MERGE_HASHTYPE % (self.trunk_id, self.get_hash_type_xml_str())

        if self.mode:
            self.updates_cmd.append("mode %s" % self.mode)
            xml_str += CE_NC_XML_MERGE_WORKMODE % (self.trunk_id, self.get_mode_xml_str())

        if self.min_links:
            self.updates_cmd.append("least active-linknumber %s" % self.min_links)
            xml_str += CE_NC_XML_MERGE_MINUPNUM % (self.trunk_id, self.min_links)

        if self.members:
            mem_xml = ""
            for mem in self.members:
                mem_xml += CE_NC_XML_MERGE_MEMBER % mem.upper()
                self.updates_cmd.append("interface %s" % mem)
                self.updates_cmd.append("eth-trunk %s" % self.trunk_id)
            xml_str += CE_NC_XML_BUILD_MEMBER_CFG % (self.trunk_id, mem_xml)

        cfg_xml = CE_NC_XML_BUILD_TRUNK_CFG % xml_str
        self.netconf_set_config(cfg_xml, "CREATE_TRUNK")
        self.changed = True

    def delete_eth_trunk(self):
        """Delete Eth-Trunk interface and remove all member"""
        if not self.trunk_info:
            return

        xml_str = ""
        mem_str = ""
        # members must be detached before the trunk itself is deleted
        if self.trunk_info["TrunkMemberIfs"]:
            for mem in self.trunk_info["TrunkMemberIfs"]:
                mem_str += CE_NC_XML_DELETE_MEMBER % mem["memberIfName"]
                self.updates_cmd.append("interface %s" % mem["memberIfName"])
                self.updates_cmd.append("undo eth-trunk")
            if mem_str:
                xml_str += CE_NC_XML_BUILD_MEMBER_CFG % (self.trunk_id, mem_str)

        xml_str += CE_NC_XML_DELETE_TRUNK % self.trunk_id
        self.updates_cmd.append("undo interface Eth-Trunk %s" % self.trunk_id)
        cfg_xml = CE_NC_XML_BUILD_TRUNK_CFG % xml_str
        self.netconf_set_config(cfg_xml, "DELETE_TRUNK")
        self.changed = True

    def remove_member(self):
        """delete trunk member (only those that currently exist)"""
        if not self.members:
            return

        change = False
        mem_xml = ""
        xml_str = ""
        for mem in self.members:
            if self.is_member_exist(mem):
                mem_xml += CE_NC_XML_DELETE_MEMBER % mem.upper()
                self.updates_cmd.append("interface %s" % mem)
                self.updates_cmd.append("undo eth-trunk")
        if mem_xml:
            xml_str += CE_NC_XML_BUILD_MEMBER_CFG % (self.trunk_id, mem_xml)
            change = True

        if not change:
            return

        cfg_xml = CE_NC_XML_BUILD_TRUNK_CFG % xml_str
        self.netconf_set_config(cfg_xml, "REMOVE_TRUNK_MEMBER")
        self.changed = True

    def merge_eth_trunk(self):
        """Create or merge Eth-Trunk: push only attributes that differ"""
        change = False
        xml_str = ""
        self.updates_cmd.append("interface Eth-Trunk %s" % self.trunk_id)
        if self.hash_type and self.get_hash_type_xml_str() != self.trunk_info["hashType"]:
            self.updates_cmd.append("load-balance %s" %
                                    self.hash_type)
            xml_str += CE_NC_XML_MERGE_HASHTYPE % (
                self.trunk_id, self.get_hash_type_xml_str())
            change = True
        # string comparison: both sides are strings (module param type='str',
        # device value parsed from XML)
        if self.min_links and self.min_links != self.trunk_info["minUpNum"]:
            self.updates_cmd.append(
                "least active-linknumber %s" % self.min_links)
            xml_str += CE_NC_XML_MERGE_MINUPNUM % (
                self.trunk_id, self.min_links)
            change = True
        if self.mode and self.get_mode_xml_str() != self.trunk_info["workMode"]:
            self.updates_cmd.append("mode %s" % self.mode)
            xml_str += CE_NC_XML_MERGE_WORKMODE % (
                self.trunk_id, self.get_mode_xml_str())
            change = True
        if not change:
            self.updates_cmd.pop()  # remove 'interface Eth-Trunk' command

        # deal force:
        # When true it forces Eth-Trunk members to match
        # what is declared in the members param.
        if self.force and self.trunk_info["TrunkMemberIfs"]:
            mem_xml = ""
            for mem in self.trunk_info["TrunkMemberIfs"]:
                # existing member not in the requested list -> detach it
                if not self.members or mem["memberIfName"].replace(" ", "").upper() not in self.members:
                    mem_xml += CE_NC_XML_DELETE_MEMBER % mem["memberIfName"]
                    self.updates_cmd.append("interface %s" % mem["memberIfName"])
                    self.updates_cmd.append("undo eth-trunk")
            if mem_xml:
                xml_str += CE_NC_XML_BUILD_MEMBER_CFG % (self.trunk_id, mem_xml)
                change = True

        if self.members:
            mem_xml = ""
            for mem in self.members:
                if not self.is_member_exist(mem):
                    mem_xml += CE_NC_XML_MERGE_MEMBER % mem.upper()
                    self.updates_cmd.append("interface %s" % mem)
                    self.updates_cmd.append("eth-trunk %s" % self.trunk_id)
            if mem_xml:
                xml_str += CE_NC_XML_BUILD_MEMBER_CFG % (
                    self.trunk_id, mem_xml)
                change = True

        if not change:
            return

        cfg_xml = CE_NC_XML_BUILD_TRUNK_CFG % xml_str
        self.netconf_set_config(cfg_xml, "MERGE_TRUNK")
        self.changed = True

    def check_params(self):
        """Check all input params"""
        # trunk_id check
        if not self.trunk_id.isdigit():
            self.module.fail_json(msg='The parameter of trunk_id is invalid.')

        # min_links check
        if self.min_links and not self.min_links.isdigit():
            self.module.fail_json(msg='The parameter of min_links is invalid.')

        # members check and convert members to upper
        if self.members:
            for mem in self.members:
                if not get_interface_type(mem.replace(" ", "")):
                    self.module.fail_json(
                        msg='The parameter of members is invalid.')

            # normalize in place so later comparisons are space/case free
            for mem_id in range(len(self.members)):
                self.members[mem_id] = self.members[mem_id].replace(" ", "").upper()

    def get_proposed(self):
        """get proposed info"""
        self.proposed["trunk_id"] = self.trunk_id
        self.proposed["mode"] = self.mode
        if self.min_links:
            self.proposed["min_links"] = self.min_links
        self.proposed["hash_type"] = self.hash_type
        if self.members:
            self.proposed["members"] = self.members
        self.proposed["state"] = self.state
        self.proposed["force"] = self.force

    def get_existing(self):
        """get existing info (device state before any change)"""
        if not self.trunk_info:
            return

        self.existing["trunk_id"] = self.trunk_info["trunkId"]
        self.existing["min_links"] = self.trunk_info["minUpNum"]
        self.existing["hash_type"] = hash_type_xml_to_cli_str(self.trunk_info["hashType"])
        self.existing["mode"] = mode_xml_to_cli_str(self.trunk_info["workMode"])
        self.existing["members_detail"] = self.trunk_info["TrunkMemberIfs"]

    def get_end_state(self):
        """get end state info (device state re-read after changes)"""
        trunk_info = self.get_trunk_dict(self.trunk_id)
        if not trunk_info:
            return

        self.end_state["trunk_id"] = trunk_info["trunkId"]
        self.end_state["min_links"] = trunk_info["minUpNum"]
        self.end_state["hash_type"] = hash_type_xml_to_cli_str(trunk_info["hashType"])
        self.end_state["mode"] = mode_xml_to_cli_str(trunk_info["workMode"])
        self.end_state["members_detail"] = trunk_info["TrunkMemberIfs"]

    def work(self):
        """worker: validate, read, apply, report"""
        self.check_params()
        self.trunk_info = self.get_trunk_dict(self.trunk_id)
        self.get_existing()
        self.get_proposed()

        # deal present or absent
        if self.state == "present":
            if not self.trunk_info:
                # create
                self.create_eth_trunk()
            else:
                # merge trunk
                self.merge_eth_trunk()
        else:
            if self.trunk_info:
                if not self.members:
                    # remove all members and delete trunk
                    self.delete_eth_trunk()
                else:
                    # remove some trunk members
                    self.remove_member()
            else:
                self.module.fail_json(msg='Error: Eth-Trunk does not exist.')

        self.get_end_state()
        self.results['changed'] = self.changed
        self.results['proposed'] = self.proposed
        self.results['existing'] = self.existing
        self.results['end_state'] = self.end_state
        if self.changed:
            self.results['updates'] = self.updates_cmd
        else:
            self.results['updates'] = list()

        self.module.exit_json(**self.results)
def main():
    """Entry point: build the argument spec and run the EthTrunk worker."""
    spec = dict(
        trunk_id=dict(required=True),
        mode=dict(required=False,
                  choices=['manual', 'lacp-dynamic', 'lacp-static'],
                  type='str'),
        min_links=dict(required=False, type='str'),
        hash_type=dict(required=False,
                       choices=['src-dst-ip', 'src-dst-mac', 'enhanced',
                                'dst-ip', 'dst-mac', 'src-ip', 'src-mac'],
                       type='str'),
        members=dict(required=False, default=None, type='list'),
        force=dict(required=False, default=False, type='bool'),
        state=dict(required=False, default='present',
                   choices=['present', 'absent'])
    )
    # add the common CloudEngine connection arguments
    spec.update(ce_argument_spec)
    EthTrunk(spec).work()


if __name__ == '__main__':
    main()
|
kvar/ansible
|
lib/ansible/modules/network/cloudengine/ce_eth_trunk.py
|
Python
|
gpl-3.0
| 23,015
|
"""Parse (absolute and relative) URLs.
urlparse module is based upon the following RFC specifications.
RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding
and L. Masinter, January 2005.
RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R.Hinden, B.Carpenter
and L.Masinter, December 1999.
RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T.
Berners-Lee, R. Fielding, and L. Masinter, August 1998.
RFC 2368: "The mailto URL scheme", by P. Hoffman, L. Masinter, J. Zwinski, July 1998.
RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June
1995.
RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M.
McCahill, December 1994
RFC 3986 is considered the current standard and any future changes to
urlparse module should conform with it. The urlparse module is
currently not entirely compliant with this RFC due to defacto
scenarios for parsing, and for backward compatibility purposes, some
parsing quirks from older RFCs are retained. The testcases in
test_urlparse.py provides a good indicator of parsing behavior.
"""
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
           "urlsplit", "urlunsplit", "parse_qs", "parse_qsl"]

# A classification of schemes ('' means apply by default).
# These lists are part of the module's public, mutable behavior: callers
# historically append new schemes to them at runtime.
uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
                 'wais', 'file', 'https', 'shttp', 'mms',
                 'prospero', 'rtsp', 'rtspu', '', 'sftp',
                 'svn', 'svn+ssh']
uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet',
               'imap', 'wais', 'file', 'mms', 'https', 'shttp',
               'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
               'svn', 'svn+ssh', 'sftp', 'nfs', 'git', 'git+ssh']
non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
                    'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
               'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
               'mms', '', 'sftp']
uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
              'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
                 'nntp', 'wais', 'https', 'shttp', 'snews',
                 'file', 'prospero', '']

# Characters valid in scheme names
scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
                'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
                '0123456789'
                '+-.')

# urlsplit() memoizes its results here; the cache is wiped wholesale by
# clear_cache() once it reaches MAX_CACHE_SIZE entries.
MAX_CACHE_SIZE = 20
_parse_cache = {}
def clear_cache():
    """Clear the parse cache."""
    # Called by urlsplit() when _parse_cache grows past MAX_CACHE_SIZE.
    _parse_cache.clear()
class ResultMixin(object):
    """Shared methods for the parsed result objects.

    All four properties are derived on demand from the ``netloc`` attribute
    supplied by the concrete result class.
    """

    @property
    def username(self):
        """User name from 'user:password@host', or None when absent."""
        userinfo, have_at, _ = self.netloc.rpartition("@")
        if not have_at:
            return None
        return userinfo.partition(":")[0]

    @property
    def password(self):
        """Password from 'user:password@host', or None when absent."""
        userinfo, have_at, _ = self.netloc.rpartition("@")
        if not have_at:
            return None
        _, has_colon, secret = userinfo.partition(":")
        return secret if has_colon else None

    @property
    def hostname(self):
        """Lower-cased host, IPv6 brackets stripped; None when empty."""
        host = self.netloc.rpartition('@')[2]
        if '[' in host and ']' in host:
            return host.partition(']')[0][1:].lower()
        if ':' in host:
            return host.partition(':')[0].lower()
        return host.lower() or None

    @property
    def port(self):
        """Numeric port, or None when no ':port' suffix is present."""
        host_port = self.netloc.rpartition('@')[2].rpartition(']')[2]
        if ':' not in host_port:
            return None
        return int(host_port.split(':')[1], 10)
from collections import namedtuple
class SplitResult(namedtuple('SplitResult', 'scheme netloc path query fragment'), ResultMixin):
    """5-tuple result of urlsplit(); ResultMixin adds username/password/
    hostname/port accessors derived from netloc."""

    __slots__ = ()

    def geturl(self):
        """Reassemble the URL from its split parts."""
        return urlunsplit(self)
class ParseResult(namedtuple('ParseResult', 'scheme netloc path params query fragment'), ResultMixin):
    """6-tuple result of urlparse() (adds 'params' to SplitResult's fields);
    ResultMixin adds username/password/hostname/port accessors."""

    __slots__ = ()

    def geturl(self):
        """Reassemble the URL from its parsed parts."""
        return urlunparse(self)
def urlparse(url, scheme='', allow_fragments=True):
    """Parse a URL into 6 components:
    <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
    Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    # Renamed the local from 'tuple' to avoid shadowing the builtin.
    split = urlsplit(url, scheme, allow_fragments)
    scheme, netloc, url, query, fragment = split
    # params are only split off for schemes that use them, and only from
    # the last path segment (handled inside _splitparams).
    if scheme in uses_params and ';' in url:
        url, params = _splitparams(url)
    else:
        params = ''
    return ParseResult(scheme, netloc, url, params, query, fragment)
def _splitparams(url):
if '/' in url:
i = url.find(';', url.rfind('/'))
if i < 0:
return url, ''
else:
i = url.find(';')
return url[:i], url[i+1:]
def _splitnetloc(url, start=0):
delim = len(url) # position of end of domain part of url, default is end
for c in '/?#': # look for delimiters; the order is NOT important
wdelim = url.find(c, start) # find first of this delim
if wdelim >= 0: # if found
delim = min(delim, wdelim) # use earliest delim position
return url[start:delim], url[delim:] # return (domain, rest)
def urlsplit(url, scheme='', allow_fragments=True):
    """Parse a URL into 5 components:
    <scheme>://<netloc>/<path>?<query>#<fragment>
    Return a 5-tuple: (scheme, netloc, path, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    allow_fragments = bool(allow_fragments)
    # Cache key includes the argument types so str and unicode inputs
    # (which produce different result types) don't collide.
    key = url, scheme, allow_fragments, type(url), type(scheme)
    cached = _parse_cache.get(key, None)
    if cached:
        return cached
    if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
        clear_cache()
    netloc = query = fragment = ''
    i = url.find(':')
    if i > 0:
        if url[:i] == 'http': # optimize the common case
            scheme = url[:i].lower()
            url = url[i+1:]
            if url[:2] == '//':
                netloc, url = _splitnetloc(url, 2)
                # a '[' or ']' may only appear as an IPv6 bracket pair
                if (('[' in netloc and ']' not in netloc) or
                    (']' in netloc and '[' not in netloc)):
                    raise ValueError("Invalid IPv6 URL")
            if allow_fragments and '#' in url:
                url, fragment = url.split('#', 1)
            if '?' in url:
                url, query = url.split('?', 1)
            v = SplitResult(scheme, netloc, url, query, fragment)
            _parse_cache[key] = v
            return v
        for c in url[:i]:
            if c not in scheme_chars:
                break
        else:
            try:
                # make sure "url" is not actually a port number (in which case
                # "scheme" is really part of the path
                _testportnum = int(url[i+1:])
            except ValueError:
                scheme, url = url[:i].lower(), url[i+1:]
    if url[:2] == '//':
        netloc, url = _splitnetloc(url, 2)
        if (('[' in netloc and ']' not in netloc) or
            (']' in netloc and '[' not in netloc)):
            raise ValueError("Invalid IPv6 URL")
    # non-http schemes honor fragments/queries only when classified to use them
    if allow_fragments and scheme in uses_fragment and '#' in url:
        url, fragment = url.split('#', 1)
    if scheme in uses_query and '?' in url:
        url, query = url.split('?', 1)
    v = SplitResult(scheme, netloc, url, query, fragment)
    _parse_cache[key] = v
    return v
def urlunparse(data):
    """Put a parsed URL back together again.  This may result in a
    slightly different, but equivalent URL, if the URL that was parsed
    originally had redundant delimiters, e.g. a ? with an empty query
    (the draft states that these are equivalent)."""
    scheme, netloc, url, params, query, fragment = data
    # re-attach path params (';...') to the path before unsplitting
    path = "%s;%s" % (url, params) if params else url
    return urlunsplit((scheme, netloc, path, query, fragment))
def urlunsplit(data):
    """Combine the elements of a tuple as returned by urlsplit() into a
    complete URL as a string. The data argument can be any five-item iterable.
    This may result in a slightly different, but equivalent URL, if the URL that
    was parsed originally had unnecessary delimiters (for example, a ? with an
    empty query; the RFC states that these are equivalent)."""
    scheme, netloc, path, query, fragment = data
    # '//' + netloc is emitted when a netloc exists, or when the scheme is
    # known to use one and the path doesn't already start with '//'
    if netloc or (scheme and scheme in uses_netloc and path[:2] != '//'):
        if path and path[:1] != '/':
            path = '/' + path
        path = '//' + (netloc or '') + path
    pieces = []
    if scheme:
        pieces.append(scheme + ':')
    pieces.append(path)
    if query:
        pieces.append('?' + query)
    if fragment:
        pieces.append('#' + fragment)
    return ''.join(pieces)
def urljoin(base, url, allow_fragments=True):
    """Join a base URL and a possibly relative URL to form an absolute
    interpretation of the latter."""
    if not base:
        return url
    if not url:
        return base
    bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
        urlparse(base, '', allow_fragments)
    scheme, netloc, path, params, query, fragment = \
        urlparse(url, bscheme, allow_fragments)
    if scheme != bscheme or scheme not in uses_relative:
        # different scheme, or one that doesn't allow relative refs:
        # the second URL stands alone
        return url
    if scheme in uses_netloc:
        if netloc:
            return urlunparse((scheme, netloc, path,
                               params, query, fragment))
        netloc = bnetloc
    if path[:1] == '/':
        # absolute path: keep base netloc only
        return urlunparse((scheme, netloc, path,
                           params, query, fragment))
    if not path and not params:
        # only query and/or fragment given: inherit path (and query if empty)
        path = bpath
        params = bparams
        if not query:
            query = bquery
        return urlunparse((scheme, netloc, path,
                           params, query, fragment))
    # merge base directory with the relative path, then squash '.'/'..'
    # (pre-RFC3986 algorithm, kept for backward compatibility)
    segments = bpath.split('/')[:-1] + path.split('/')
    # XXX The stuff below is bogus in various ways...
    if segments[-1] == '.':
        segments[-1] = ''
    while '.' in segments:
        segments.remove('.')
    while 1:
        i = 1
        n = len(segments) - 1
        while i < n:
            if (segments[i] == '..'
                and segments[i-1] not in ('', '..')):
                del segments[i-1:i+1]
                break
            i = i+1
        else:
            break
    if segments == ['', '..']:
        segments[-1] = ''
    elif len(segments) >= 2 and segments[-1] == '..':
        segments[-2:] = ['']
    return urlunparse((scheme, netloc, '/'.join(segments),
                       params, query, fragment))
def urldefrag(url):
    """Removes any existing fragment from URL.

    Returns a tuple of the defragmented URL and the fragment.  If
    the URL contained no fragments, the second element is the
    empty string.
    """
    if '#' not in url:
        return url, ''
    scheme, netloc, path, params, query, frag = urlparse(url)
    defragged = urlunparse((scheme, netloc, path, params, query, ''))
    return defragged, frag
# unquote method for parse_qs and parse_qsl
# Cannot use directly from urllib as it would create a circular reference
# because urllib uses urlparse methods (urljoin). If you update this function,
# update it also in urllib. This code duplication does not exist in Python 3.
# Map every 2-hex-digit string (upper/lower/mixed case) to its character,
# e.g. '20' -> ' '; used by unquote() to decode %XX escapes with one lookup.
_hexdig = '0123456789ABCDEFabcdef'
_hextochr = dict((a+b, chr(int(a+b,16)))
                 for a in _hexdig for b in _hexdig)
def unquote(s):
    """unquote('abc%20def') -> 'abc def'."""
    res = s.split('%')
    # fastpath
    if len(res) == 1:
        return s
    s = res[0]
    for item in res[1:]:
        try:
            # first two chars after '%' are the hex escape
            s += _hextochr[item[:2]] + item[2:]
        except KeyError:
            # not a valid two-digit hex escape: keep the '%' literally
            s += '%' + item
        except UnicodeDecodeError:
            # Python 2 only: appending a high byte to a unicode string;
            # rebuild the character with unichr() instead
            s += unichr(int(item[:2], 16)) + item[2:]
    return s
def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
    """Parse a query given as a string argument.

    Arguments:

    qs: percent-encoded query string to be parsed

    keep_blank_values: flag indicating whether blank values in
        percent-encoded queries should be treated as blank strings.
        A true value indicates that blanks should be retained as
        blank strings.  The default false value indicates that
        blank values are to be ignored and treated as if they were
        not included.

    strict_parsing: flag indicating what to do with parsing errors.
        If false (the default), errors are silently ignored.
        If true, errors raise a ValueError exception.

    Returns a dict mapping each name to the list of values seen for it.
    """
    # Renamed the accumulator from 'dict' (which shadowed the builtin) and
    # replaced the membership test + branch with setdefault.
    result = {}
    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
        result.setdefault(name, []).append(value)
    return result
def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
    """Parse a query given as a string argument.

    Arguments:

    qs: percent-encoded query string to be parsed

    keep_blank_values: flag indicating whether blank values in
        percent-encoded queries should be treated as blank strings.  A
        true value indicates that blanks should be retained as blank
        strings.  The default false value indicates that blank values
        are to be ignored and treated as if they were not included.

    strict_parsing: flag indicating what to do with parsing errors. If
        false (the default), errors are silently ignored. If true,
        errors raise a ValueError exception.

    Returns a list, as G-d intended.
    """
    # Pairs may be separated by either '&' or ';'.
    pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
    r = []
    for name_value in pairs:
        if not name_value and not strict_parsing:
            continue
        nv = name_value.split('=', 1)
        if len(nv) != 2:
            if strict_parsing:
                # Call form of raise: valid in both Python 2 and Python 3
                # (the old "raise ValueError, msg" statement form is a
                # SyntaxError under Python 3).
                raise ValueError("bad query field: %r" % (name_value,))
            # Handle case of a control-name with no equal sign.
            if keep_blank_values:
                nv.append('')
            else:
                continue
        if len(nv[1]) or keep_blank_values:
            # '+' encodes a space in query strings; decode it before
            # percent-unquoting.
            name = unquote(nv[0].replace('+', ' '))
            value = unquote(nv[1].replace('+', ' '))
            r.append((name, value))
    return r
|
huran2014/huran.github.io
|
wot_gateway/usr/lib/python2.7/urlparse.py
|
Python
|
gpl-2.0
| 14,414
|
from bibliopixel.animation.strip import Strip
from bibliopixel.colors import COLORS
from bibliopixel.colors.arithmetic import color_scale
class ColorFade(Strip):
    """Fill the whole strip with one palette color at a time, pulsing its
    brightness up and back down before moving on to the next color."""

    COLOR_DEFAULTS = ('colors', [COLORS.Red]),

    def wave_range(self, start, peak, step):
        # Ascending ramp from start to peak, followed by its mirror image
        # (the peak itself is not repeated on the way back down).
        rising = list(range(start, peak + 1, step))
        return rising + rising[-2::-1]

    def __init__(self, layout, step=5, start=0, end=-1, **kwds):
        super().__init__(layout, start, end, **kwds)
        # Precompute the brightness wave once; `step` controls its coarseness.
        self._levels = self.wave_range(30, 255, step)
        self._level_count = len(self._levels)

    def pre_run(self):
        self._step = 0

    def step(self, amt=1):
        # One full brightness wave per palette color.
        color_index, level_index = divmod(self._step, self._level_count)
        shade = color_scale(self.palette(color_index),
                            self._levels[level_index])
        self.layout.fill(shade, self._start, self._end)
        self._step += amt
|
ManiacalLabs/BiblioPixelAnimations
|
BiblioPixelAnimations/strip/ColorFade.py
|
Python
|
mit
| 973
|
from unittest import TestCase
import re
from scrapy.http import Response, Request
from scrapy.spider import Spider
from scrapy.contrib.downloadermiddleware.cookies import CookiesMiddleware
class CookiesMiddlewareTest(TestCase):
    """Tests for CookiesMiddleware: cookie storage, merging back into the
    jar, per-request opt-out, and multiple jars keyed via request meta.

    Changes from the previous revision: the deprecated ``assertEquals``
    alias is replaced by ``assertEqual`` (the file already mixed both), and
    the regex below uses a raw string (``'\\s'`` in a plain string is an
    invalid escape sequence under Python 3.6+).
    """

    def assertCookieValEqual(self, first, second, msg=None):
        # Compare Cookie header values ignoring pair order, which is not
        # guaranteed by the cookie jar.
        cookievaleq = lambda cv: re.split(r';\s*', cv)
        return self.assertEqual(
            sorted(cookievaleq(first)),
            sorted(cookievaleq(second)), msg)

    def setUp(self):
        self.spider = Spider('foo')
        self.mw = CookiesMiddleware()

    def tearDown(self):
        del self.mw

    def test_basic(self):
        headers = {'Set-Cookie': 'C1=value1; path=/'}
        req = Request('http://scrapytest.org/')
        assert self.mw.process_request(req, self.spider) is None
        assert 'Cookie' not in req.headers
        res = Response('http://scrapytest.org/', headers=headers)
        assert self.mw.process_response(req, res, self.spider) is res
        #assert res.cookies
        req2 = Request('http://scrapytest.org/sub1/')
        assert self.mw.process_request(req2, self.spider) is None
        self.assertEqual(req2.headers.get('Cookie'), "C1=value1")

    def test_dont_merge_cookies(self):
        # merge some cookies into jar
        headers = {'Set-Cookie': 'C1=value1; path=/'}
        req = Request('http://scrapytest.org/')
        res = Response('http://scrapytest.org/', headers=headers)
        assert self.mw.process_response(req, res, self.spider) is res
        # test Cookie header is not set on the request
        req = Request('http://scrapytest.org/dontmerge', meta={'dont_merge_cookies': 1})
        assert self.mw.process_request(req, self.spider) is None
        assert 'Cookie' not in req.headers
        # check that returned cookies are not merged back to jar
        res = Response('http://scrapytest.org/dontmerge', headers={'Set-Cookie': 'dont=mergeme; path=/'})
        assert self.mw.process_response(req, res, self.spider) is res
        # check that cookies are merged back
        req = Request('http://scrapytest.org/mergeme')
        assert self.mw.process_request(req, self.spider) is None
        self.assertEqual(req.headers.get('Cookie'), 'C1=value1')
        # check that cookies are merged when dont_merge_cookies is passed as 0
        req = Request('http://scrapytest.org/mergeme', meta={'dont_merge_cookies': 0})
        assert self.mw.process_request(req, self.spider) is None
        self.assertEqual(req.headers.get('Cookie'), 'C1=value1')

    def test_complex_cookies(self):
        # merge some cookies into jar
        cookies = [{'name': 'C1', 'value': 'value1', 'path': '/foo', 'domain': 'scrapytest.org'},
                   {'name': 'C2', 'value': 'value2', 'path': '/bar', 'domain': 'scrapytest.org'},
                   {'name': 'C3', 'value': 'value3', 'path': '/foo', 'domain': 'scrapytest.org'},
                   {'name': 'C4', 'value': 'value4', 'path': '/foo', 'domain': 'scrapy.org'}]
        req = Request('http://scrapytest.org/', cookies=cookies)
        self.mw.process_request(req, self.spider)
        # embed C1 and C3 for scrapytest.org/foo
        req = Request('http://scrapytest.org/foo')
        self.mw.process_request(req, self.spider)
        assert req.headers.get('Cookie') in ('C1=value1; C3=value3', 'C3=value3; C1=value1')
        # embed C2 for scrapytest.org/bar
        req = Request('http://scrapytest.org/bar')
        self.mw.process_request(req, self.spider)
        self.assertEqual(req.headers.get('Cookie'), 'C2=value2')
        # embed nothing for scrapytest.org/baz
        req = Request('http://scrapytest.org/baz')
        self.mw.process_request(req, self.spider)
        assert 'Cookie' not in req.headers

    def test_merge_request_cookies(self):
        req = Request('http://scrapytest.org/', cookies={'galleta': 'salada'})
        assert self.mw.process_request(req, self.spider) is None
        self.assertEqual(req.headers.get('Cookie'), 'galleta=salada')
        headers = {'Set-Cookie': 'C1=value1; path=/'}
        res = Response('http://scrapytest.org/', headers=headers)
        assert self.mw.process_response(req, res, self.spider) is res
        req2 = Request('http://scrapytest.org/sub1/')
        assert self.mw.process_request(req2, self.spider) is None
        self.assertCookieValEqual(req2.headers.get('Cookie'), "C1=value1; galleta=salada")

    def test_cookiejar_key(self):
        # each 'cookiejar' meta value selects an independent jar
        req = Request('http://scrapytest.org/', cookies={'galleta': 'salada'}, meta={'cookiejar': "store1"})
        assert self.mw.process_request(req, self.spider) is None
        self.assertEqual(req.headers.get('Cookie'), 'galleta=salada')
        headers = {'Set-Cookie': 'C1=value1; path=/'}
        res = Response('http://scrapytest.org/', headers=headers, request=req)
        assert self.mw.process_response(req, res, self.spider) is res
        req2 = Request('http://scrapytest.org/', meta=res.meta)
        assert self.mw.process_request(req2, self.spider) is None
        self.assertCookieValEqual(req2.headers.get('Cookie'), 'C1=value1; galleta=salada')
        req3 = Request('http://scrapytest.org/', cookies={'galleta': 'dulce'}, meta={'cookiejar': "store2"})
        assert self.mw.process_request(req3, self.spider) is None
        self.assertEqual(req3.headers.get('Cookie'), 'galleta=dulce')
        headers = {'Set-Cookie': 'C2=value2; path=/'}
        res2 = Response('http://scrapytest.org/', headers=headers, request=req3)
        assert self.mw.process_response(req3, res2, self.spider) is res2
        req4 = Request('http://scrapytest.org/', meta=res2.meta)
        assert self.mw.process_request(req4, self.spider) is None
        self.assertCookieValEqual(req4.headers.get('Cookie'), 'C2=value2; galleta=dulce')
        #cookies from hosts with port
        req5_1 = Request('http://scrapytest.org:1104/')
        assert self.mw.process_request(req5_1, self.spider) is None
        headers = {'Set-Cookie': 'C1=value1; path=/'}
        res5_1 = Response('http://scrapytest.org:1104/', headers=headers, request=req5_1)
        assert self.mw.process_response(req5_1, res5_1, self.spider) is res5_1
        req5_2 = Request('http://scrapytest.org:1104/some-redirected-path')
        assert self.mw.process_request(req5_2, self.spider) is None
        self.assertEqual(req5_2.headers.get('Cookie'), 'C1=value1')
        req5_3 = Request('http://scrapytest.org/some-redirected-path')
        assert self.mw.process_request(req5_3, self.spider) is None
        self.assertEqual(req5_3.headers.get('Cookie'), 'C1=value1')
        #skip cookie retrieval for not http request
        req6 = Request('file:///scrapy/sometempfile')
        assert self.mw.process_request(req6, self.spider) is None
        self.assertEqual(req6.headers.get('Cookie'), None)
|
scrapinghub/scrapy
|
tests/test_downloadermiddleware_cookies.py
|
Python
|
bsd-3-clause
| 6,842
|
#pylint: disable=invalid-name,too-many-branches
"""A script for generating DataCite DOI's for Mantid releases, to be called by
a Jenkins job during the release process. When given a major, minor and patch
release number along with username and password credentials, it will build a
DOI of the form "10.5286/Software/Mantid[major].[minor].[patch]", and post it
to the DataCite DOI API.
A special one-time "main" landing page DOI will be created using:
python doi.py --username=[] --password=[] --main 3.0.0
Then at every release, the script will run again without the "--main" flag to
generate a DOI pointing to the release notes for that particular version.
If a password is not provided then it is prompted.
Using the "--test" flag will run the script and post DOI's to the DataCite test
server at https://test.datacite.org/mds/doi/10.5286/Software/.
Using the "--debug" flag should print out some (hopefully) useful extra info
about what is going on under the hood.
Using the "--delete" flag will the DOI metadata with the given details
"inactive", as well as pointing the DOI to a "DOI invalid" page.
NOTES:
- A requirement for the script to run is for cURL to be installed and its
executable to be on the PATH.
- If your connection requires a proxy to be manually configured then ensure the
http_proxy environment variable is set when the script is executed.
- The "www.mantidproject.org" domain had to be registered with DataCite (on
both the test server and the main server) before a valid DOI could be
created. This was done through the British Library, via Tom Griffin.
- Mantid DOIs will be "linked" using the relationship identifiers available in
the metadata schema. Each incremental-release DOI will be linked to the
previous DOI using the "IsNextVersionOf" field. The metadata for the
previous DOI will then have to be changed to include a "IsPreviousVersionOf"
field. Each incremental-release DOI will also be linked to the "main" Mantid
DOI via a "IsPartOf" field. The main DOI itself will have no relationship
identifiers.
USEFUL LINKS:
- The DataCite DOI API documentation can be found at:
https://mds.datacite.org/static/apidoc
- Example Python code for submitting DOI's and metadata:
https://github.com/datacite/mds/blob/master/client/python/put_doi.py
https://github.com/datacite/mds/blob/master/client/python/put_metadata.py
- HTTP status codes:
http://docs.python.org/2/library/httplib.html#httplib.HTTPS_PORT
"""
import argparse
import getpass
import os
import xml.etree.ElementTree as ET
import subprocess
import re
from datetime import date
import authors
# Successful responses from the DataCite servers appear to only come in one of
# two forms:
# - 'OK'
# - 'OK ([DOI])'
# Matched with re.match against the first line of cURL's output below.
SUCCESS_RESPONSE = r'^OK( \((.+)\))?$'
# Point all "deleted" DOIs to here:
INVALID_URL = 'http://www.datacite.org/invalidDOI'
def build_xml_form(doi, relationships, creator_name_list, version_str):
    '''Builds the xml form containing the metadata for the DOI. Where helpful,
    comments showing the definition / allowed values of the data fields have
    been taken from section 2.3 of:
    http://schema.datacite.org/meta/kernel-3/doc/DataCite-MetadataKernel_v3.0.pdf

    The decision has been made to not use the optional "contributors" field,
    since creators works just as well and is mandatory anyway.

    Args:
        doi: the DOI string being described, e.g. "10.5286/Software/Mantid".
        relationships: dict mapping a related DOI to its DataCite
            relationType string (e.g. "IsPartOf"); may be empty.
        creator_name_list: iterable of author names for <creators>.
        version_str: version string, or a falsy value to omit both the
            <version> element and the version suffix in the title.

    Returns:
        The UTF-8 serialised XML document (as returned by ET.tostring).
    '''
    # The root resource node must contain the various schema information.
    root = ET.Element('resource')
    root.set('xmlns', 'http://datacite.org/schema/kernel-3')
    root.set('xmlns:xsi', 'http://www.w3.org/2001/XMLSchema-instance')
    root.set('xsi:schemaLocation', 'http://datacite.org/schema/kernel-3 ht' +
             'tp://schema.datacite.org/meta/kernel-3' +
             '/metadata.xsd')
    # "The identifier is a unique string that identifies a resource." In our
    # case, the actual DOI. "Format should be '10.1234/foo'."
    identifier = ET.SubElement(root, 'identifier')
    identifier.text = doi
    identifier.set('identifierType', 'DOI')
    # Creators are defined as "the main researchers involved in producing the
    # data, or the authors of the publication, in priority order". Allowed
    # values are "a corporate/institutional or personal name".
    #
    # Use all authors up to and including the version tag if creating the
    # "main" DOI, else only use authors who contributed to that version.
    creators = ET.SubElement(root, 'creators')
    for creator_name in creator_name_list:
        creator = ET.SubElement(creators, 'creator')
        ET.SubElement(creator, 'creatorName').text = creator_name
    # Titles are defined as a "name or title by which a resource is known".
    title_version = " " + version_str if version_str else ""
    # Previously a one-element tuple built via a trailing comma; an explicit
    # list is equivalent for the loop below and harder to break by accident.
    title_text_list = [
        'Mantid%s: Manipulation and Analysis' % title_version +
        ' Toolkit for Instrument Data.',
    ]
    titles = ET.SubElement(root, 'titles')
    for title_text in title_text_list:
        ET.SubElement(titles, 'title').text = title_text
    # "The name of the entity that holds, archives, publishes, prints,
    # distributes, releases, issues, or produces the resource. This property
    # will be used to formulate the citation, so consider the prominence of
    # the role."
    ET.SubElement(root, 'publisher').text = 'Mantid Project'
    # "The year when the data was or will be made publicly available."
    ET.SubElement(root, 'publicationYear').text = str(date.today().year)
    # "Subject, keyword, classification code, or key phrase describing the
    # resource."
    subject_text_list = [
        'Neutron Scattering',
        'Muon Spin Resonance',
        'Data Analysis'
    ]
    subjects = ET.SubElement(root, 'subjects')
    for subject_text in subject_text_list:
        ET.SubElement(subjects, 'subject').text = subject_text
    # "The primary language of the resource."
    ET.SubElement(root, 'language').text = 'en'
    # "A description of the resource. The format is open, but the
    # preferred format is a single term of some detail so that a pair can be
    # formed with the sub-property." Just using the general type "software"
    # seems good enough for our purposes.
    resource_type = ET.SubElement(root, 'resourceType')
    resource_type.text = ''
    resource_type.set('resourceTypeGeneral', 'Software')
    # "The version number of the resource." Suggested practice is to "register
    # a new identifier for a major version change." We'll be ignoring this
    # as we're having a new DOI for every major/minor/patch release.
    if version_str:
        ET.SubElement(root, 'version').text = version_str
    # "Identifiers of related resources. These must be globally unique
    # identifiers."
    if relationships:
        related_identifiers = ET.SubElement(root, 'relatedIdentifiers')
        # The loop variable was previously also named "doi", silently
        # shadowing the doi parameter above; renamed to avoid the clobbering.
        for related_doi, relation_type in relationships.items():
            related_identifier = ET.SubElement(
                related_identifiers, 'relatedIdentifier'
            )
            related_identifier.text = related_doi
            related_identifier.set('relatedIdentifierType', 'DOI')
            related_identifier.set('relationType', relation_type)
    # "Provide a rights management statement for the resource or reference a
    # service providing such information. Include embargo information if
    # applicable. Use the complete title of a license and include version
    # information if applicable."
    rights_list = ET.SubElement(root, 'rightsList')
    rights = ET.SubElement(rights_list, 'rights')
    rights.text = 'GNU General Public Release (Version 3)'
    rights.set('rightsURI', 'http://www.gnu.org/licenses/gpl.html')
    # "All additional information that does not fit in any of the other
    # categories. May be used for technical information."
    descriptions = ET.SubElement(root, 'descriptions')
    description = ET.SubElement(descriptions, 'description')
    description.text = 'Mantid: A high performance framework for the ' + \
                       'reduction and analysis of muon spin resonance and ' + \
                       'neutron scattering data.'
    description.set('descriptionType', 'Abstract')
    return ET.tostring(root, encoding='utf-8')
def _http_request(body, method, url, options):
    '''Issue an HTTP request with the given options.

    We are forced to use a command line tool for this rather than use the
    in-built Python libraries since httplib, urllib and urllib2 all seem to
    have problems using HTTPS through the proxy at RAL.  HTTP works fine,
    but the DOI API is encrypted so that is not an option.

    We prefer cURL to wget since it exists on many Linux machines and even
    comes bundled with Git Bash for Windows!  Some good info on scripting
    with cURL can be found at:

    http://curl.haxx.se/docs/httpscripting.html

    Args (all required):
        body: the request body (sent via cURL's --data).
        method: the HTTP verb, e.g. 'GET', 'PUT', 'DELETE'.
        url: the full URL to hit.
        options: parsed argparse namespace; username, password and debug
            are read from it.

    Returns the list of lines printed by cURL to stdout; callers inspect
    result[0] (the server's first response line).
    '''
    args = [
        'curl',
        '--user', options.username + ':' + options.password,
        '--header', 'Content-Type:text/plain;charset=UTF-8',
        # The bodies of HTTP messages must be encoded:
        '--data', body.encode('utf-8'),
        '--request', method,
    ]
    # Respect a manually-configured proxy, if any (see module docstring).
    if 'http_proxy' in os.environ:
        args.extend(['--proxy', os.environ['http_proxy']])
    # Set how loud cURL should be while running.
    if options.debug:
        args.append('--verbose')
    else:
        args.append('--silent')
    args.append(url)
    proc = subprocess.Popen(args,stdout=subprocess.PIPE)
    result = proc.stdout.readlines()
    print "Server Response: " + str(result)
    return result
def delete_doi(base, doi, options):
    '''Will attempt to delete the given DOI.  Note that this does not actually
    remove the DOI from the DataCite servers; it makes its metadata "inactive"
    and points the DOI to a "DOI invalid" page.

    Raises RuntimeError if either server call does not return an OK response.
    '''
    # Step 1: DELETE the metadata, which DataCite treats as "make inactive".
    print "\nAttempting to delete the meta data for:" + doi
    result = _http_request(
        body = '',
        method = 'DELETE',
        url = base + "metadata/" + doi,
        options = options
    )
    if not re.match(SUCCESS_RESPONSE, result[0]):
        raise RuntimeError('Deleting metadata unsuccessful. Quitting.')

    # Step 2: re-point the DOI itself at the generic "invalid DOI" page.
    print "\nAttempting to point " + doi + " to invalid page."
    result = _http_request(
        body = 'doi=' + doi + '\n' + 'url=' + INVALID_URL,
        method = "PUT",
        url = base + "doi/" + doi,
        options = options
    )
    if not re.match(SUCCESS_RESPONSE, result[0]):
        raise RuntimeError('Pointing DOI to invalid page was unsuccessful.')
def create_or_update_metadata(xml_form, base, doi, options):
    '''Attempts to create some new metadata for the doi of the given address.
    Metadata must be created before a doi can be created.  If the metadata
    already exists, then it will simply be updated (PUT semantics on the
    DataCite API).

    Raises RuntimeError if the server does not return an OK response.
    '''
    print "\nAttempting to create / update metadata:"
    result = _http_request(
        body = xml_form,
        method = "PUT",
        url = base + "metadata/" + doi,
        options = options
    )
    if not re.match(SUCCESS_RESPONSE, result[0]):
        raise RuntimeError('Creation/updating metadata unsuccessful. Quitting.')
def create_or_update_doi(base, doi, destination, options):
    '''Attempts to create a new DOI of the given address.  Metadata must be
    created before this can be successful.  If the doi already exists, then it
    will simply be updated (PUT semantics on the DataCite API).

    Raises RuntimeError if the server does not return an OK response.
    '''
    print "\nAttempting to create / update the following DOI:"
    print 'DOI = ' + doi
    print 'URL = ' + destination
    # The body format ("doi=...\nurl=...") is prescribed by the DataCite API.
    result = _http_request(
        body = 'doi=' + doi + '\n' + 'url=' + destination,
        method = "PUT",
        url = base + "doi/" + doi,
        options = options
    )
    if not re.match(SUCCESS_RESPONSE, result[0]):
        raise RuntimeError('Creation/updating DOI unsuccessful. Quitting.')
def check_if_doi_exists(base, doi, destination, options):
    '''Attempts to check if the given doi exists by querying the server and
    seeing if what comes back is the expected DOI destination.  Returns True
    if a doi is found (and the destination returned by the server is the same
    as the given destination), else False.  Throws if the response from the
    server is unrecognised, or if there is no response at all.
    '''
    print "\nChecking if \"" + base + "doi/" + doi + "\" DOI exists."
    result = _http_request(
        body = '',
        method = 'GET',
        url = base + "doi/" + doi,
        options = options
    )
    # result[0] is the first line of cURL's output: either an error marker,
    # the "deleted DOI" placeholder URL, or the registered destination URL.
    if result[0] == 'DOI not found' or result[0] == INVALID_URL:
        print "\"" + doi + "\" does not exist"
        return False
    elif result[0] == destination:
        print "DOI found."
        return True
    else:
        raise RuntimeError(
            "Unexpected result back from server: \"" + result[0] + "\"")
def check_for_curl():
    '''Check that cURL can be invoked from the command line, raising a
    RuntimeError if it cannot.

    The rest of this script shells out to cURL for all HTTP(S) traffic
    (see _http_request), so this is verified once up front.
    '''
    # 'curl --version' exits with code 0 iff the executable runs; an OSError
    # from Popen means it was not found on the PATH at all.
    try:
        proc = subprocess.Popen(['curl', '--version'], stdout=subprocess.PIPE)
        proc.wait()
        proc.stdout.close()  # release the pipe we never read
        found = proc.returncode == 0
    except OSError:
        found = False
    if not found:
        raise RuntimeError('This script requires that cURL be installed and ' +
                           'available on the PATH.')
def get_urls_for_doi(version_str, shortened_version_str,
                     prev_version_str, shortened_prev_version_str):
    '''Return (destination, prev_destination): the release-notes URLs that the
    DOI for this version and for the previous version should resolve to.

    Beginning with v3.6.0 the release notes moved to docs.mantidproject.org but
    the transition happened after the release so the following rules apply
     - all versions 3.7.0 & above have release notes on a versioned url at docs.mantidproject.org
     - 3.5.2, 3.6.0, 3.6.1 have been manually inserted but the notes only exist in nightly builds
     - 3.5.1 and before all point to the wiki

    Bug fix: the previous revision left `destination` (and `prev_destination`)
    unassigned for versions 3.5.0/3.5.1 — the `minor == 5` branch only handled
    `patch >= 2` and had no fallback — causing an UnboundLocalError at the
    return.  Those versions now fall through to the wiki URLs, as the rules
    above intend.
    '''
    major, minor, patch = authors.get_major_minor_patch(version_str)
    sphinx_rel_notes_url = 'http://docs.mantidproject.org/{0}/release/{1}/index.html'
    wiki_rel_notes_url = 'http://www.mantidproject.org/Release_Notes_{0}'
    if major > 3 or (major == 3 and minor >= 7):
        # 3.7.0 onwards: versioned pages on docs.mantidproject.org.
        destination = sphinx_rel_notes_url.format('v' + version_str, 'v' + version_str)
        prev_destination = sphinx_rel_notes_url.format('v' + prev_version_str, 'v' + prev_version_str)
    elif major == 3 and minor == 5 and patch >= 2:
        # 3.5.2 only exists in the nightly docs; its predecessor (3.5.1) is
        # on the wiki, while any later 3.5.x predecessor would be nightly.
        destination = sphinx_rel_notes_url.format('nightly', 'v' + version_str)
        if patch == 2:
            prev_destination = wiki_rel_notes_url.format(shortened_prev_version_str)
        else:
            prev_destination = sphinx_rel_notes_url.format('nightly', 'v' + prev_version_str)
    elif major == 3 and minor == 6:
        if patch >= 1:
            destination = sphinx_rel_notes_url.format('v' + version_str, 'v' + version_str)
        else:
            destination = sphinx_rel_notes_url.format('nightly', 'v' + version_str)
        if patch >= 2:
            prev_destination = sphinx_rel_notes_url.format('v' + prev_version_str, 'v' + prev_version_str)
        else:
            prev_destination = sphinx_rel_notes_url.format('nightly', 'v' + prev_version_str)
    else:
        # 3.5.1 and earlier (including 3.5.0/3.5.1): wiki pages.
        destination = wiki_rel_notes_url.format(shortened_version_str)
        prev_destination = wiki_rel_notes_url.format(shortened_prev_version_str)
    return destination, prev_destination
def run(args):
    '''Creating a usable DOI is (for our purposes at least) a two step
    process: metadata has to be constructed and then sent to the server, and
    then the DOI itself has to be sent once the metadata is in place.

    If pre-existing DOI's or metadata are submitted then they will overwrite
    what was there previously.

    We also have to amend the metadata for the previous DOI (if one exists),
    so that we can set up a IsPreviousVersionOf/IsNewVersionOf relationship
    between the two DOIs.

    Args:
        args: the parsed argparse namespace (version, username, password,
            test, debug, main, delete).
    '''
    # Get the git tag as well as the version before it if this is an incremental release.
    version_str = args.version
    shortened_version_str = authors.get_shortened_version_string(version_str)
    tag = authors.find_tag(version_str)
    if not args.main:
        prev_tag = authors.get_previous_tag(tag)
        prev_version_str = authors.get_version_from_git_tag(prev_tag)
        shortened_prev_version_str = authors.get_shortened_version_string(prev_version_str)
    main_doi = '10.5286/Software/Mantid'
    if args.main:
        doi = main_doi
        prev_doi = ''
        has_previous_version = False
    else: # Incremental release DOI.
        prev_doi = '10.5286/Software/Mantid' + shortened_prev_version_str
        doi = '10.5286/Software/Mantid' + shortened_version_str
    # Decide where the DOI should resolve to.
    if args.main:
        destination = 'http://www.mantidproject.org'
    else:
        destination, prev_destination = get_urls_for_doi(version_str, shortened_version_str,
                                                         prev_version_str, shortened_prev_version_str)
    if args.test:
        server_url_base = 'https://test.datacite.org/mds/'
    else:
        server_url_base = 'https://mds.datacite.org/'
    # Only prompt for a password if it was not given on the command line.
    if not args.password:
        args.password = getpass.getpass()
    if args.delete:
        delete_doi(server_url_base, doi, args)
        quit()
    # If the user ran this script with the --main flag, then all we need to do
    # is create a single, unlinked DOI to the main project page.
    if args.main:
        creator_name_list = authors.authors_up_to_git_tag(tag)
        # In the case of the main DOI we need to add the whitelisted names too.
        creator_name_list = sorted(set(creator_name_list + authors.whitelist))
        xml_form = build_xml_form(doi, {}, creator_name_list, None)
        create_or_update_metadata(xml_form, server_url_base, doi, args)
        create_or_update_doi(server_url_base, doi, destination, args)
    # Else it's an incremental-release DOI that we need to make.
    else:
        has_previous_version = check_if_doi_exists(
            server_url_base,
            prev_doi,
            prev_destination,
            args
        )
        relationships = { main_doi : 'IsPartOf' }
        if has_previous_version:
            relationships[prev_doi] = 'IsNewVersionOf'
        creator_name_list = authors.authors_under_git_tag(tag)
        xml_form = build_xml_form(
            doi,
            relationships,
            creator_name_list,
            version_str
        )
        # Create/update the metadata and DOI.
        create_or_update_metadata(xml_form, server_url_base, doi, args)
        create_or_update_doi(server_url_base, doi, destination, args)
        # Create/update the metadata and DOI of the previous version, if it
        # was found to have a DOI.  This adds the IsPreviousVersionOf link
        # back to the DOI just created.
        if has_previous_version:
            prev_relationships = {
                main_doi : 'IsPartOf',
                doi : 'IsPreviousVersionOf'
            }
            prev_creator_name_list = authors.authors_under_git_tag(prev_tag)
            prev_xml_form = build_xml_form(
                prev_doi,
                prev_relationships,
                prev_creator_name_list,
                prev_version_str
            )
            create_or_update_metadata(
                prev_xml_form,
                server_url_base,
                prev_doi,
                args
            )
    # Print out a custom success message, depending on the initial options.
    # NOTE(review): with --main, prev_doi is '' so prev_meta_add is just the
    # base metadata URL; it is never printed in that branch since
    # has_previous_version is False.
    if not args.test:
        method = "resolved"
        doi_add = 'http://dx.doi.org/' + doi
        meta_add = 'https://mds.datacite.org/metadata/' + doi
        prev_meta_add = 'https://mds.datacite.org/metadata/' + prev_doi
    else:
        method = "inspected"
        doi_add = 'https://test.datacite.org/mds/doi/' + doi
        meta_add = 'https://test.datacite.org/mds/metadata/' + doi
        prev_meta_add = 'https://test.datacite.org/mds/metadata/' + prev_doi
    if has_previous_version:
        message = "\nSUCCESS!" + \
                  "\nThe DOI can be %s at \"%s\"." % (method, doi_add) + \
                  "\nThe new metadata can be inspected at \"%s\"." % (meta_add) + \
                  "\nThe previous version's metadata can be inspected at" + \
                  "\"%s\"." % (prev_meta_add)
    else:
        message = "\nSUCCESS!" + \
                  "\nThe DOI can be %s at \"%s\"." % (method, doi_add) + \
                  "\nThe metadata can be inspected at \"%s\"." % (meta_add)
    print message
    quit()
if __name__ == "__main__":
    # Fail fast if cURL (used for all HTTP traffic) is unavailable.
    check_for_curl()
    parser = argparse.ArgumentParser(
        description="Script to generate the DOI needed for a Mantid release."
    )
    # REQUIRED
    parser.add_argument(
        'version',
        type=str,
        help='Version of Mantid whose DOI is to be created/updated in the form "major.minor.patch"'
    )
    parser.add_argument(
        '--username',
        type=str,
        required=True,
        help='Username to access DOI API.'
    )
    # OPTIONAL
    parser.add_argument(
        '--password',
        type=str,
        help='Password for the server. If missing then a prompt is displayed requesting input'
    )
    parser.add_argument(
        '--test',
        action='store_true',
        help='Send submissions to the test server to trial run the script.'
    )
    parser.add_argument(
        '--debug',
        action='store_true',
        help='Turn debug mode on. Basically, makes cURL more talkative.'
    )
    parser.add_argument(
        '--main',
        action='store_true',
        help='Create the "main" DOI for Mantid. Once it is created, this ' +
             'will only have to run again if it needs to be updated.'
    )
    parser.add_argument(
        '--delete',
        action='store_true',
        help='Delete ("make inactive") the DOI metadata with the given ' +
             'details. Note that this does NOT delete the DOI.'
    )
    run(parser.parse_args())
|
wdzhou/mantid
|
tools/DOI/doi.py
|
Python
|
gpl-3.0
| 22,271
|
# coding=utf-8
# Copyright 2022 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Basic tests for emily's model."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensor2tensor.models.video import emily
from tensor2tensor.models.video import tests_utils
import tensorflow.compat.v1 as tf
class NextFrameTest(tests_utils.BaseNextFrameTest):
  """Basic smoke tests for the "emily" next-frame video model."""

  def testEmily(self):
    """Run NextFrameEmily through the base class's size-variation harness."""
    self.TestOnVariousInputOutputSizes(
        emily.next_frame_emily(),
        emily.NextFrameEmily,
        1)  # third argument's meaning is defined in tests_utils (not visible here)


if __name__ == "__main__":
  tf.test.main()
|
tensorflow/tensor2tensor
|
tensor2tensor/models/video/emily_test.py
|
Python
|
apache-2.0
| 1,125
|
#!/usr/bin/env python
from data.hdf5 import taxi_it
from visualizer import Vlist, Point
# Number of destination points to collect before stopping.
_sample_size = 5000

if __name__ == '__main__':
    points = Vlist(cluster=True)
    for trip in taxi_it('train'):
        latitudes = trip['latitude']
        if len(latitudes) > 0:
            # The last coordinate of each trip is its destination.
            points.append(Point(latitudes[-1], trip['longitude'][-1]))
        if len(points) >= _sample_size:
            break
    points.save('destinations (cluster)')
    # Re-render the same sample as a heatmap instead of clusters.
    points.cluster = False
    points.heatmap = True
    points.save('destinations (heatmap)')
|
Saumya-Suvarna/machine-learning
|
Route_prediction/visualizer/extractor/destinations.py
|
Python
|
apache-2.0
| 521
|
"""Tests for the data_helper.check module"""
import sys, unittest
from BaseTest import BaseTestWrapper
class IsBoolTestCase(BaseTestWrapper.BaseTest):
    """Test cases for check.is_bool()."""

    def test_string(self):
        """A string is not a bool"""
        self.assertFalse(self._bt['func']('y'))

    def test_number(self):
        """A number is not a bool"""
        self.assertFalse(self._bt['func'](12345))

    def test_list(self):
        """A list is not a bool"""
        self.assertFalse(self._bt['func']([]))

    def test_bool(self):
        """A bool is accepted"""
        self.assertTrue(self._bt['func'](True))
class IsStrTestCase(BaseTestWrapper.BaseTest):
    """Test cases for check.is_str()."""

    def test_string(self):
        """A string is accepted"""
        self.assertTrue(self._bt['func']('y'))

    def test_number(self):
        """A number is not a string"""
        self.assertFalse(self._bt['func'](12345))

    def test_list(self):
        """A list is not a string"""
        self.assertFalse(self._bt['func']([]))
class IsStrEmptyTestCase(BaseTestWrapper.BaseTest):
    """Test cases for check.is_str_empty()."""

    def test_empty_string(self):
        """An empty string is accepted"""
        self.assertTrue(self._bt['func'](''))

    def test_string(self):
        """A non-empty string is rejected"""
        self.assertFalse(self._bt['func']('y'))

    def test_number(self):
        """A number is rejected"""
        self.assertFalse(self._bt['func'](12345))

    def test_list(self):
        """A list is rejected"""
        self.assertFalse(self._bt['func']([]))
class IsStrNotEmptyTestCase(BaseTestWrapper.BaseTest):
    """Test cases for check.is_str_not_empty()."""

    def test_empty_string(self):
        """An empty string is rejected"""
        self.assertFalse(self._bt['func'](''))

    def test_string(self):
        """A non-empty string is accepted"""
        self.assertTrue(self._bt['func']('y'))

    def test_number(self):
        """A number is rejected"""
        self.assertFalse(self._bt['func'](12345))

    def test_list(self):
        """A list is rejected"""
        self.assertFalse(self._bt['func']([]))
class IsIntTestCase(BaseTestWrapper.BaseTest):
    """Test cases for check.is_int()."""

    def test_string(self):
        """A string is not an int"""
        self.assertFalse(self._bt['func']('y'))

    def test_positive_int(self):
        """A positive int is accepted"""
        self.assertTrue(self._bt['func'](12345))

    def test_negative_int(self):
        """A negative int is accepted"""
        self.assertTrue(self._bt['func'](-12345))
class IsIntNotNegTestCase(BaseTestWrapper.BaseTest):
    """Test cases for check.is_int_not_neg()."""

    def test_string(self):
        """A string is rejected"""
        self.assertFalse(self._bt['func']('y'))

    def test_positive_int(self):
        """A positive int is accepted"""
        self.assertTrue(self._bt['func'](12345))

    def test_zero(self):
        """Zero is accepted"""
        self.assertTrue(self._bt['func'](0))

    def test_negative_int(self):
        """A negative int is rejected"""
        self.assertFalse(self._bt['func'](-12345))
class IsIntPosTestCase(BaseTestWrapper.BaseTest):
    """check.is_int_pos() test cases"""

    def test_string(self):
        """A string is rejected."""
        self.assertFalse(self._bt['func']('y'))

    def test_positive_int(self):
        """A positive int is accepted."""
        self.assertTrue(self._bt['func'](12345))

    def test_zero(self):
        """Zero is not strictly positive."""
        self.assertFalse(self._bt['func'](0))

    def test_negative_int(self):
        """A negative int is rejected."""
        self.assertFalse(self._bt['func'](-12345))
class IsIntNegTestCase(BaseTestWrapper.BaseTest):
    """check.is_int_neg() test cases"""
    # NOTE: the docstring previously said "is_int_pos" (copy-paste error);
    # the assertions below clearly exercise the negative-int predicate.
    def test_string(self):
        """Test if string is False"""
        x = 'y'
        self.assertFalse(self._bt['func'](x))
    def test_positive_int(self):
        """Test if a positive int is detected"""
        x = 12345
        self.assertFalse(self._bt['func'](x))
    def test_zero(self):
        """Test if zero is detected"""
        x = 0
        self.assertFalse(self._bt['func'](x))
    def test_negative_int(self):
        """Test if a negative int is detected"""
        x = -12345
        self.assertTrue(self._bt['func'](x))
class IsListTestCase(BaseTestWrapper.BaseTest):
    """check.is_list() test cases"""

    def test_string(self):
        """A string is not a list."""
        self.assertFalse(self._bt['func']('y'))

    def test_dict(self):
        """A dict is not a list."""
        self.assertFalse(self._bt['func']({}))

    def test_list(self):
        """An (empty) list is recognised as a list."""
        self.assertTrue(self._bt['func']([]))
class HasWhitespaceTestCase(BaseTestWrapper.BaseTest):
    """check.has_whitespace() test cases"""

    def test_space(self):
        """Strings containing any whitespace must be detected."""
        for sample in ('hello world', ' ', ' space'):
            self.assertTrue(self._bt['func'](sample))

    def test_no_space(self):
        """Strings without whitespace must not be flagged."""
        for sample in ('hello', '', 'none'):
            self.assertFalse(self._bt['func'](sample))
# Collect every TestCase defined in this module into a single suite.
loader = unittest.TestLoader()
suite = loader.loadTestsFromModule(sys.modules[__name__])
if __name__ == '__main__':
    # Manual runner (Python 2): execute the suite and dump the summary,
    # then each failure and error with a blank line between them.
    result = unittest.result.TestResult()
    suite.run(result)
    print result
    for f in result.failures:
        for t in f:
            print t
        print ''
    for e in result.errors:
        for t in e:
            print t
        print ''
|
qevo/py_data_helper
|
tests/check.py
|
Python
|
mit
| 6,291
|
class Calculator(object):
    """Minimal arithmetic helper."""

    def add(self, operanda, operandb):
        """Return the sum of *operanda* and *operandb*."""
        total = operanda + operandb
        return total
|
donlee888/JsObjects
|
Python/PythonTest02/src/calc/calculator.py
|
Python
|
mit
| 123
|
# Copyright (c) 2011 Amit Levy <amit@amitlevy.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
# modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import base
from models.person import Person
from django.utils import simplejson
class CirclesHandler(base.BaseController):
    """Serve the people belonging to a circle as JSON."""

    def show(self, circle):
        """Write a JSON document listing every person in *circle*.

        The special circle name ``'*'`` selects all of the account's people.
        """
        if circle == '*':
            people = Person.get(self.account.people)
        else:
            people = self.account.people_in(circle)
        serialized = [dict(key=str(person.key()),
                           name=person.name,
                           identifiers=person.identifiers,
                           circles=person.circles)
                      for person in people]
        self.write(simplejson.dumps(dict(people=serialized)))
|
alevy/Dunbarify
|
handlers/circles.py
|
Python
|
mit
| 1,739
|
# -*- coding: utf-8 -*-
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 0211
#
# (c) 2008 - 2010 by Alexander Schier
import random, yaml
class pyNiall:
    """Niall-style Markov chatterbot brain.

    Keeps a vocabulary (``words``) with forward (``next``) and backward
    (``prev``) adjacency sets between word ids, plus per-edge counters
    (``prob``) and per-word totals (``rank``).  Id 0 is the sentence-start
    marker ">", id -1 marks sentence end.
    """
    def __init__(self):
        self.words = [">"] #[word1, word2, word3, ...]
        self.next = {0: set()} #id: [next-id1, next-id2, ...]
        self.prob = {} #(id, next-id): times seen (probabilty = this / total)
        #these can be calculated from the vars above
        self.prev = {-1: set()} #id: [prev-id1, prev-id2, ...]
        self.rank = {} #{id: totalrank}
    def _addRelation(self, word1, word2):
        """
        #adds "hello world" and "hello user" without endrelation
        >>> n=pyNiall()
        >>> n._addRelation(">", "hello")
        >>> n._addRelation("hello", "world")
        >>> n._addRelation(">", "hello")
        >>> n._addRelation("hello", "user")
        >>> n.words
        ['>', 'hello', 'world', 'user']
        >>> n.next
        {0: set([1]), 1: set([2, 3]), 2: set([]), 3: set([])}
        >>> n.prev
        {1: set([0]), 2: set([1]), 3: set([1]), -1: set([])}
        >>> n.prob
        {(0, 1): 2, (1, 2): 1, (1, 3): 1}
        >>> n.rank
        {1: 2, 2: 1, 3: 1}
        """
        if not word1 in self.words:
            print "Error in Database: word1 not in words"
            return
        index1 = self.words.index(word1)
        #create word if needed
        if not word2 in self.words:
            self.words.append(word2) #create word (and index)
            index2 = self.words.index(word2)
            self.next[index2] = set() #create empty association set
        else:
            index2 = self.words.index(word2) #get index
        #add next relation
        if not index2 in self.next[index1]: #not associated, yet
            self.next[index1].add(index2) #add
            self.prob[(index1, index2)] = 1 #first time
        else:
            self.prob[(index1, index2)] += 1
        #add previous relation
        if not index2 in self.prev.keys():
            self.prev[index2] = set([index1])
        else:
            self.prev[index2].add(index1)
        #totalrank of word
        if not index2 in self.rank.keys():
            self.rank[index2] = 1
        else:
            self.rank[index2] += 1
    def _addEndRelation(self, word):
        """
        >>> n=pyNiall()
        >>> n._addRelation(">", "hello")
        >>> n._addRelation("hello", "world")
        >>> n._addEndRelation("world") #"hello world"
        >>> n._addRelation(">", "hello")
        >>> n._addEndRelation("hello") #"hello"
        >>> n.next
        {0: set([1]), 1: set([2, -1]), 2: set([-1])}
        >>> n.prev
        {1: set([0]), 2: set([1]), -1: set([1, 2])}
        >>> n.prob
        {(0, 1): 2, (1, 2): 1, (2, -1): 1, (1, -1): 1}
        >>> n.rank #the same as in _addRelation
        {1: 2, 2: 1}
        """
        index = self.words.index(word)
        #calculate next
        if not - 1 in self.next[index]:
            self.next[index].add(-1)
            self.prob[(index, -1)] = 1
        else:
            self.prob[(index, -1)] += 1
        #calculate prev
        if not index in self.prev[-1]:
            self.prev[-1].add(index)
    def _rankWord(self, word):
        """
        rank a word by length and probability
        """
        rank = 0
        length = len(word)
        rank = self.rank[self.words.index(word)]
        return rank + length * 0.7
    def _createRandomSentence(self, index, sentence, forward=True):
        # Weighted random walk: each candidate id is repeated as many times
        # as its transition was seen, then one is drawn uniformly.
        candidates = []
        if forward:
            for index2 in self.next[index]:
                candidates += [index2] * self.prob[(index, index2)]
        else:
            for index2 in self.prev[index]:
                candidates += [index2] * self.prob[(index2, index)]
        newindex = random.choice(candidates)
        if newindex == 0: #sentence start
            return sentence.strip()
        if newindex == -1: #sentence end
            #return sentence
            return (sentence + " " + self.words[index]).strip()
        if forward:
            if index == 0: #no ">" included
                return self._createRandomSentence(newindex, "")
            return self._createRandomSentence(newindex, sentence + " " + self.words[index])
        else:
            if index == -1: #no sentence end included
                return self._createRandomSentence(newindex, "", False)
            #attention: here we use newindex, so the current word is NOT part of the sentence,
            #while the current word IS part of the sentence when scanning forward.
            #so we can use forward+" "+backward to build a sentence
            return self._createRandomSentence(newindex, self.words[newindex] + " " + sentence, False).strip()
    def _createReply(self, msg):
        # Pick the highest-ranked already-known word of msg as context,
        # then grow the sentence backwards and forwards from it.
        words = msg.strip().split(" ")
        bestword = None
        bestwordrank = 0
        for word in words:
            #no fresh learned words as context! (else the bot just echos)
            if not self.rank[self.words.index(word)] > 1:
                continue
            rank = self._rankWord(word)
            if rank > bestwordrank:
                bestwordrank = rank
                bestword = word
        if bestword:
            index = self.words.index(bestword)
            return self._createRandomSentence(index, "", False) + " " + self._createRandomSentence(index, "")
        else:
            return self._createRandomSentence(0, "")
    def learn(self, msg):
        # Feed one lowercased sentence into the chain, word by word.
        # NOTE(review): oldword is updated even when word is empty (e.g. on
        # double spaces), which can break the relation chain -- verify intent.
        words = msg.lower().split(" ")
        oldword = ">"
        for word in words:
            word = word.strip()
            if len(word):
                self._addRelation(oldword, word)
            oldword = word
        if oldword != ">":
            self._addEndRelation(oldword)
    def reply(self, msg):
        # Learn from msg first, then generate an answer.
        self.learn(msg)
        if len(self.words) < 200: #if we reply with context to early, the bot just echos
            return self._createRandomSentence(0, "")
        else:
            return self._createReply(msg.lower()).strip()
    def import_brain(self, data):
        # Load words/next/prob from a YAML multi-document dump and rebuild
        # the derived prev/rank structures.
        # NOTE(review): the rank loop also creates rank[-1] when b == -1 and
        # -1 is not yet in rank, which _addRelation never does -- confirm.
        tmp = yaml.load_all(data)
        self.words = tmp.next()
        self.next = tmp.next()
        self.prob = tmp.next()
        #calculate the optional fields
        for id in self.next.keys():
            for id2 in self.next[id]:
                if id2 in self.prev:
                    self.prev[id2].add(id)
                else:
                    self.prev[id2] = set([id])
        for (a, b) in self.prob.keys():
            if b != -1 and b in self.rank:
                self.rank[b] += self.prob[(a, b)]
            else:
                self.rank[b] = self.prob[(a, b)]
    def export_brain(self):
        # Dump only the three primary structures; prev/rank are rebuilt on import.
        return yaml.dump_all([self.words, self.next, self.prob])
if __name__ == "__main__":
    # Run the doctests embedded in the method docstrings above.
    import doctest
    doctest.testmod()
|
Farthen/OTFBot
|
otfbot/lib/pyniall.py
|
Python
|
gpl-2.0
| 7,446
|
# -*- coding:utf-8 -*-
from datetime import datetime
import time
from bson.objectid import ObjectId
from pymongo import ASCENDING, DESCENDING
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# todo change name
import turbo.model
import turbo.util
import turbo_motor.model
from .settings import (
MONGO_DB_MAPPING as _MONGO_DB_MAPPING,
DB_ENGINE_MAPPING as _DB_ENGINE_MAPPING,
)
class BaseModel(turbo.model.BaseModel):
    """MongoDB (pymongo) model base bound to the project's connection mapping."""
    # Expose this module's globals so turbo can resolve related model classes.
    package_space = globals()
    def __init__(self, db_name='test'):
        super(BaseModel, self).__init__(db_name, _MONGO_DB_MAPPING)
    def get_count(self, spec=None):
        # Number of documents matching *spec* (all documents when spec is None).
        return self.find(spec=spec).count()
class SqlBaseModel(object):
    """SQLAlchemy model base: binds a declarative Base and a session factory."""
    def __init__(self, db_name='test'):
        # Look up the engine configured for *db_name* in settings.
        engine = _DB_ENGINE_MAPPING[db_name]
        self.Base = declarative_base(bind=engine)
        self.Session = self.create_session(engine)
    def create_session(self, engine):
        # Returns a session factory (sessionmaker), not a session instance;
        # callers obtain sessions via self.Session().
        return sessionmaker(bind=engine)
class MotorBaseModel(turbo_motor.model.BaseModel):
    """Async MongoDB (motor) model base using the same connection mapping."""
    # Expose this module's globals so turbo can resolve related model classes.
    package_space = globals()
    def __init__(self, db_name='test'):
        super(MotorBaseModel, self).__init__(db_name, _MONGO_DB_MAPPING)
|
wecatch/app-turbo
|
demos/models/base.py
|
Python
|
apache-2.0
| 1,195
|
"""
Music loading.
"""
import os
import re
import time
import configobj
import taglib
from pisak import res, dirs, utils, logger
from pisak.audio import db_manager
_LOG = logger.get_logger(__name__)
_LIBRARY_DIR = dirs.get_user_dir("music")
_COVER_EXTENSIONS = [
".jpg", ".jpeg", ".png", ".bmp"]
_LOAD_TRACKER = os.path.join(dirs.HOME_PISAK_DIR, "music_load_tracker.ini")
_FAKE_COVER_NAME = "fake_cover.png"
_UNKNOWN_LITERAL_TAG = "nieznane"
_UNKNOWN_NUMERICAL_TAG = 0
def load_all():
    """
    Load information about the music library in the filesystem and
    insert them to the database.
    """
    # Incremental scan: only directories whose mtime is newer than the
    # previously persisted load time are (re)visited.
    last_load_time = _get_last_load_time()
    tracks = list()
    db = db_manager.DBLoader()
    for current in [_LIBRARY_DIR] + os.listdir(_LIBRARY_DIR):
        if current is not _LIBRARY_DIR:
            current = os.path.join(_LIBRARY_DIR, current)
        if os.path.isdir(current) and os.path.getmtime(current) > last_load_time:
            # use os.walk here only to find all the files in
            # the current directory:
            for _, _, files in os.walk(current):
                if files:
                    folder_name = os.path.split(current)[-1]
                    cover_path = utils.find_folder_image(
                        files, folder_name.lower(), current, _COVER_EXTENSIONS)
                    if not cover_path:
                        # No cover image present: generate an identicon instead.
                        cover_path = os.path.join(current, _FAKE_COVER_NAME)
                        utils.produce_identicon(current, save_path=cover_path)
                    folder_id = db.insert_folder(folder_name, cover_path)
                    for file_name in files:
                        path = os.path.join(current, file_name)
                        meta = _get_metadata(path, file_name)
                        if meta:
                            meta.update({'path': path,
                                         'cover_path': cover_path,
                                         'folder_id': folder_id})
                            tracks.append(meta)
                # Only the first (top) level of the walk is needed.
                break
    db.insert_many_tracks(tracks)
    db.close()
    _update_last_load_time(time.time())
def _get_last_load_time():
    """Return the timestamp of the previous library scan, or 0 when never run."""
    if os.path.isfile(_LOAD_TRACKER):
        return configobj.ConfigObj(
            _LOAD_TRACKER, encoding='UTF8').as_float("last_load_time")
    return 0
def _update_last_load_time(timestamp):
    """Persist *timestamp* as the moment of the last completed library load.

    The parameter was renamed from ``time`` to ``timestamp`` because it
    shadowed the imported ``time`` module; the single caller passes it
    positionally, so the rename is safe.
    """
    conf = configobj.ConfigObj(_LOAD_TRACKER, encoding='UTF8')
    conf["last_load_time"] = timestamp
    conf.write()
def _get_metadata(path, file_name):
    """Read audio tags from *path*; return a metadata dict or False on failure."""
    try:
        file_tags = taglib.File(path).tags
    except OSError:
        _LOG.warning("Taglib could not read file: {}.".format(path))
        return False
    metadata = dict()
    # Each extractor fills one metadata field, applying its own fallback.
    for tag, func in _TAG_EXTRACTORS.items():
        func(tag, metadata, file_tags, file_name)
    return metadata
def _extract_title(tag, metadata, file_tags, file_name):
    """Extract the TITLE tag; fall back to the file name without extension."""
    if _extract_literal_tag(tag, metadata, file_tags):
        return
    metadata["title"] = os.path.splitext(file_name)[0]
def _extract_tracknumber(tag, metadata, file_tags, file_name):
    """Extract TRACKNUMBER as 'no'; fall back to a number in the file name."""
    if _extract_numerical_tag(tag, metadata, file_tags, 'no'):
        return
    no = _extract_number(file_name)
    metadata["no"] = no if no else _UNKNOWN_NUMERICAL_TAG
def _extract_date(tag, metadata, file_tags, file_name):
    """Extract DATE as 'year'; default to the unknown-numerical marker."""
    if _extract_numerical_tag(tag, metadata, file_tags, 'year'):
        return
    metadata["year"] = _UNKNOWN_NUMERICAL_TAG
def _extract_other(tag, metadata, file_tags, file_name):
    """Extract a plain text tag; default to the unknown-literal marker."""
    if _extract_literal_tag(tag, metadata, file_tags):
        return
    metadata[tag.lower()] = _UNKNOWN_LITERAL_TAG
def _extract_literal_tag(tag, metadata, file_tags, alias=None):
if tag in file_tags:
value = file_tags[tag]
if len(value) > 0:
metadata[alias or tag.lower()] = value[0]
return True
return False
def _extract_numerical_tag(tag, metadata, file_tags, alias=None):
    """Parse the first value of *tag* as an integer and store it under
    *alias* (or the lowercased tag name).  Return True when stored."""
    values = file_tags.get(tag)
    if not values:
        return False
    parsed = _extract_number(values[0])
    if parsed:
        metadata[alias or tag.lower()] = parsed
        return True
    return False
# Dispatch table: audio tag name -> extractor that fills the metadata dict.
_TAG_EXTRACTORS = {"DATE": _extract_date, "TITLE": _extract_title,
                   "TRACKNUMBER": _extract_tracknumber, "GENRE": _extract_other,
                   "ARTIST": _extract_other, "ALBUM": _extract_other}
def _extract_number(string):
num = re.findall("([0-9]+)", string)
if num:
return int(num[0])
|
BrainTech/pisak
|
pisak/audio/data_loader.py
|
Python
|
gpl-3.0
| 4,509
|
# Copyright (C) 2005 Christian Limpach <Christian.Limpach@cl.cam.ac.uk>
# Copyright (C) 2005 XenSource Ltd
# This file is subject to the terms and conditions of the GNU General
# Public License. See the file "COPYING" in the main directory of
# this archive for more details.
import threading
from xen.xend.xenstore.xsutil import xshandle
from xen.xend.XendLogging import log
class xswatch:
    ##
    # Create a watch on the given path in the store. The watch will fire
    # immediately, then subsequently each time the watched path is changed,
    # until the watch is deregistered, either by the return value from the
    # watch callback being False, or by an explicit call to unwatch.
    #
    # @param fn The function to be called when the watch fires. This function
    # should take the path that has changed as its first argument, followed by
    # the extra arguments given to this constructor, if any. It should return
    # True if the watch is to remain registered, or False if it is to be
    # deregistered.
    #
    def __init__(self, path, fn, *args, **kwargs):
        self.path = path
        self.fn = fn
        self.args = args
        self.kwargs = kwargs
        # Lazily start the single shared watcher thread before registering.
        watchStart()
        xs.watch(path, self)
    def unwatch(self):
        # Deregister this watch from the shared xenstore handle.
        xs.unwatch(self.path, self)
# Singleton watcher state shared by all xswatch instances; guarded by xslock.
watchThread = None
xs = None
xslock = threading.Lock()
def watchStart():
    # Start the shared watcher thread and xenstore handle exactly once;
    # safe to call from multiple threads thanks to xslock.
    global watchThread
    global xs
    xslock.acquire()
    try:
        if watchThread:
            # Already started by an earlier call.
            return
        xs = xshandle()
        watchThread = threading.Thread(name="Watcher", target=watchMain)
        watchThread.setDaemon(True)
        watchThread.start()
    finally:
        xslock.release()
def watchMain():
    # Watcher thread main loop: read fired watches from xenstore and
    # dispatch them to their callbacks; a False return deregisters.
    while True:
        try:
            we = xs.read_watch()
            watch = we[1]
            # we[0] is the path that fired; extra args come from the constructor.
            res = watch.fn(we[0], *watch.args, **watch.kwargs)
            if not res:
                watch.unwatch()
        except:
            log.exception("read_watch failed")
            # Ignore this exception -- there's no point throwing it
            # further on because that will just kill the watcher thread,
            # which achieves nothing.
|
andreiw/xen3-arm-tegra
|
tools/python/xen/xend/xenstore/xswatch.py
|
Python
|
gpl-2.0
| 2,161
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = '40523138'
SITENAME = '2016Fall CPA 課程網誌 (虎尾科大MDE)'
# Do not use the directory an article lives in as its category
USE_FOLDER_AS_CATEGORY = False
#PATH = 'content'
#OUTPUT_PATH = 'output'
TIMEZONE = 'Asia/Taipei'
DEFAULT_LANG = 'en'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
         ('pelican-bootstrap3', 'https://github.com/DandyDev/pelican-bootstrap3/'),
         ('pelican-plugins', 'https://github.com/getpelican/pelican-plugins'),
         ('Tipue search', 'https://github.com/Tipue/Tipue-Search'),)
# Social widget
#SOCIAL = (('You can add links in your config file', '#'),('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
# Must be an absolute path, or relative to the directory containing this settings file
PLUGIN_PATHS = ['plugin']
PLUGINS = ['liquid_tags.notebook', 'summary', 'tipue_search', 'sitemap']
# for sitemap plugin
SITEMAP = {
    'format': 'xml',
    'priorities': {
        'articles': 0.5,
        'indexes': 0.5,
        'pages': 0.5
    },
    'changefreqs': {
        'articles': 'monthly',
        'indexes': 'daily',
        'pages': 'monthly'
    }
}
# search is for Tipue search
DIRECT_TEMPLATES = (('index', 'tags', 'categories', 'authors', 'archives', 'search'))
# for pelican-bootstrap3 theme settings
#TAG_CLOUD_MAX_ITEMS = 50
DISPLAY_CATEGORIES_ON_SIDEBAR = True
DISPLAY_RECENT_POSTS_ON_SIDEBAR = True
DISPLAY_TAGS_ON_SIDEBAR = True
DISPLAY_TAGS_INLINE = True
TAGS_URL = "tags.html"
CATEGORIES_URL = "categories.html"
#SHOW_ARTICLE_AUTHOR = True
#MENUITEMS = [('Home', '/'), ('Archives', '/archives.html'), ('Search', '/search.html')]
|
s40523138/2016fallcp_hw
|
pelicanconf.py
|
Python
|
agpl-3.0
| 1,952
|
#!/usr/bin/env python
# Copyright (c) 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Certificate chain where the intermediate restricts the extended key usage to
clientAuth, and the target asserts serverAuth + clientAuth."""
import sys
sys.path += ['../..']
import gencerts
# Self-signed root certificate.
root = gencerts.create_self_signed_root_certificate('Root')
# Intermediate certificate. Its EKU is restricted to clientAuth only, so a
# serverAuth verification through it is expected to fail (see module docstring).
intermediate = gencerts.create_intermediate_certificate('Intermediate', root)
intermediate.get_extensions().set_property('extendedKeyUsage',
                                           'clientAuth')
# Target certificate: asserts both serverAuth and clientAuth.
target = gencerts.create_end_entity_certificate('Target', intermediate)
target.get_extensions().set_property('extendedKeyUsage',
                                     'serverAuth,clientAuth')
chain = [target, intermediate, root]
gencerts.write_chain(__doc__, chain, 'chain.pem')
|
nwjs/chromium.src
|
net/data/verify_certificate_chain_unittest/intermediate-eku-clientauth/generate-chains.py
|
Python
|
bsd-3-clause
| 1,005
|
from unittest import TestCase
from similarityPy.algorithms.find_nearest import FindNearest
from tests import test_logger
__author__ = 'cenk'
class FindNearestTest(TestCase):
    """Unit tests for FindNearest input validation."""

    def setUp(self):
        pass

    def test_algorithm(self):
        """FindNearest must reject non-list inputs with a TypeError."""
        test_logger.debug("FindNearestTest - test_algorithm Starts")
        points = "abcdef"
        point = "abcdefg"
        with self.assertRaises(TypeError) as context:
            FindNearest(points, point, "")
        self.assertEqual("You must initialize array and a point",
                         context.exception.message)
        # Fixed: the closing log line previously said "Starts" again
        # (copy-paste of the opening line).
        test_logger.debug("FindNearestTest - test_algorithm Ends")
|
pombredanne/similarityPy
|
tests/algorihtm_tests/find_nearest_test.py
|
Python
|
mit
| 651
|
# Copyright (C) 2016 Intel Corporation
# Released under the MIT license (see COPYING.MIT)
from time import sleep
from oeqa.core.case import OETestCase
from oeqa.core.decorator.oetimeout import OETimeout
class TimeoutTest(OETestCase):
    """Exercises the OETimeout decorator."""
    @OETimeout(1)
    def testTimeoutPass(self):
        # Finishes immediately, comfortably inside the 1 s budget.
        self.assertTrue(True, msg='How is this possible?')
    @OETimeout(1)
    def testTimeoutFail(self):
        # Sleeps past the 1 s budget on purpose: the decorator is expected to
        # abort this test before the assertion below is ever reached.
        sleep(2)
        self.assertTrue(True, msg='How is this possible?')
|
schleichdi2/OPENNFR-6.1-CORE
|
opennfr-openembedded-core/meta/lib/oeqa/core/tests/cases/timeout.py
|
Python
|
gpl-2.0
| 472
|
from core.vectors import PhpCode, ShellCmd, ModuleExec, Os
from core.module import Module
from core.loggers import log
from core import messages
import urlparse
import telnetlib
import time
class Tcp(Module):
    """Spawn a shell on a TCP port."""
    def init(self):
        self.register_info(
            {
                'author': [
                    'Emilio Pinna'
                ],
                'license': 'GPLv3'
            }
        )
        self.register_vectors(
            [
                ShellCmd(
                    "nc -l -p ${port} -e ${shell}",
                    name = 'netcat',
                    target = Os.NIX,
                    background = True
                ),
                ShellCmd(
                    "rm -rf /tmp/f;mkfifo /tmp/f;cat /tmp/f|${shell} -i 2>&1|nc -l ${port} >/tmp/f; rm -rf /tmp/f",
                    name = 'netcat_bsd',
                    target = Os.NIX,
                    background = True
                ),
                ShellCmd(
                    """python -c 'import pty,os,socket;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.bind(("", ${port}));s.listen(1);(rem, addr) = s.accept();os.dup2(rem.fileno(),0);os.dup2(rem.fileno(),1);os.dup2(rem.fileno(),2);pty.spawn("${shell}");s.close()';""",
                    name = 'python_pty',
                    target = Os.NIX,
                    background = True
                )
            ]
        )
        # Fixed: the flag was registered as '-no-autonnect' (typo), so its
        # value was stored under 'no_autonnect' and the 'no_autoconnect'
        # lookups in run() below could never see True -- autoconnect could
        # not actually be disabled.
        self.register_arguments([
            { 'name' : 'port', 'help' : 'Port to spawn', 'type' : int },
            { 'name' : '-shell', 'help' : 'Specify shell', 'default' : '/bin/sh' },
            { 'name' : '-no-autoconnect', 'help' : 'Skip autoconnect', 'action' : 'store_true', 'default' : False },
            { 'name' : '-vector', 'choices' : self.vectors.get_names() }
        ])
    def run(self):
        # Run all the vectors
        for vector in self.vectors:
            # Skip vector if -vector is specified but does not match
            if self.args.get('vector') and self.args.get('vector') != vector.name:
                continue
            # Background run does not return results
            vector.run(self.args)
            # If set, skip autoconnect
            if self.args.get('no_autoconnect'): continue
            # Give some time to spawn the shell
            time.sleep(1)
            urlparsed = urlparse.urlparse(self.session['url'])
            if not urlparsed.hostname:
                log.debug(
                    messages.module_backdoor_tcp.error_parsing_connect_s % self.args['port']
                )
                continue
            try:
                telnetlib.Telnet(urlparsed.hostname, self.args['port'], timeout = 5).interact()
                # If telnetlib does not rise an exception, we can assume that
                # ended correctly and return from `run()`
                return
            except Exception as e:
                log.debug(
                    messages.module_backdoor_tcp.error_connecting_to_s_s_s % (
                        urlparsed.hostname,
                        self.args['port'],
                        e
                    )
                )
        # If autoconnect was expected but every Telnet() attempt failed,
        # print an error message.
        if not self.args.get('no_autoconnect'):
            log.warn(
                messages.module_backdoor_tcp.error_connecting_to_s_s_s % (
                    urlparsed.hostname,
                    self.args['port'],
                    'remote port not open or unreachable'
                )
            )
|
marrocamp/weevely3
|
modules/backdoor/tcp.py
|
Python
|
gpl-3.0
| 3,520
|
"""Settings of Zinnia"""
from django.conf import settings
MEDIA_URL = getattr(settings, 'BLOGQUINTET_MEDIA_URL', '/blogquintet/')
|
franckbret/django-blog-quintet
|
blogquintet/settings.py
|
Python
|
mit
| 131
|
# encoding: utf-8
"""
encapsulation.py
Created by Thomas Mangin on 2014-06-20.
Copyright (c) 2014-2015 Orange. All rights reserved.
Copyright (c) 2014-2015 Exa Networks. All rights reserved.
"""
from struct import pack
from struct import unpack
from exabgp.bgp.message.update.attribute.community.extended import ExtendedCommunity
# ================================================================ Encapsulation
# RFC 5512
class Encapsulation (ExtendedCommunity):
    # RFC 5512 transitive opaque extended community, encapsulation sub-type.
    COMMUNITY_TYPE = 0x03
    COMMUNITY_SUBTYPE = 0x0C
    # https://www.iana.org/assignments/bgp-parameters/bgp-parameters.xhtml#tunnel-types
    class Type:
        DEFAULT = 0x00
        L2TPv3 = 0x01
        GRE = 0x02
        IPIP = 0x07
        VXLAN = 0x08
        NVGRE = 0x09
        MPLS = 0x10
        VXLAN_GPE = 0x0C
        MPLS_UDP = 0x0D
    _string = {
        Type.DEFAULT : "Default",
        Type.L2TPv3 : "L2TPv3",
        Type.GRE : "GRE",
        Type.IPIP : "IP-in-IP",
        Type.VXLAN : "VXLAN",
        Type.NVGRE : "NVGRE",
        Type.MPLS : "MPLS",
        Type.VXLAN_GPE: "VXLAN-GPE",
        Type.MPLS_UDP : "MPLS-in-UDP",
    }
    __slots__ = ['tunnel_type']
    def __init__ (self,tunnel_type,community=None):
        self.tunnel_type = tunnel_type
        # Consistency fix: use the class constants instead of repeating the
        # magic numbers 0x03/0x0C, so the type/sub-type live in one place.
        ExtendedCommunity.__init__(
            self,
            community if community is not None else pack(
                "!BBLH", self.COMMUNITY_TYPE, self.COMMUNITY_SUBTYPE, 0, self.tunnel_type
            )
        )
    def __str__ (self):
        return "Encapsulation: %s" % Encapsulation._string.get(self.tunnel_type,"Encap:(unknown:%d)" % self.tunnel_type)
    @staticmethod
    def unpack (data):
        # The tunnel type occupies the last two bytes of the 8-byte community.
        tunnel, = unpack('!H',data[6:8])
        return Encapsulation(tunnel,data[:8])
        # type_ = ord(data[0]) & 0x0F
        # stype = ord(data[1])
        # assert(type_==Encapsulation.COMMUNITY_TYPE)
        # assert(stype==Encapsulation.COMMUNITY_SUBTYPE)
        # assert(len(data)==6)
Encapsulation.register_extended()
|
lochiiconnectivity/exabgp
|
lib/exabgp/bgp/message/update/attribute/community/extended/encapsulation.py
|
Python
|
bsd-3-clause
| 1,758
|
"""
Listens events:
forget (string)
Given string can be task name, remembered field (url, imdb_url) or a title. If given value is a
task name then everything in that task will be forgotten. With title all learned fields from it and the
title will be forgotten. With field value only that particular field is forgotten.
"""
from __future__ import unicode_literals, division, absolute_import
import contextlib
import logging
from datetime import datetime, timedelta
from sqlalchemy import Column, Integer, DateTime, Unicode, Boolean, or_, select, update, Index
from sqlalchemy.orm import relation
from sqlalchemy.schema import ForeignKey
from flexget import db_schema, options, plugin
from flexget.event import event
from flexget.manager import Session
from flexget.utils.imdb import is_imdb_url, extract_id
from flexget.utils.sqlalchemy_utils import table_schema, table_add_column
from flexget.utils.tools import console
log = logging.getLogger('seen')
Base = db_schema.versioned_base('seen', 4)
@db_schema.upgrade('seen')
def upgrade(ver, session):
    """Migrate the 'seen' plugin schema step by step up to version 4."""
    if ver is None:
        log.info('Converting seen imdb_url to imdb_id for seen movies.')
        field_table = table_schema('seen_field', session)
        for row in session.execute(select([field_table.c.id, field_table.c.value], field_table.c.field == 'imdb_url')):
            new_values = {'field': 'imdb_id', 'value': extract_id(row['value'])}
            session.execute(update(field_table, field_table.c.id == row['id'], new_values))
        ver = 1
    if ver == 1:
        field_table = table_schema('seen_field', session)
        log.info('Adding index to seen_field table.')
        Index('ix_seen_field_seen_entry_id', field_table.c.seen_entry_id).create(bind=session.bind)
        ver = 2
    if ver == 2:
        log.info('Adding local column to seen_entry table')
        table_add_column('seen_entry', 'local', Boolean, session, default=False)
        ver = 3
    if ver == 3:
        # setting the default to False in the last migration was broken, fix the data
        log.info('Repairing seen table')
        entry_table = table_schema('seen_entry', session)
        session.execute(update(entry_table, entry_table.c.local == None, {'local': False}))
        ver = 4
    return ver
class SeenEntry(Base):
    """One remembered (seen) entry together with its learned field values."""
    __tablename__ = 'seen_entry'
    id = Column(Integer, primary_key=True)
    title = Column(Unicode)
    reason = Column(Unicode)
    # DB column kept as 'feed' for backwards compatibility with old schemas.
    task = Column('feed', Unicode)
    added = Column(DateTime)
    local = Column(Boolean)
    fields = relation('SeenField', backref='seen_entry', cascade='all, delete, delete-orphan')
    def __init__(self, title, task, reason=None, local=False):
        self.title = title
        self.reason = reason
        self.task = task
        self.added = datetime.now()
        self.local = local
    def __str__(self):
        return '<SeenEntry(title=%s,reason=%s,task=%s,added=%s)>' % (self.title, self.reason, self.task, self.added)
class SeenField(Base):
    """A single learned field (name/value pair) belonging to a SeenEntry."""
    __tablename__ = 'seen_field'
    id = Column(Integer, primary_key=True)
    seen_entry_id = Column(Integer, ForeignKey('seen_entry.id'), nullable=False, index=True)
    field = Column(Unicode)
    # Indexed because lookups during filtering query by value.
    value = Column(Unicode, index=True)
    added = Column(DateTime)
    def __init__(self, field, value):
        self.field = field
        self.value = value
        self.added = datetime.now()
    def __str__(self):
        return '<SeenField(field=%s,value=%s,added=%s)>' % (self.field, self.value, self.added)
@event('forget')
def forget(value):
    """
    See module docstring
    :param string value: Can be task name, entry title or field value
    :return: count, field_count where count is number of entries removed and field_count number of fields
    """
    log.debug('forget called with %s' % value)
    session = Session()
    try:
        count = 0
        field_count = 0
        # Entries matched directly by title or by owning task name.
        for se in session.query(SeenEntry).filter(or_(SeenEntry.title == value, SeenEntry.task == value)).all():
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        # Entries matched through one of their remembered field values.
        for sf in session.query(SeenField).filter(SeenField.value == value).all():
            se = session.query(SeenEntry).filter(SeenEntry.id == sf.seen_entry_id).first()
            field_count += len(se.fields)
            count += 1
            log.debug('forgetting %s' % se)
            session.delete(se)
        # Fixed: commit used to live in the finally block, which committed a
        # partial set of deletions even when an exception occurred mid-loop.
        # Commit only on success; close unconditionally.
        session.commit()
        return count, field_count
    finally:
        session.close()
class FilterSeen(object):
"""
Remembers previously downloaded content and rejects them in
subsequent executions. Without this plugin FlexGet would
download all matching content on every execution.
This plugin is enabled on all tasks by default.
See wiki for more information.
"""
def __init__(self):
# remember and filter by these fields
self.fields = ['title', 'url', 'original_url']
self.keyword = 'seen'
def validator(self):
from flexget import validator
root = validator.factory()
root.accept('boolean')
root.accept('choice').accept_choices(['global', 'local'])
return root
@plugin.priority(255)
def on_task_filter(self, task, config, remember_rejected=False):
"""Filter seen entries"""
if config is False:
log.debug('%s is disabled' % self.keyword)
return
fields = self.fields
local = config == 'local'
for entry in task.entries:
# construct list of values looked
values = []
for field in fields:
if field not in entry:
continue
if entry[field] not in values and entry[field]:
values.append(unicode(entry[field]))
if values:
log.trace('querying for: %s' % ', '.join(values))
# check if SeenField.value is any of the values
found = task.session.query(SeenField).join(SeenEntry).filter(SeenField.value.in_(values))
if local:
found = found.filter(SeenEntry.task == task.name)
else:
found = found.filter(SeenEntry.local == False)
found = found.first()
if found:
log.debug("Rejecting '%s' '%s' because of seen '%s'" % (entry['url'], entry['title'], found.value))
se = task.session.query(SeenEntry).filter(SeenEntry.id == found.seen_entry_id).one()
entry.reject('Entry with %s `%s` is already marked seen in the task %s at %s' %
(found.field, found.value, se.task, se.added.strftime('%Y-%m-%d %H:%M')),
remember=remember_rejected)
def on_task_learn(self, task, config):
"""Remember succeeded entries"""
if config is False:
log.debug('disabled')
return
fields = self.fields
if isinstance(config, list):
fields.extend(config)
for entry in task.accepted:
self.learn(task, entry, fields=fields, local=config == 'local')
# verbose if in learning mode
if task.options.learn:
log.info("Learned '%s' (will skip this in the future)" % (entry['title']))
def learn(self, task, entry, fields=None, reason=None, local=False):
"""Marks entry as seen"""
# no explicit fields given, use default
if not fields:
fields = self.fields
se = SeenEntry(entry['title'], unicode(task.name), reason, local)
remembered = []
for field in fields:
if not field in entry:
continue
# removes duplicate values (eg. url, original_url are usually same)
if entry[field] in remembered:
continue
remembered.append(entry[field])
sf = SeenField(unicode(field), unicode(entry[field]))
se.fields.append(sf)
log.debug("Learned '%s' (field: %s)" % (entry[field], field))
# Only add the entry to the session if it has one of the required fields
if se.fields:
task.session.add(se)
    def forget(self, task, title):
        """Forget SeenEntry with :title:. Return True if forgotten.

        :param task: Task whose database session is used for the lookup.
        :param title: Exact title of the seen entry to remove.
        :return: True if a matching entry was deleted, None otherwise.
        """
        se = task.session.query(SeenEntry).filter(SeenEntry.title == title).first()
        if se:
            log.debug("Forgotten '%s' (%s fields)" % (title, len(se.fields)))
            task.session.delete(se)
            return True
@event('manager.db_cleanup')
def db_cleanup(session):
    """Periodic database cleanup hook (currently a deliberate no-op).

    The pruning of old seen fields below is intentionally disabled;
    see ticket #1321 before re-enabling.
    """
    log.debug('TODO: Disabled because of ticket #1321')
    return
    # Remove seen fields over a year old
    result = session.query(SeenField).filter(SeenField.added < datetime.now() - timedelta(days=365)).delete()
    if result:
        log.verbose('Removed %d seen fields older than 1 year.' % result)
def do_cli(manager, options):
    """Route the ``seen`` CLI sub-command to its handler."""
    handlers = {
        'forget': lambda: seen_forget(manager, options),
        'add': lambda: seen_add(options),
        'search': lambda: seen_search(options),
    }
    handler = handlers.get(options.seen_action)
    if handler is not None:
        handler()
def seen_forget(manager, options):
    """Handle the ``seen forget`` CLI action."""
    value = options.forget_value
    # IMDb URLs are stored by their id, so normalize before forgetting;
    # keep the raw value when no id can be extracted.
    if is_imdb_url(value):
        value = extract_id(value) or value
    count, fcount = forget(value)
    console('Removed %s titles (%s fields)' % (count, fcount))
    manager.config_changed()
def seen_add(options):
    """Handle the ``seen add`` CLI action: remember a title/url globally."""
    seen_name = options.add_value
    # IMDb URLs are stored by their id; keep the raw value when no id
    # can be extracted.
    if is_imdb_url(seen_name):
        seen_name = extract_id(seen_name) or seen_name
    with contextlib.closing(Session()) as session:
        entry = SeenEntry(seen_name, 'cli_seen')
        entry.fields.append(SeenField('cli_seen', seen_name))
        session.add(entry)
        session.commit()
    console('Added %s as seen. This will affect all tasks.' % seen_name)
def seen_search(options):
    """Handle the ``seen search`` CLI action: print matching seen entries.

    Uses ``contextlib.closing`` for session cleanup, consistent with
    ``seen_add`` (the manual try/finally did the same thing less clearly).
    """
    search_term = '%' + options.search_term + '%'
    with contextlib.closing(Session()) as session:
        seen_entries = (session.query(SeenEntry).join(SeenField).
                        filter(SeenField.value.like(search_term)).order_by(SeenField.added).all())
        for se in seen_entries:
            console('ID: %s Name: %s Task: %s Added: %s' % (se.id, se.title, se.task, se.added.strftime('%c')))
            for sf in se.fields:
                console(' %s: %s' % (sf.field, sf.value))
            console('')
        if not seen_entries:
            console('No results')
@event('plugin.register')
def register_plugin():
    # Registered as a builtin so seen filtering is active for every task.
    plugin.register(FilterSeen, 'seen', builtin=True, api_ver=2)
@event('options.register')
def register_parser_arguments():
    # Top-level `seen` command with forget/add/search sub-commands; the
    # chosen sub-command lands in options.seen_action (see do_cli).
    parser = options.register_command('seen', do_cli, help='view or forget entries remembered by the seen plugin')
    subparsers = parser.add_subparsers(dest='seen_action', metavar='<action>')
    forget_parser = subparsers.add_parser('forget', help='forget entry or entire task from seen plugin database')
    forget_parser.add_argument('forget_value', metavar='<value>',
                               help='title or url of entry to forget, or name of task to forget')
    add_parser = subparsers.add_parser('add', help='add a title or url to the seen database')
    add_parser.add_argument('add_value', metavar='<value>', help='the title or url to add')
    search_parser = subparsers.add_parser('search', help='search text from the seen database')
    search_parser.add_argument('search_term', metavar='<search term>')
|
X-dark/Flexget
|
flexget/plugins/filter/seen.py
|
Python
|
mit
| 11,743
|
from __future__ import absolute_import
from django.db import models
from sentry.db.models import BaseManager, FlexibleForeignKey
from . import AvatarBase
class UserAvatar(AvatarBase):
    """
    A UserAvatar associates a User with their avatar photo File
    and contains their preferences for avatar type.
    """

    # (stored integer, name) pairs; index 0 (letter avatar) is the default
    # of the avatar_type field below.
    AVATAR_TYPES = ((0, u"letter_avatar"), (1, u"upload"), (2, u"gravatar"))
    # Logical file-type tag used when storing the avatar File.
    FILE_TYPE = "avatar.file"
    # One avatar record per user (unique FK).
    user = FlexibleForeignKey("sentry.User", unique=True, related_name="avatar")
    avatar_type = models.PositiveSmallIntegerField(default=0, choices=AVATAR_TYPES)
    # Manager caches lookups keyed by user.
    objects = BaseManager(cache_fields=["user"])

    class Meta:
        app_label = "sentry"
        db_table = "sentry_useravatar"

    def get_cache_key(self, size):
        # Cache key for the rendered avatar photo at a given pixel size.
        return "avatar:%s:%s" % (self.user_id, size)
|
beeftornado/sentry
|
src/sentry/models/useravatar.py
|
Python
|
bsd-3-clause
| 820
|
"""Functions to plot epochs data
"""
# Authors: Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Denis Engemann <denis.engemann@gmail.com>
# Martin Luessi <mluessi@nmr.mgh.harvard.edu>
# Eric Larson <larson.eric.d@gmail.com>
# Jaakko Leppakangas <jaeilepp@student.jyu.fi>
#
# License: Simplified BSD
from functools import partial
import copy
import warnings
import numpy as np
from ..utils import verbose, get_config, set_config, logger
from ..io.pick import pick_types, channel_type
from ..io.proj import setup_proj
from ..fixes import Counter, _in1d
from ..time_frequency import psd_multitaper
from .utils import (tight_layout, figure_nobar, _toggle_proj, _toggle_options,
_layout_figure, _setup_vmin_vmax, _channels_changed,
_plot_raw_onscroll, _onclick_help, plt_show)
from ..defaults import _handle_default
def plot_epochs_image(epochs, picks=None, sigma=0., vmin=None,
                      vmax=None, colorbar=True, order=None, show=True,
                      units=None, scalings=None, cmap='RdBu_r',
                      fig=None, overlay_times=None):
    """Plot Event Related Potential / Fields image

    Parameters
    ----------
    epochs : instance of Epochs
        The epochs
    picks : int | array-like of int | None
        The indices of the channels to consider. If None, the first
        five good channels are plotted.
    sigma : float
        The standard deviation of the Gaussian smoothing to apply along
        the epoch axis to apply in the image. If 0., no smoothing is applied.
    vmin : float
        The min value in the image. The unit is uV for EEG channels,
        fT for magnetometers and fT/cm for gradiometers
    vmax : float
        The max value in the image. The unit is uV for EEG channels,
        fT for magnetometers and fT/cm for gradiometers
    colorbar : bool
        Display or not a colorbar
    order : None | array of int | callable
        If not None, order is used to reorder the epochs on the y-axis
        of the image. If it's an array of int it should be of length
        the number of good epochs. If it's a callable the arguments
        passed are the times vector and the data as 2d array
        (data.shape[1] == len(times)
    show : bool
        Show figure if True.
    units : dict | None
        The units of the channel types used for axes lables. If None,
        defaults to `units=dict(eeg='uV', grad='fT/cm', mag='fT')`.
    scalings : dict | None
        The scalings of the channel types to be applied for plotting.
        If None, defaults to `scalings=dict(eeg=1e6, grad=1e13, mag=1e15,
        eog=1e6)`
    cmap : matplotlib colormap
        Colormap.
    fig : matplotlib figure | None
        Figure instance to draw the image to. Figure must contain two axes for
        drawing the single trials and evoked responses. If None a new figure is
        created. Defaults to None.
    overlay_times : array-like, shape (n_epochs,) | None
        If not None the parameter is interpreted as time instants in seconds
        and is added to the image. It is typically useful to display reaction
        times. Note that it is defined with respect to the order
        of epochs such that overlay_times[0] corresponds to epochs[0].

    Returns
    -------
    figs : the list of matplotlib figures
        One figure per channel displayed
    """
    from scipy import ndimage
    units = _handle_default('units', units)
    scalings = _handle_default('scalings', scalings)
    import matplotlib.pyplot as plt
    if picks is None:
        # default: first five good MEG/EEG channels
        picks = pick_types(epochs.info, meg=True, eeg=True, ref_meg=False,
                           exclude='bads')[:5]
    if set(units.keys()) != set(scalings.keys()):
        raise ValueError('Scalings and units must have the same keys.')
    picks = np.atleast_1d(picks)
    if fig is not None and len(picks) > 1:
        raise ValueError('Only single pick can be drawn to a figure.')
    evoked = epochs.average(picks)
    data = epochs.get_data()[:, picks, :]
    # remember whether limits were auto-computed so they can be rescaled
    # per channel type inside the loop below
    scale_vmin = True if vmin is None else False
    scale_vmax = True if vmax is None else False
    vmin, vmax = _setup_vmin_vmax(data, vmin, vmax)
    if overlay_times is not None and len(overlay_times) != len(data):
        raise ValueError('size of overlay_times parameter (%s) do not '
                         'match the number of epochs (%s).'
                         % (len(overlay_times), len(data)))
    if overlay_times is not None:
        overlay_times = np.array(overlay_times)
        times_min = np.min(overlay_times)
        times_max = np.max(overlay_times)
        if ((times_min < epochs.tmin) or (times_max > epochs.tmax)):
            warnings.warn('Some values in overlay_times fall outside of '
                          'the epochs time interval (between %s s and %s s)' %
                          (epochs.tmin, epochs.tmax))
    figs = list()
    # one figure per picked channel
    for i, (this_data, idx) in enumerate(zip(np.swapaxes(data, 0, 1), picks)):
        if fig is None:
            this_fig = plt.figure()
        else:
            this_fig = fig
        figs.append(this_fig)
        ch_type = channel_type(epochs.info, idx)
        if ch_type not in scalings:
            # We know it's not in either scalings or units since keys match
            raise KeyError('%s type not in scalings and units' % ch_type)
        this_data *= scalings[ch_type]
        this_order = order
        if callable(order):
            this_order = order(epochs.times, this_data)
        if this_order is not None and (len(this_order) != len(this_data)):
            raise ValueError('size of order parameter (%s) does not '
                             'match the number of epochs (%s).'
                             % (len(this_order), len(this_data)))
        this_overlay_times = None
        if overlay_times is not None:
            this_overlay_times = overlay_times
        if this_order is not None:
            # reorder epochs (and any overlay marks) along the y-axis
            this_order = np.asarray(this_order)
            this_data = this_data[this_order]
            if this_overlay_times is not None:
                this_overlay_times = this_overlay_times[this_order]
        if sigma > 0.:
            # smooth across epochs (axis 0), not across time
            this_data = ndimage.gaussian_filter1d(this_data, sigma=sigma,
                                                  axis=0)
        plt.figure(this_fig.number)
        ax1 = plt.subplot2grid((3, 10), (0, 0), colspan=9, rowspan=2)
        # NOTE(review): when auto-scaling, vmin/vmax are multiplied by the
        # type scaling on *every* loop iteration; with multiple picks the
        # limits compound — looks like a bug, confirm against upstream.
        if scale_vmin:
            vmin *= scalings[ch_type]
        if scale_vmax:
            vmax *= scalings[ch_type]
        im = ax1.imshow(this_data,
                        extent=[1e3 * epochs.times[0], 1e3 * epochs.times[-1],
                                0, len(data)],
                        aspect='auto', origin='lower', interpolation='nearest',
                        vmin=vmin, vmax=vmax, cmap=cmap)
        if this_overlay_times is not None:
            # overlay per-epoch time marks (e.g. reaction times)
            plt.plot(1e3 * this_overlay_times, 0.5 + np.arange(len(this_data)),
                     'k', linewidth=2)
        ax2 = plt.subplot2grid((3, 10), (2, 0), colspan=9, rowspan=1)
        if colorbar:
            ax3 = plt.subplot2grid((3, 10), (0, 9), colspan=1, rowspan=3)
        ax1.set_title(epochs.ch_names[idx])
        ax1.set_ylabel('Epochs')
        ax1.axis('auto')
        ax1.axis('tight')
        ax1.axvline(0, color='m', linewidth=3, linestyle='--')
        evoked_data = scalings[ch_type] * evoked.data[i]
        ax2.plot(1e3 * evoked.times, evoked_data)
        ax2.set_xlabel('Time (ms)')
        ax2.set_xlim([1e3 * evoked.times[0], 1e3 * evoked.times[-1]])
        ax2.set_ylabel(units[ch_type])
        evoked_vmin = min(evoked_data) * 1.1 if scale_vmin else vmin
        evoked_vmax = max(evoked_data) * 1.1 if scale_vmax else vmax
        if scale_vmin or scale_vmax:
            # make auto-computed evoked limits symmetric around zero
            evoked_vmax = max(np.abs([evoked_vmax, evoked_vmin]))
            evoked_vmin = -evoked_vmax
        ax2.set_ylim([evoked_vmin, evoked_vmax])
        ax2.axvline(0, color='m', linewidth=3, linestyle='--')
        if colorbar:
            plt.colorbar(im, cax=ax3)
            tight_layout(fig=this_fig)
    plt_show(show)
    return figs
def plot_drop_log(drop_log, threshold=0, n_max_plot=20, subject='Unknown',
                  color=(0.9, 0.9, 0.9), width=0.8, ignore=('IGNORED',),
                  show=True):
    """Show the channel stats based on a drop_log from Epochs

    Parameters
    ----------
    drop_log : list of lists
        Epoch drop log from Epochs.drop_log.
    threshold : float
        The percentage threshold to use to decide whether or not to
        plot. Default is zero (always plot).
    n_max_plot : int
        Maximum number of channels to show stats for.
    subject : str
        The subject name to use in the title of the plot.
    color : tuple | str
        Color to use for the bars.
    width : float
        Width of the bars.
    ignore : list
        The drop reasons to ignore.
    show : bool
        Show figure if True.

    Returns
    -------
    fig : Instance of matplotlib.figure.Figure
        The figure.
    """
    import matplotlib.pyplot as plt
    from ..epochs import _drop_log_stats
    perc = _drop_log_stats(drop_log, ignore)
    # count how often each channel appears as a drop reason
    scores = Counter([ch for d in drop_log for ch in d if ch not in ignore])
    ch_names = np.array(list(scores.keys()))
    fig = plt.figure()
    # nothing (significant) was dropped: show a placeholder instead of bars
    if perc < threshold or len(ch_names) == 0:
        plt.text(0, 0, 'No drops')
        return fig
    n_used = 0
    for d in drop_log:  # "d" is the list of drop reasons for each epoch
        if len(d) == 0 or any(ch not in ignore for ch in d):
            n_used += 1  # number of epochs not ignored
    counts = 100 * np.array(list(scores.values()), dtype=float) / n_used
    n_plot = min(n_max_plot, len(ch_names))
    # sort channels by descending drop percentage
    order = np.flipud(np.argsort(counts))
    plt.title('%s: %0.1f%%' % (subject, perc))
    x = np.arange(n_plot)
    plt.bar(x, counts[order[:n_plot]], color=color, width=width)
    plt.xticks(x + width / 2.0, ch_names[order[:n_plot]], rotation=45,
               horizontalalignment='right')
    plt.tick_params(axis='x', which='major', labelsize=10)
    plt.ylabel('% of epochs rejected')
    plt.xlim((-width / 2.0, (n_plot - 1) + width * 3 / 2))
    plt.grid(True, axis='y')
    plt_show(show)
    return fig
def _draw_epochs_axes(epoch_idx, good_ch_idx, bad_ch_idx, data, times, axes,
                      title_str, axes_handler):
    """Aux function: update the per-epoch axes with new trace data.

    Replaces the line data on each axes and re-applies the grey "rejected"
    color (or restores black/red good/bad colors) based on the reject state
    stored on each axes object under the current handler key.
    """
    this = axes_handler[0]
    for ii, data_, ax in zip(epoch_idx, data, axes):
        # update good-channel traces in place
        for l, d in zip(ax.lines, data_[good_ch_idx]):
            l.set_data(times, d)
        if bad_ch_idx is not None:
            bad_lines = [ax.lines[k] for k in bad_ch_idx]
            for l, d in zip(bad_lines, data_[bad_ch_idx]):
                l.set_data(times, d)
        if title_str is not None:
            ax.set_title(title_str % ii, fontsize=12)
        ax.set_ylim(data.min(), data.max())
        ax.set_yticks(list())
        ax.set_xticks(list())
        if vars(ax)[this]['reject'] is True:
            # memorizing reject
            for l in ax.lines:
                l.set_color((0.8, 0.8, 0.8))
            ax.get_figure().canvas.draw()
        else:
            # forgetting previous reject
            for k in axes_handler:
                if k == this:
                    continue
                if vars(ax).get(k, {}).get('reject', None) is True:
                    for l in ax.lines[:len(good_ch_idx)]:
                        l.set_color('k')
                    if bad_ch_idx is not None:
                        for l in ax.lines[-len(bad_ch_idx):]:
                            l.set_color('r')
                    ax.get_figure().canvas.draw()
                    break
def _epochs_navigation_onclick(event, params):
    """Aux function: handle clicks on the back/next/reject-quit buttons."""
    import matplotlib.pyplot as plt
    p = params
    here = None
    if event.inaxes == p['back'].ax:
        here = 1
    elif event.inaxes == p['next'].ax:
        here = -1
    elif event.inaxes == p['reject-quit'].ax:
        # drop everything marked for rejection, then close both figures
        if p['reject_idx']:
            p['epochs'].drop_epochs(p['reject_idx'])
        plt.close(p['fig'])
        plt.close(event.inaxes.get_figure())
    if here is not None:
        # rotate the index/axes-state buffers by one page and redraw
        p['idx_handler'].rotate(here)
        p['axes_handler'].rotate(here)
        this_idx = p['idx_handler'][0]
        _draw_epochs_axes(this_idx, p['good_ch_idx'], p['bad_ch_idx'],
                          p['data'][this_idx],
                          p['times'], p['axes'], p['title_str'],
                          p['axes_handler'])
        # XXX don't ask me why
        p['axes'][0].get_figure().canvas.draw()
def _epochs_axes_onclick(event, params):
    """Aux function: toggle the reject state of the clicked epoch axes."""
    reject_color = (0.8, 0.8, 0.8)
    ax = event.inaxes
    if event.inaxes is None:
        return
    p = params
    here = vars(ax)[p['axes_handler'][0]]
    if here.get('reject', None) is False:
        # mark this epoch for rejection and grey out its traces
        idx = here['idx']
        if idx not in p['reject_idx']:
            p['reject_idx'].append(idx)
            for l in ax.lines:
                l.set_color(reject_color)
            here['reject'] = True
    elif here.get('reject', None) is True:
        # un-mark the epoch: restore black for good channels, red for bads
        idx = here['idx']
        if idx in p['reject_idx']:
            p['reject_idx'].pop(p['reject_idx'].index(idx))
            good_lines = [ax.lines[k] for k in p['good_ch_idx']]
            for l in good_lines:
                l.set_color('k')
            if p['bad_ch_idx'] is not None:
                bad_lines = ax.lines[-len(p['bad_ch_idx']):]
                for l in bad_lines:
                    l.set_color('r')
            here['reject'] = False
    ax.get_figure().canvas.draw()
def plot_epochs(epochs, picks=None, scalings=None, n_epochs=20,
                n_channels=20, title=None, show=True, block=False):
    """ Visualize epochs

    Bad epochs can be marked with a left click on top of the epoch. Bad
    channels can be selected by clicking the channel name on the left side of
    the main axes. Calling this function drops all the selected bad epochs as
    well as bad epochs marked beforehand with rejection parameters.

    Parameters
    ----------
    epochs : instance of Epochs
        The epochs object
    picks : array-like of int | None
        Channels to be included. If None only good data channels are used.
        Defaults to None
    scalings : dict | None
        Scale factors for the traces. If None, defaults to::
            dict(mag=1e-12, grad=4e-11, eeg=20e-6, eog=150e-6, ecg=5e-4,
                 emg=1e-3, ref_meg=1e-12, misc=1e-3, stim=1, resp=1, chpi=1e-4)
    n_epochs : int
        The number of epochs per view. Defaults to 20.
    n_channels : int
        The number of channels per view. Defaults to 20.
    title : str | None
        The title of the window. If None, epochs name will be displayed.
        Defaults to None.
    show : bool
        Show figure if True. Defaults to True
    block : bool
        Whether to halt program execution until the figure is closed.
        Useful for rejecting bad trials on the fly by clicking on an epoch.
        Defaults to False.

    Returns
    -------
    fig : Instance of matplotlib.figure.Figure
        The figure.

    Notes
    -----
    The arrow keys (up/down/left/right) can be used to navigate between
    channels and epochs and the scaling can be adjusted with - and + (or =)
    keys, but this depends on the backend matplotlib is configured to use
    (e.g., mpl.use(``TkAgg``) should work). Full screen mode can be toggled
    with f11 key. The amount of epochs and channels per view can be adjusted
    with home/end and page down/page up keys. Butterfly plot can be toggled
    with ``b`` key. Right mouse click adds a vertical line to the plot.
    """
    epochs.drop_bad_epochs()
    scalings = _handle_default('scalings_plot_raw', scalings)
    projs = epochs.info['projs']
    # shared state dict used by all browser callbacks; info is deep-copied
    # so that projector toggling does not modify the caller's epochs.info
    params = {'epochs': epochs,
              'info': copy.deepcopy(epochs.info),
              'bad_color': (0.8, 0.8, 0.8),
              't_start': 0,
              'histogram': None}
    params['label_click_fun'] = partial(_pick_bad_channels, params=params)
    _prepare_mne_browse_epochs(params, projs, n_channels, n_epochs, scalings,
                               title, picks)
    _prepare_projectors(params)
    _layout_figure(params)
    callback_close = partial(_close_event, params=params)
    params['fig'].canvas.mpl_connect('close_event', callback_close)
    try:
        plt_show(show, block=block)
    except TypeError:  # not all versions have this
        plt_show(show)
    return params['fig']
@verbose
def plot_epochs_psd(epochs, fmin=0, fmax=np.inf, tmin=None, tmax=None,
                    proj=False, bandwidth=None, adaptive=False, low_bias=True,
                    normalization='length', picks=None, ax=None, color='black',
                    area_mode='std', area_alpha=0.33, dB=True, n_jobs=1,
                    show=True, verbose=None):
    """Plot the power spectral density across epochs

    Parameters
    ----------
    epochs : instance of Epochs
        The epochs object
    fmin : float
        Start frequency to consider.
    fmax : float
        End frequency to consider.
    tmin : float | None
        Start time to consider.
    tmax : float | None
        End time to consider.
    proj : bool
        Apply projection.
    bandwidth : float
        The bandwidth of the multi taper windowing function in Hz. The default
        value is a window half-bandwidth of 4.
    adaptive : bool
        Use adaptive weights to combine the tapered spectra into PSD
        (slow, use n_jobs >> 1 to speed up computation).
    low_bias : bool
        Only use tapers with more than 90% spectral concentration within
        bandwidth.
    normalization : str
        Either "full" or "length" (default). If "full", the PSD will
        be normalized by the sampling rate as well as the length of
        the signal (as in nitime).
    picks : array-like of int | None
        List of channels to use.
    ax : instance of matplotlib Axes | None
        Axes to plot into. If None, axes will be created.
    color : str | tuple
        A matplotlib-compatible color to use.
    area_mode : str | None
        Mode for plotting area. If 'std', the mean +/- 1 STD (across channels)
        will be plotted. If 'range', the min and max (across channels) will be
        plotted. Bad channels will be excluded from these calculations.
        If None, no area will be plotted.
    area_alpha : float
        Alpha for the area.
    dB : bool
        If True, transform data to decibels.
    n_jobs : int
        Number of jobs to run in parallel.
    show : bool
        Show figure if True.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).

    Returns
    -------
    fig : instance of matplotlib figure
        Figure distributing one image per channel across sensor topography.
    """
    from .raw import _set_psd_plot_params
    fig, picks_list, titles_list, ax_list, make_label = _set_psd_plot_params(
        epochs.info, proj, picks, ax, area_mode)
    # one axes per channel-type group
    for ii, (picks, title, ax) in enumerate(zip(picks_list, titles_list,
                                                ax_list)):
        psds, freqs = psd_multitaper(epochs, picks=picks, fmin=fmin,
                                     fmax=fmax, tmin=tmin, tmax=tmax,
                                     bandwidth=bandwidth, adaptive=adaptive,
                                     low_bias=low_bias,
                                     normalization=normalization, proj=proj,
                                     n_jobs=n_jobs)
    # Convert PSDs to dB
        if dB:
            psds = 10 * np.log10(psds)
            unit = 'dB'
        else:
            unit = 'power'
        # mean across epochs and channels
        psd_mean = np.mean(psds, axis=0).mean(axis=0)
        if area_mode == 'std':
            # std across channels
            psd_std = np.std(np.mean(psds, axis=0), axis=0)
            hyp_limits = (psd_mean - psd_std, psd_mean + psd_std)
        elif area_mode == 'range':
            hyp_limits = (np.min(np.mean(psds, axis=0), axis=0),
                          np.max(np.mean(psds, axis=0), axis=0))
        else:  # area_mode is None
            hyp_limits = None
        ax.plot(freqs, psd_mean, color=color)
        if hyp_limits is not None:
            ax.fill_between(freqs, hyp_limits[0], y2=hyp_limits[1],
                            color=color, alpha=area_alpha)
        if make_label:
            # only label the outer axes to keep the grid uncluttered
            if ii == len(picks_list) - 1:
                ax.set_xlabel('Freq (Hz)')
            if ii == len(picks_list) // 2:
                ax.set_ylabel('Power Spectral Density (%s/Hz)' % unit)
            ax.set_title(title)
            ax.set_xlim(freqs[0], freqs[-1])
    if make_label:
        tight_layout(pad=0.1, h_pad=0.1, w_pad=0.1, fig=fig)
    plt_show(show)
    return fig
def _prepare_mne_browse_epochs(params, projs, n_channels, n_epochs, scalings,
                               title, picks, order=None):
    """Helper for setting up the mne_browse_epochs window.

    Builds the main axes, scrollbars, help button, line collections and
    tick labels, and fills ``params`` with all state and callbacks the
    browser needs. Mutates ``params`` in place.
    """
    import matplotlib.pyplot as plt
    import matplotlib as mpl
    from matplotlib.collections import LineCollection
    from matplotlib.colors import colorConverter
    epochs = params['epochs']
    if picks is None:
        picks = _handle_picks(epochs)
    if len(picks) < 1:
        raise RuntimeError('No appropriate channels found. Please'
                           ' check your picks')
    picks = sorted(picks)
    # Reorganize channels (grad/mag first, then the `order` types)
    inds = list()
    types = list()
    for t in ['grad', 'mag']:
        idxs = pick_types(params['info'], meg=t, ref_meg=False, exclude=[])
        if len(idxs) < 1:
            continue
        mask = _in1d(idxs, picks, assume_unique=True)
        inds.append(idxs[mask])
        types += [t] * len(inds[-1])
    pick_kwargs = dict(meg=False, ref_meg=False, exclude=[])
    if order is None:
        order = ['eeg', 'eog', 'ecg', 'emg', 'ref_meg', 'stim', 'resp', 'misc',
                 'chpi', 'syst', 'ias', 'exci']
    for ch_type in order:
        # enable one type at a time so each pick maps to exactly one type
        pick_kwargs[ch_type] = True
        idxs = pick_types(params['info'], **pick_kwargs)
        if len(idxs) < 1:
            continue
        mask = _in1d(idxs, picks, assume_unique=True)
        inds.append(idxs[mask])
        types += [ch_type] * len(inds[-1])
        pick_kwargs[ch_type] = False
    inds = np.concatenate(inds).astype(int)
    if not len(inds) == len(picks):
        raise RuntimeError('Some channels not classified. Please'
                           ' check your picks')
    ch_names = [params['info']['ch_names'][x] for x in inds]
    # set up plotting
    size = get_config('MNE_BROWSE_RAW_SIZE')
    n_epochs = min(n_epochs, len(epochs.events))
    duration = len(epochs.times) * n_epochs
    n_channels = min(n_channels, len(picks))
    if size is not None:
        # stored as a comma separated string, e.g. "10,8"
        size = size.split(',')
        size = tuple(float(s) for s in size)
    if title is None:
        title = epochs.name
        if epochs.name is None or len(title) == 0:
            title = ''
    fig = figure_nobar(facecolor='w', figsize=size, dpi=80)
    fig.canvas.set_window_title('mne_browse_epochs')
    ax = plt.subplot2grid((10, 15), (0, 1), colspan=13, rowspan=9)
    ax.annotate(title, xy=(0.5, 1), xytext=(0, ax.get_ylim()[1] + 15),
                ha='center', va='bottom', size=12, xycoords='axes fraction',
                textcoords='offset points')
    color = _handle_default('color', None)
    ax.axis([0, duration, 0, 200])
    # secondary x-axis on top shows event ids; drawn behind the main axes
    ax2 = ax.twiny()
    ax2.set_zorder(-1)
    ax2.axis([0, duration, 0, 200])
    ax_hscroll = plt.subplot2grid((10, 15), (9, 1), colspan=13)
    ax_hscroll.get_yaxis().set_visible(False)
    ax_hscroll.set_xlabel('Epochs')
    ax_vscroll = plt.subplot2grid((10, 15), (0, 14), rowspan=9)
    ax_vscroll.set_axis_off()
    ax_vscroll.add_patch(mpl.patches.Rectangle((0, 0), 1, len(picks),
                                               facecolor='w', zorder=2))
    ax_help_button = plt.subplot2grid((10, 15), (9, 0), colspan=1)
    help_button = mpl.widgets.Button(ax_help_button, 'Help')
    help_button.on_clicked(partial(_onclick_help, params=params))
    # populate vertical and horizontal scrollbars
    for ci in range(len(picks)):
        if ch_names[ci] in params['info']['bads']:
            this_color = params['bad_color']
        else:
            this_color = color[types[ci]]
        ax_vscroll.add_patch(mpl.patches.Rectangle((0, ci), 1, 1,
                                                   facecolor=this_color,
                                                   edgecolor=this_color,
                                                   zorder=3))
    vsel_patch = mpl.patches.Rectangle((0, 0), 1, n_channels, alpha=0.5,
                                       edgecolor='w', facecolor='w', zorder=4)
    ax_vscroll.add_patch(vsel_patch)
    ax_vscroll.set_ylim(len(types), 0)
    ax_vscroll.set_title('Ch.')
    # populate colors list
    type_colors = [colorConverter.to_rgba(color[c]) for c in types]
    colors = list()
    for color_idx in range(len(type_colors)):
        colors.append([type_colors[color_idx]] * len(epochs.events))
    lines = list()
    n_times = len(epochs.times)
    # one LineCollection per visible channel row; data is filled in later
    for ch_idx in range(n_channels):
        if len(colors) - 1 < ch_idx:
            break
        lc = LineCollection(list(), antialiased=False, linewidths=0.5,
                            zorder=2, picker=3.)
        ax.add_collection(lc)
        lines.append(lc)
    times = epochs.times
    data = np.zeros((params['info']['nchan'], len(times) * n_epochs))
    ylim = (25., 0.)  # Hardcoded 25 because butterfly has max 5 rows (5*5=25).
    # make shells for plotting traces
    offset = ylim[0] / n_channels
    offsets = np.arange(n_channels) * offset + (offset / 2.)
    times = np.arange(len(times) * len(epochs.events))
    epoch_times = np.arange(0, len(times), n_times)
    ax.set_yticks(offsets)
    ax.set_ylim(ylim)
    ticks = epoch_times + 0.5 * n_times
    ax.set_xticks(ticks)
    ax2.set_xticks(ticks[:n_epochs])
    labels = list(range(1, len(ticks) + 1))  # epoch numbers
    ax.set_xticklabels(labels)
    ax2.set_xticklabels(labels)
    xlim = epoch_times[-1] + len(epochs.times)
    ax_hscroll.set_xlim(0, xlim)
    vertline_t = ax_hscroll.text(0, 1, '', color='y', va='bottom', ha='right')
    # fit horizontal scroll bar ticks
    hscroll_ticks = np.arange(0, xlim, xlim / 7.0)
    hscroll_ticks = np.append(hscroll_ticks, epoch_times[-1])
    hticks = list()
    for tick in hscroll_ticks:
        # snap each tick to the nearest epoch boundary
        hticks.append(epoch_times.flat[np.abs(epoch_times - tick).argmin()])
    hlabels = [x / n_times + 1 for x in hticks]
    ax_hscroll.set_xticks(hticks)
    ax_hscroll.set_xticklabels(hlabels)
    for epoch_idx in range(len(epoch_times)):
        ax_hscroll.add_patch(mpl.patches.Rectangle((epoch_idx * n_times, 0),
                                                   n_times, 1, facecolor='w',
                                                   edgecolor='w', alpha=0.6))
    hsel_patch = mpl.patches.Rectangle((0, 0), duration, 1,
                                       edgecolor='k',
                                       facecolor=(0.75, 0.75, 0.75),
                                       alpha=0.25, linewidth=1, clip_on=False)
    ax_hscroll.add_patch(hsel_patch)
    text = ax.text(0, 0, 'blank', zorder=2, verticalalignment='baseline',
                   ha='left', fontweight='bold')
    text.set_visible(False)
    # stash everything the interactive callbacks need
    params.update({'fig': fig,
                   'ax': ax,
                   'ax2': ax2,
                   'ax_hscroll': ax_hscroll,
                   'ax_vscroll': ax_vscroll,
                   'vsel_patch': vsel_patch,
                   'hsel_patch': hsel_patch,
                   'lines': lines,
                   'projs': projs,
                   'ch_names': ch_names,
                   'n_channels': n_channels,
                   'n_epochs': n_epochs,
                   'scalings': scalings,
                   'duration': duration,
                   'ch_start': 0,
                   'colors': colors,
                   'def_colors': type_colors,  # don't change at runtime
                   'picks': picks,
                   'bads': np.array(list(), dtype=int),
                   'data': data,
                   'times': times,
                   'epoch_times': epoch_times,
                   'offsets': offsets,
                   'labels': labels,
                   'scale_factor': 1.0,
                   'butterfly_scale': 1.0,
                   'fig_proj': None,
                   'types': np.array(types),
                   'inds': inds,
                   'vert_lines': list(),
                   'vertline_t': vertline_t,
                   'butterfly': False,
                   'text': text,
                   'ax_help_button': ax_help_button,  # needed for positioning
                   'help_button': help_button,  # reference needed for clicks
                   'fig_options': None,
                   'settings': [True, True, True, True],
                   'image_plot': None})
    params['plot_fun'] = partial(_plot_traces, params=params)
    # callbacks
    callback_scroll = partial(_plot_onscroll, params=params)
    fig.canvas.mpl_connect('scroll_event', callback_scroll)
    callback_click = partial(_mouse_click, params=params)
    fig.canvas.mpl_connect('button_press_event', callback_click)
    callback_key = partial(_plot_onkey, params=params)
    fig.canvas.mpl_connect('key_press_event', callback_key)
    callback_resize = partial(_resize_event, params=params)
    fig.canvas.mpl_connect('resize_event', callback_resize)
    fig.canvas.mpl_connect('pick_event', partial(_onpick, params=params))
    params['callback_key'] = callback_key
    # Draw event lines for the first time.
    _plot_vert_lines(params)
def _prepare_projectors(params):
    """ Helper for setting up the projectors for epochs browser """
    import matplotlib.pyplot as plt
    import matplotlib as mpl
    epochs = params['epochs']
    projs = params['projs']
    if len(projs) > 0 and not epochs.proj:
        # offer a toggle button only if projs exist and are not yet applied
        ax_button = plt.subplot2grid((10, 15), (9, 14))
        opt_button = mpl.widgets.Button(ax_button, 'Proj')
        callback_option = partial(_toggle_options, params=params)
        opt_button.on_clicked(callback_option)
        params['opt_button'] = opt_button
        params['ax_button'] = ax_button
    # As here code is shared with plot_evoked, some extra steps:
    # first the actual plot update function
    params['plot_update_proj_callback'] = _plot_update_epochs_proj
    # then the toggle handler
    callback_proj = partial(_toggle_proj, params=params)
    # store these for use by callbacks in the options figure
    params['callback_proj'] = callback_proj
    callback_proj('none')
def _plot_traces(params):
    """ Helper for plotting concatenated epochs.

    Redraws the visible channel rows (or the butterfly overlay) from
    ``params['data']``, coloring bad channels/epochs, and updates tick
    labels and the vertical-scroll selection patch.
    """
    params['text'].set_visible(False)
    ax = params['ax']
    butterfly = params['butterfly']
    if butterfly:
        # butterfly mode shows all picks overlaid, with its own scale
        ch_start = 0
        n_channels = len(params['picks'])
        data = params['data'] * params['butterfly_scale']
    else:
        ch_start = params['ch_start']
        n_channels = params['n_channels']
        data = params['data'] * params['scale_factor']
    offsets = params['offsets']
    lines = params['lines']
    epochs = params['epochs']
    n_times = len(epochs.times)
    tick_list = list()
    # indices of the first/last epoch currently in view
    start_idx = int(params['t_start'] / n_times)
    end = params['t_start'] + params['duration']
    end_idx = int(end / n_times)
    xlabels = params['labels'][start_idx:]
    event_ids = params['epochs'].events[:, 2]
    params['ax2'].set_xticklabels(event_ids[start_idx:])
    ax.set_xticklabels(xlabels)
    ylabels = ax.yaxis.get_ticklabels()
    # do the plotting
    for line_idx in range(n_channels):
        ch_idx = line_idx + ch_start
        if line_idx >= len(lines):
            break
        elif ch_idx < len(params['ch_names']):
            if butterfly:
                # fixed per-type rows in butterfly mode
                ch_type = params['types'][ch_idx]
                if ch_type == 'grad':
                    offset = offsets[0]
                elif ch_type == 'mag':
                    offset = offsets[1]
                elif ch_type == 'eeg':
                    offset = offsets[2]
                elif ch_type == 'eog':
                    offset = offsets[3]
                elif ch_type == 'ecg':
                    offset = offsets[4]
                else:
                    lines[line_idx].set_segments(list())
            else:
                tick_list += [params['ch_names'][ch_idx]]
                offset = offsets[line_idx]
            this_data = data[ch_idx]
            # subtraction here gets correct orientation for flipped ylim
            ydata = offset - this_data
            xdata = params['times'][:params['duration']]
            num_epochs = np.min([params['n_epochs'],
                                 len(epochs.events)])
            # one line segment per epoch within the visible window
            segments = np.split(np.array((xdata, ydata)).T, num_epochs)
            ch_name = params['ch_names'][ch_idx]
            if ch_name in params['info']['bads']:
                if not butterfly:
                    this_color = params['bad_color']
                    ylabels[line_idx].set_color(this_color)
                this_color = np.tile((params['bad_color']), (num_epochs, 1))
                for bad_idx in params['bads']:
                    if bad_idx < start_idx or bad_idx > end_idx:
                        continue
                    # bad epochs on bad channels are drawn in red
                    this_color[bad_idx - start_idx] = (1., 0., 0.)
                lines[line_idx].set_zorder(1)
            else:
                this_color = params['colors'][ch_idx][start_idx:end_idx]
                lines[line_idx].set_zorder(2)
                if not butterfly:
                    ylabels[line_idx].set_color('black')
            lines[line_idx].set_segments(segments)
            lines[line_idx].set_color(this_color)
        else:
            lines[line_idx].set_segments(list())
    # finalize plot
    ax.set_xlim(params['times'][0], params['times'][0] + params['duration'],
                False)
    params['ax2'].set_xlim(params['times'][0],
                           params['times'][0] + params['duration'], False)
    if butterfly:
        # replace channel-name y-labels with per-type amplitude labels
        factor = -1. / params['butterfly_scale']
        labels = np.empty(20, dtype='S15')
        labels.fill('')
        ticks = ax.get_yticks()
        idx_offset = 1
        if 'grad' in params['types']:
            labels[idx_offset + 1] = '0.00'
            for idx in [idx_offset, idx_offset + 2]:
                labels[idx] = '{0:.2f}'.format((ticks[idx] - offsets[0]) *
                                               params['scalings']['grad'] *
                                               1e13 * factor)
            idx_offset += 4
        if 'mag' in params['types']:
            labels[idx_offset + 1] = '0.00'
            for idx in [idx_offset, idx_offset + 2]:
                labels[idx] = '{0:.2f}'.format((ticks[idx] - offsets[1]) *
                                               params['scalings']['mag'] *
                                               1e15 * factor)
            idx_offset += 4
        if 'eeg' in params['types']:
            labels[idx_offset + 1] = '0.00'
            for idx in [idx_offset, idx_offset + 2]:
                labels[idx] = '{0:.2f}'.format((ticks[idx] - offsets[2]) *
                                               params['scalings']['eeg'] *
                                               1e6 * factor)
            idx_offset += 4
        if 'eog' in params['types']:
            labels[idx_offset + 1] = '0.00'
            for idx in [idx_offset, idx_offset + 2]:
                labels[idx] = '{0:.2f}'.format((ticks[idx] - offsets[3]) *
                                               params['scalings']['eog'] *
                                               1e6 * factor)
            idx_offset += 4
        if 'ecg' in params['types']:
            labels[idx_offset + 1] = '0.00'
            for idx in [idx_offset, idx_offset + 2]:
                labels[idx] = '{0:.2f}'.format((ticks[idx] - offsets[4]) *
                                               params['scalings']['ecg'] *
                                               1e6 * factor)
        ax.set_yticklabels(labels, fontsize=12, color='black')
    else:
        ax.set_yticklabels(tick_list, fontsize=12)
    params['vsel_patch'].set_y(ch_start)
    params['fig'].canvas.draw()
    # XXX This is a hack to make sure this figure gets drawn last
    # so that when matplotlib goes to calculate bounds we don't get a
    # CGContextRef error on the MacOSX backend :(
    if params['fig_proj'] is not None:
        params['fig_proj'].canvas.draw()
def _plot_update_epochs_proj(params, bools=None):
    """Recompute the projector and refresh the plotted data.

    Only needs to be called when the projection selection changes (or the
    visible data window changes size/position).

    Parameters
    ----------
    params : dict
        The epochs-browser state dictionary.
    bools : array-like of bool | None
        Active state of each projection vector. If None, the active set
        is left unchanged and only the data/plot are refreshed.
    """
    if bools is not None:
        inds = np.where(bools)[0]
        params['info']['projs'] = [copy.deepcopy(params['projs'][ii])
                                   for ii in inds]
        params['proj_bools'] = bools
    params['projector'], _ = setup_proj(params['info'], add_eeg_ref=False,
                                        verbose=False)
    # Slice out the epochs currently in view and lay them side by side
    # along the time axis.
    start = int(params['t_start'] / len(params['epochs'].times))
    n_epochs = params['n_epochs']
    end = start + n_epochs
    data = np.concatenate(params['epochs'][start:end].get_data(), axis=1)
    if params['projector'] is not None:
        data = np.dot(params['projector'], data)
    types = params['types']
    # Normalize each displayed channel by the scaling of its channel type.
    for pick, ind in enumerate(params['inds']):
        params['data'][pick] = data[ind] / params['scalings'][types[pick]]
    params['plot_fun']()
def _handle_picks(epochs):
    """Select the channel indices to display for these epochs.

    ICA sources are stored as misc channels, so when any channel name
    contains 'ICA' the misc channels are picked; otherwise the usual
    data channels (MEG/EEG/EOG/ECG) are picked.
    """
    has_ica = any('ICA' in name for name in epochs.ch_names)
    if has_ica:
        return pick_types(epochs.info, misc=True, ref_meg=False, exclude=[])
    return pick_types(epochs.info, meg=True, eeg=True, eog=True, ecg=True,
                      ref_meg=False, exclude=[])
def _plot_window(value, params):
"""Deal with horizontal shift of the viewport."""
max_times = len(params['times']) - params['duration']
if value > max_times:
value = len(params['times']) - params['duration']
if value < 0:
value = 0
if params['t_start'] != value:
params['t_start'] = value
params['hsel_patch'].set_x(value)
params['plot_update_proj_callback'](params)
def _plot_vert_lines(params):
    """Redraw the vertical marker lines (zerolines and epoch boundaries).

    All existing lines are removed from the main axes first, so any
    previously drawn vertical time marker is discarded as well.
    """
    ax = params['ax']
    while len(ax.lines) > 0:
        ax.lines.pop()
    params['vert_lines'] = list()
    params['vertline_t'].set_text('')
    epochs = params['epochs']
    if params['settings'][3]:  # if zeroline visible
        t_zero = np.where(epochs.times == 0.)[0]
        if len(t_zero) == 1:
            # Draw a translucent green line at time zero of every epoch.
            for event_idx in range(len(epochs.events)):
                pos = [event_idx * len(epochs.times) + t_zero[0],
                       event_idx * len(epochs.times) + t_zero[0]]
                ax.plot(pos, ax.get_ylim(), 'g', zorder=3, alpha=0.4)
    # Dashed black separators between consecutive epochs.
    for epoch_idx in range(len(epochs.events)):
        pos = [epoch_idx * len(epochs.times), epoch_idx * len(epochs.times)]
        ax.plot(pos, ax.get_ylim(), color='black', linestyle='--', zorder=1)
def _pick_bad_epochs(event, params):
    """Toggle the bad status of the clicked epoch.

    In ICA mode the click is forwarded to the channel (component) picker
    instead. Marking an epoch bad colors its traces and its patch in the
    horizontal scroll bar red; clicking again restores it.
    """
    if 'ica' in params:
        pos = (event.xdata, event.ydata)
        _pick_bad_channels(pos, params)
        return
    # Translate the click's x position into an absolute epoch index.
    n_times = len(params['epochs'].times)
    start_idx = int(params['t_start'] / n_times)
    xdata = event.xdata
    xlim = event.inaxes.get_xlim()
    epoch_idx = start_idx + int(xdata / (xlim[1] / params['n_epochs']))
    total_epochs = len(params['epochs'].events)
    if epoch_idx > total_epochs - 1:
        return
    # remove bad epoch
    if epoch_idx in params['bads']:
        params['bads'] = params['bads'][(params['bads'] != epoch_idx)]
        # Restore the default trace colors for this epoch.
        for ch_idx in range(len(params['ch_names'])):
            params['colors'][ch_idx][epoch_idx] = params['def_colors'][ch_idx]
        params['ax_hscroll'].patches[epoch_idx].set_color('w')
        params['ax_hscroll'].patches[epoch_idx].set_zorder(1)
        params['plot_fun']()
        return
    # add bad epoch
    params['bads'] = np.append(params['bads'], epoch_idx)
    params['ax_hscroll'].patches[epoch_idx].set_color((1., 0., 0., 1.))
    params['ax_hscroll'].patches[epoch_idx].set_zorder(2)
    params['ax_hscroll'].patches[epoch_idx].set_edgecolor('w')
    for ch_idx in range(len(params['ch_names'])):
        params['colors'][ch_idx][epoch_idx] = (1., 0., 0., 1.)
    params['plot_fun']()
def _pick_bad_channels(pos, params):
    """Toggle the bad status of the channel whose y-axis label is at ``pos``.

    Updates ``info['bads']`` and the vertical scroll bar patch color. In
    ICA mode a redraw suffices; otherwise the projector is recomputed
    because the set of good channels changed.
    """
    text, ch_idx = _label2idx(params, pos)
    if text is None:
        return
    if text in params['info']['bads']:
        # Remove every occurrence, in case the name was added twice.
        while text in params['info']['bads']:
            params['info']['bads'].remove(text)
        color = params['def_colors'][ch_idx]
        params['ax_vscroll'].patches[ch_idx + 1].set_color(color)
    else:
        params['info']['bads'].append(text)
        color = params['bad_color']
        params['ax_vscroll'].patches[ch_idx + 1].set_color(color)
    if 'ica' in params:
        params['plot_fun']()
    else:
        params['plot_update_proj_callback'](params)
def _plot_onscroll(event, params):
    """Handle mouse-wheel scroll events.

    Ctrl+scroll rescales the traces by rerouting through the '-'/'+'
    key handler; a plain scroll pages through channels, except in
    butterfly mode where channel scrolling does not apply.
    """
    if event.key == 'control':
        # Translate scroll direction into the equivalent key press.
        event.key = '-' if event.step < 0 else '+'
        _plot_onkey(event, params)
    elif not params['butterfly']:
        _plot_raw_onscroll(event, params, len(params['ch_names']))
def _mouse_click(event, params):
    """Dispatch mouse button presses.

    Outside all axes (on a y-axis label): left click invokes the label
    click handler, right click opens an ERP/ERF image for the channel.
    Left click inside an axes: vertical scroll bar jumps channels,
    horizontal scroll bar jumps to the nearest epoch, main axes toggles
    the clicked epoch's bad status. Middle click on the main axes
    redraws; right click toggles a vertical time marker in every epoch.
    """
    if event.inaxes is None:
        if params['butterfly'] or not params['settings'][0]:
            return
        # Map the raw pixel position into data coordinates to test
        # whether a y-axis label region was hit.
        ax = params['ax']
        ylim = ax.get_ylim()
        pos = ax.transData.inverted().transform((event.x, event.y))
        if pos[0] > 0 or pos[1] < 0 or pos[1] > ylim[0]:
            return
        if event.button == 1:  # left click
            params['label_click_fun'](pos)
        elif event.button == 3:  # right click
            if 'ica' not in params:
                _, ch_idx = _label2idx(params, pos)
                if ch_idx is None:
                    return
                if channel_type(params['info'], ch_idx) not in ['mag', 'grad',
                                                                'eeg', 'eog']:
                    logger.info('Event related fields / potentials only '
                                'available for MEG and EEG channels.')
                    return
                fig = plot_epochs_image(params['epochs'],
                                        picks=params['inds'][ch_idx],
                                        fig=params['image_plot'])[0]
                params['image_plot'] = fig
    elif event.button == 1:  # left click
        # vertical scroll bar changed
        if event.inaxes == params['ax_vscroll']:
            if params['butterfly']:
                return
            # Center the view on the clicked channel.
            ch_start = max(int(event.ydata) - params['n_channels'] // 2, 0)
            if params['ch_start'] != ch_start:
                params['ch_start'] = ch_start
                params['plot_fun']()
        # horizontal scroll bar changed
        elif event.inaxes == params['ax_hscroll']:
            # find the closest epoch time
            times = params['epoch_times']
            offset = 0.5 * params['n_epochs'] * len(params['epochs'].times)
            xdata = times.flat[np.abs(times - (event.xdata - offset)).argmin()]
            _plot_window(xdata, params)
        # main axes
        elif event.inaxes == params['ax']:
            _pick_bad_epochs(event, params)
    elif event.inaxes == params['ax'] and event.button == 2:  # middle click
        params['fig'].canvas.draw()
        if params['fig_proj'] is not None:
            params['fig_proj'].canvas.draw()
    elif event.inaxes == params['ax'] and event.button == 3:  # right click
        # Toggle a yellow vertical marker at the clicked time point:
        # remove the old marker lines, and if the click was at a new
        # time, draw fresh ones in every visible epoch.
        n_times = len(params['epochs'].times)
        xdata = int(event.xdata % n_times)
        prev_xdata = 0
        if len(params['vert_lines']) > 0:
            prev_xdata = params['vert_lines'][0][0].get_data()[0][0]
        while len(params['vert_lines']) > 0:
            params['ax'].lines.remove(params['vert_lines'][0][0])
            params['vert_lines'].pop(0)
        if prev_xdata == xdata:  # lines removed
            params['vertline_t'].set_text('')
            params['plot_fun']()
            return
        ylim = params['ax'].get_ylim()
        for epoch_idx in range(params['n_epochs']):  # plot lines
            pos = [epoch_idx * n_times + xdata, epoch_idx * n_times + xdata]
            params['vert_lines'].append(params['ax'].plot(pos, ylim, 'y',
                                                          zorder=4))
        params['vertline_t'].set_text('%0.3f' % params['epochs'].times[xdata])
        params['plot_fun']()
def _plot_onkey(event, params):
    """Handle key presses.

    Keys: up/down page channels; left/right page epochs; '-'/'+'/'='
    rescale traces; pageup/pagedown change the number of visible
    channels; home/end change the number of visible epochs; 'b'
    toggles butterfly mode; 'o' opens the view-settings dialog; 'h'
    plots the peak-to-peak histogram; 'f11' toggles full screen; '?'
    shows help; escape closes the plot.
    """
    import matplotlib.pyplot as plt
    if event.key == 'down':
        if params['butterfly']:
            return
        params['ch_start'] += params['n_channels']
        _channels_changed(params, len(params['ch_names']))
    elif event.key == 'up':
        if params['butterfly']:
            return
        params['ch_start'] -= params['n_channels']
        _channels_changed(params, len(params['ch_names']))
    elif event.key == 'left':
        sample = params['t_start'] - params['duration']
        sample = np.max([0, sample])
        _plot_window(sample, params)
    elif event.key == 'right':
        sample = params['t_start'] + params['duration']
        sample = np.min([sample, params['times'][-1] - params['duration']])
        times = params['epoch_times']
        # Snap to the nearest epoch start.
        xdata = times.flat[np.abs(times - sample).argmin()]
        _plot_window(xdata, params)
    elif event.key == '-':
        if params['butterfly']:
            params['butterfly_scale'] /= 1.1
        else:
            params['scale_factor'] /= 1.1
        params['plot_fun']()
    elif event.key in ['+', '=']:
        if params['butterfly']:
            params['butterfly_scale'] *= 1.1
        else:
            params['scale_factor'] *= 1.1
        params['plot_fun']()
    elif event.key == 'f11':
        mng = plt.get_current_fig_manager()
        mng.full_screen_toggle()
    elif event.key == 'pagedown':
        # Show one channel less: drop the last line collection and
        # recompute the per-channel vertical offsets.
        if params['n_channels'] == 1 or params['butterfly']:
            return
        n_channels = params['n_channels'] - 1
        ylim = params['ax'].get_ylim()
        offset = ylim[0] / n_channels
        params['offsets'] = np.arange(n_channels) * offset + (offset / 2.)
        params['n_channels'] = n_channels
        params['ax'].collections.pop()
        params['ax'].set_yticks(params['offsets'])
        params['lines'].pop()
        params['vsel_patch'].set_height(n_channels)
        params['plot_fun']()
    elif event.key == 'pageup':
        # Show one channel more: add an empty line collection and
        # recompute the per-channel vertical offsets.
        if params['butterfly']:
            return
        from matplotlib.collections import LineCollection
        n_channels = params['n_channels'] + 1
        ylim = params['ax'].get_ylim()
        offset = ylim[0] / n_channels
        params['offsets'] = np.arange(n_channels) * offset + (offset / 2.)
        params['n_channels'] = n_channels
        lc = LineCollection(list(), antialiased=False, linewidths=0.5,
                            zorder=2, picker=3.)
        params['ax'].add_collection(lc)
        params['ax'].set_yticks(params['offsets'])
        params['lines'].append(lc)
        params['vsel_patch'].set_height(n_channels)
        params['plot_fun']()
    elif event.key == 'home':
        # Show one epoch less.
        n_epochs = params['n_epochs'] - 1
        if n_epochs <= 0:
            return
        n_times = len(params['epochs'].times)
        ticks = params['epoch_times'] + 0.5 * n_times
        params['ax2'].set_xticks(ticks[:n_epochs])
        params['n_epochs'] = n_epochs
        params['duration'] -= n_times
        params['hsel_patch'].set_width(params['duration'])
        params['data'] = params['data'][:, :-n_times]
        params['plot_update_proj_callback'](params)
    elif event.key == 'end':
        # Show one epoch more.
        n_epochs = params['n_epochs'] + 1
        n_times = len(params['epochs'].times)
        if n_times * n_epochs > len(params['times']):
            return
        ticks = params['epoch_times'] + 0.5 * n_times
        params['ax2'].set_xticks(ticks[:n_epochs])
        params['n_epochs'] = n_epochs
        if len(params['vert_lines']) > 0:
            # Extend the vertical time marker into the newly shown epoch.
            ax = params['ax']
            pos = params['vert_lines'][0][0].get_data()[0] + params['duration']
            params['vert_lines'].append(ax.plot(pos, ax.get_ylim(), 'y',
                                                zorder=3))
        params['duration'] += n_times
        if params['t_start'] + params['duration'] > len(params['times']):
            # Keep the window inside the data by moving its start back.
            params['t_start'] -= n_times
            params['hsel_patch'].set_x(params['t_start'])
        params['hsel_patch'].set_width(params['duration'])
        params['data'] = np.zeros((len(params['data']), params['duration']))
        params['plot_update_proj_callback'](params)
    elif event.key == 'b':
        # Toggle butterfly mode; close the options dialog first.
        if params['fig_options'] is not None:
            plt.close(params['fig_options'])
            params['fig_options'] = None
        _prepare_butterfly(params)
        _plot_traces(params)
    elif event.key == 'o':
        if not params['butterfly']:
            _open_options(params)
    elif event.key == 'h':
        _plot_histogram(params)
    elif event.key == '?':
        _onclick_help(event, params)
    elif event.key == 'escape':
        plt.close(params['fig'])
def _prepare_butterfly(params):
    """Toggle butterfly mode and set up the axes accordingly.

    When entering butterfly mode the y-axis is re-ticked into one
    annotated section per channel type present (grad, mag, eeg, eog,
    ecg, in that order) and enough line collections are created to draw
    every picked channel. When leaving, the normal per-channel layout
    is restored. If none of the supported types is present, nothing
    happens.
    """
    from matplotlib.collections import LineCollection
    butterfly = not params['butterfly']
    if butterfly:
        types = set(['grad', 'mag', 'eeg', 'eog',
                     'ecg']) & set(params['types'])
        if len(types) < 1:
            return
        params['ax_vscroll'].set_visible(False)
        ax = params['ax']
        labels = ax.yaxis.get_ticklabels()
        for label in labels:
            label.set_visible(True)
        # Four tick slots per channel type; pad the tick list to 20
        # entries so the (fixed-size) tick label array stays valid.
        ylim = (5. * len(types), 0.)
        ax.set_ylim(ylim)
        offset = ylim[0] / (4. * len(types))
        ticks = np.arange(0, ylim[0], offset)
        ticks = [ticks[x] if x < len(ticks) else 0 for x in range(20)]
        ax.set_yticks(ticks)
        used_types = 0
        params['offsets'] = [ticks[2]]
        # One annotated y-axis section per channel type present, in this
        # fixed order (factored out of five previously duplicated blocks).
        type_labels = (('grad', 'Grad (fT/cm)'), ('mag', 'Mag (fT)'),
                       ('eeg', 'EEG (uV)'), ('eog', 'EOG (uV)'),
                       ('ecg', 'ECG (uV)'))
        for ch_type, type_label in type_labels:
            if ch_type not in types:
                continue
            pos = (0, 1 - (ticks[2 + used_types * 4] / ylim[0]))
            params['ax2'].annotate(type_label, xy=pos, xytext=(-70, 0),
                                   ha='left', size=12, va='center',
                                   xycoords='axes fraction', rotation=90,
                                   textcoords='offset points')
            used_types += 1
            if ch_type != 'ecg':  # 'ecg' is last; no further offset needed
                params['offsets'].append(ticks[2 + used_types * 4])
        # Make sure there is one line collection per picked channel.
        while len(params['lines']) < len(params['picks']):
            lc = LineCollection(list(), antialiased=False, linewidths=0.5,
                                zorder=2, picker=3.)
            ax.add_collection(lc)
            params['lines'].append(lc)
    else:  # change back to default view
        labels = params['ax'].yaxis.get_ticklabels()
        for label in labels:
            label.set_visible(params['settings'][0])
        params['ax_vscroll'].set_visible(True)
        while len(params['ax2'].texts) > 0:
            params['ax2'].texts.pop()
        n_channels = params['n_channels']
        # Drop the extra line collections added for butterfly mode.
        while len(params['lines']) > n_channels:
            params['ax'].collections.pop()
            params['lines'].pop()
        ylim = (25., 0.)
        params['ax'].set_ylim(ylim)
        offset = ylim[0] / n_channels
        params['offsets'] = np.arange(n_channels) * offset + (offset / 2.)
        params['ax'].set_yticks(params['offsets'])
    params['butterfly'] = butterfly
def _onpick(event, params):
"""Helper to add a channel name on click"""
if event.mouseevent.button != 2 or not params['butterfly']:
return # text label added with a middle mouse button
lidx = np.where([l is event.artist for l in params['lines']])[0][0]
text = params['text']
text.set_x(event.mouseevent.xdata)
text.set_y(event.mouseevent.ydata)
text.set_text(params['ch_names'][lidx])
text.set_visible(True)
# do NOT redraw here, since for butterfly plots hundreds of lines could
# potentially be picked -- use _mouse_click (happens once per click)
# to do the drawing
def _close_event(event, params):
    """Drop bad epochs and propagate bad channels when the plot closes.

    The interactively marked bad epochs are dropped from the Epochs
    object in place, and the bad-channel selection is written back to
    ``epochs.info``.
    """
    params['epochs'].drop_epochs(params['bads'])
    params['epochs'].info['bads'] = params['info']['bads']
    logger.info('Channels marked as bad: %s' % params['epochs'].info['bads'])
def _resize_event(event, params):
    """Persist the new figure size and recompute the plot layout."""
    width, height = params['fig'].get_size_inches()
    # Remember the size so the next browser window opens with it.
    set_config('MNE_BROWSE_RAW_SIZE', '%s,%s' % (width, height))
    _layout_figure(params)
def _update_channels_epochs(event, params):
    """Apply the channel/epoch counts chosen in the options dialog.

    Reads both sliders, rebuilds the line collections to match the new
    number of visible channels, resizes the scroll-bar patches and
    triggers a full data/projection refresh.
    """
    from matplotlib.collections import LineCollection
    # Channels
    n_channels = int(np.around(params['channel_slider'].val))
    offset = params['ax'].get_ylim()[0] / n_channels
    params['offsets'] = np.arange(n_channels) * offset + (offset / 2.)
    # Grow/shrink the pool of line collections to n_channels.
    while len(params['lines']) > n_channels:
        params['ax'].collections.pop()
        params['lines'].pop()
    while len(params['lines']) < n_channels:
        lc = LineCollection(list(), linewidths=0.5, antialiased=False,
                            zorder=2, picker=3.)
        params['ax'].add_collection(lc)
        params['lines'].append(lc)
    params['ax'].set_yticks(params['offsets'])
    params['vsel_patch'].set_height(n_channels)
    params['n_channels'] = n_channels
    # Epochs
    n_epochs = int(np.around(params['epoch_slider'].val))
    n_times = len(params['epochs'].times)
    ticks = params['epoch_times'] + 0.5 * n_times
    params['ax2'].set_xticks(ticks[:n_epochs])
    params['n_epochs'] = n_epochs
    params['duration'] = n_times * n_epochs
    params['hsel_patch'].set_width(params['duration'])
    params['data'] = np.zeros((len(params['data']), params['duration']))
    if params['t_start'] + n_times * n_epochs > len(params['times']):
        # Clamp the window so it still fits inside the data.
        params['t_start'] = len(params['times']) - n_times * n_epochs
        params['hsel_patch'].set_x(params['t_start'])
    params['plot_update_proj_callback'](params)
def _toggle_labels(label, params):
"""Function for toggling axis labels on/off."""
if label == 'Channel names visible':
params['settings'][0] = not params['settings'][0]
labels = params['ax'].yaxis.get_ticklabels()
for label in labels:
label.set_visible(params['settings'][0])
elif label == 'Event-id visible':
params['settings'][1] = not params['settings'][1]
labels = params['ax2'].xaxis.get_ticklabels()
for label in labels:
label.set_visible(params['settings'][1])
elif label == 'Epoch-id visible':
params['settings'][2] = not params['settings'][2]
labels = params['ax'].xaxis.get_ticklabels()
for label in labels:
label.set_visible(params['settings'][2])
elif label == 'Zeroline visible':
params['settings'][3] = not params['settings'][3]
_plot_vert_lines(params)
params['fig'].canvas.draw()
if params['fig_proj'] is not None:
params['fig_proj'].canvas.draw()
def _open_options(params):
    """Toggle the view-settings dialog (channel/epoch sliders, checkboxes).

    A second call while the dialog is open closes it instead.
    """
    import matplotlib.pyplot as plt
    import matplotlib as mpl
    if params['fig_options'] is not None:
        # turn off options dialog
        plt.close(params['fig_options'])
        params['fig_options'] = None
        return
    width = 10
    height = 3
    fig_options = figure_nobar(figsize=(width, height), dpi=80)
    fig_options.canvas.set_window_title('View settings')
    params['fig_options'] = fig_options
    # Dialog layout: two sliders, an update button and a checkbox group.
    ax_channels = plt.axes([0.15, 0.1, 0.65, 0.1])
    ax_epochs = plt.axes([0.15, 0.25, 0.65, 0.1])
    ax_button = plt.axes([0.85, 0.1, 0.1, 0.25])
    ax_check = plt.axes([0.15, 0.4, 0.4, 0.55])
    plt.axis('off')
    params['update_button'] = mpl.widgets.Button(ax_button, 'Update')
    params['channel_slider'] = mpl.widgets.Slider(ax_channels, 'Channels', 1,
                                                  len(params['ch_names']),
                                                  valfmt='%0.0f',
                                                  valinit=params['n_channels'])
    params['epoch_slider'] = mpl.widgets.Slider(ax_epochs, 'Epochs', 1,
                                                len(params['epoch_times']),
                                                valfmt='%0.0f',
                                                valinit=params['n_epochs'])
    params['checkbox'] = mpl.widgets.CheckButtons(ax_check,
                                                  ['Channel names visible',
                                                   'Event-id visible',
                                                   'Epoch-id visible',
                                                   'Zeroline visible'],
                                                  actives=params['settings'])
    update = partial(_update_channels_epochs, params=params)
    params['update_button'].on_clicked(update)
    labels_callback = partial(_toggle_labels, params=params)
    params['checkbox'].on_clicked(labels_callback)
    close_callback = partial(_settings_closed, params=params)
    params['fig_options'].canvas.mpl_connect('close_event', close_callback)
    try:
        params['fig_options'].canvas.draw()
        params['fig_options'].show(warn=False)
        if params['fig_proj'] is not None:
            params['fig_proj'].canvas.draw()
    except Exception:
        # Best effort: some backends cannot show()/draw() here.
        pass
def _settings_closed(events, params):
    """Clear the stale dialog reference when the settings window closes."""
    # Lets the 'o' key open a fresh dialog instead of toggling a dead one.
    params['fig_options'] = None
def _plot_histogram(params):
    """Plot a histogram of peak-to-peak amplitudes per channel type.

    Opens a new figure with one subplot per channel type present (EEG,
    magnetometers, gradiometers, in that order). If a rejection
    threshold is defined for a type it is drawn as a vertical red line
    and used to bound the histogram range.
    """
    import matplotlib.pyplot as plt
    epochs = params['epochs']
    p2p = np.ptp(epochs.get_data(), axis=2)
    types = list()
    data = list()
    # Collect peak-to-peak values per present channel type (previously
    # three duplicated blocks).
    for ch_type in ('eeg', 'mag', 'grad'):
        if ch_type not in params['types']:
            continue
        vals = np.array([p2p.T[i] for i, x in enumerate(params['types'])
                         if x == ch_type])
        data.append(vals.ravel())
        types.append(ch_type)
    params['histogram'] = plt.figure()
    scalings = _handle_default('scalings')
    units = _handle_default('units')
    titles = _handle_default('titles')
    colors = _handle_default('color')
    for idx in range(len(types)):
        ax = plt.subplot(len(types), 1, idx + 1)
        plt.xlabel(units[types[idx]])
        plt.ylabel('count')
        color = colors[types[idx]]
        rej = None
        if epochs.reject is not None and types[idx] in epochs.reject.keys():
            rej = epochs.reject[types[idx]] * scalings[types[idx]]
            rng = [0., rej * 1.1]
        else:
            rng = None
        plt.hist(data[idx] * scalings[types[idx]], bins=100, color=color,
                 range=rng)
        if rej is not None:
            # Mark the rejection threshold.
            ax.plot((rej, rej), (0, ax.get_ylim()[1]), color='r')
        plt.title(titles[types[idx]])
    params['histogram'].suptitle('Peak-to-peak histogram', y=0.99)
    params['histogram'].subplots_adjust(hspace=0.6)
    try:
        params['histogram'].show(warn=False)
    except Exception:  # was a bare except; keep the best-effort show only
        pass
    if params['fig_proj'] is not None:
        params['fig_proj'].canvas.draw()
def _label2idx(params, pos):
"""Aux function for click on labels. Returns channel name and idx."""
labels = params['ax'].yaxis.get_ticklabels()
offsets = np.array(params['offsets']) + params['offsets'][0]
line_idx = np.searchsorted(offsets, pos[1])
text = labels[line_idx].get_text()
if len(text) == 0:
return None, None
ch_idx = params['ch_start'] + line_idx
return text, ch_idx
|
cmoutard/mne-python
|
mne/viz/epochs.py
|
Python
|
bsd-3-clause
| 62,694
|
#!/usr/bin/env python3
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Meta checkout dependency manager for Git."""
# Files
# .gclient : Current client configuration, written by 'config' command.
# Format is a Python script defining 'solutions', a list whose
# entries each are maps binding the strings "name" and "url"
# to strings specifying the name and location of the client
# module, as well as "custom_deps" to a map similar to the
# deps section of the DEPS file below, as well as
# "custom_hooks" to a list similar to the hooks sections of
# the DEPS file below.
# .gclient_entries : A cache constructed by 'update' command. Format is a
# Python script defining 'entries', a list of the names
# of all modules in the client
# <module>/DEPS : Python script defining var 'deps' as a map from each
# requisite submodule name to a URL where it can be found (via
# one SCM)
#
# Hooks
# .gclient and DEPS files may optionally contain a list named "hooks" to
# allow custom actions to be performed based on files that have changed in the
# working copy as a result of a "sync"/"update" or "revert" operation. This
# can be prevented by using --nohooks (hooks run by default). Hooks can also
# be forced to run with the "runhooks" operation. If "sync" is run with
# --force, all known but not suppressed hooks will run regardless of the state
# of the working copy.
#
# Each item in a "hooks" list is a dict, containing these two keys:
# "pattern" The associated value is a string containing a regular
# expression. When a file whose pathname matches the expression
# is checked out, updated, or reverted, the hook's "action" will
# run.
# "action" A list describing a command to run along with its arguments, if
# any. An action command will run at most one time per gclient
# invocation, regardless of how many files matched the pattern.
# The action is executed in the same directory as the .gclient
# file. If the first item in the list is the string "python",
# the current Python interpreter (sys.executable) will be used
# to run the command. If the list contains string
# "$matching_files" it will be removed from the list and the list
# will be extended by the list of matching files.
# "name" An optional string specifying the group to which a hook belongs
# for overriding and organizing.
#
# Example:
# hooks = [
# { "pattern": "\\.(gif|jpe?g|pr0n|png)$",
# "action": ["python", "image_indexer.py", "--all"]},
# { "pattern": ".",
# "name": "gyp",
# "action": ["python", "src/build/gyp_chromium"]},
# ]
#
# Pre-DEPS Hooks
# DEPS files may optionally contain a list named "pre_deps_hooks". These are
# the same as normal hooks, except that they run before the DEPS are
# processed. Pre-DEPS run with "sync" and "revert" unless the --noprehooks
# flag is used.
#
# Specifying a target OS
# An optional key named "target_os" may be added to a gclient file to specify
# one or more additional operating systems that should be considered when
# processing the deps_os/hooks_os dict of a DEPS file.
#
# Example:
# target_os = [ "android" ]
#
# If the "target_os_only" key is also present and true, then *only* the
# operating systems listed in "target_os" will be used.
#
# Example:
# target_os = [ "ios" ]
# target_os_only = True
#
# Specifying a target CPU
# To specify a target CPU, the variables target_cpu and target_cpu_only
# are available and are analogous to target_os and target_os_only.
from __future__ import print_function
__version__ = '0.7'
import collections
import copy
import json
import logging
import optparse
import os
import platform
import posixpath
import pprint
import re
import sys
import time
try:
import urlparse
except ImportError: # For Py3 compatibility
import urllib.parse as urlparse
import detect_host_arch
import fix_encoding
import gclient_eval
import gclient_scm
import gclient_paths
import gclient_utils
import git_cache
import metrics
import metrics_utils
from third_party.repo.progress import Progress
import subcommand
import subprocess2
import setup_color
from third_party import six
# TODO(crbug.com/953884): Remove this when python3 migration is done.
if six.PY3:
  # Python 3 has no `basestring`; alias it to `str` so the isinstance
  # checks below work under both interpreters.
  # pylint: disable=redefined-builtin
  basestring = str

# Absolute path of the directory containing this script.
DEPOT_TOOLS_DIR = os.path.dirname(os.path.abspath(os.path.realpath(__file__)))

# Singleton object to represent an unset cache_dir (as opposed to a disabled
# one, e.g. if a spec explicitly says `cache_dir = None`.)
UNSET_CACHE_DIR = object()
class GNException(Exception):
  """Raised when a value cannot be represented in GN syntax."""
  pass
def ToGNString(value, allow_dicts=True):
  """Returns a stringified GN equivalent of the Python value.

  allow_dicts indicates if this function will allow converting dictionaries
  to GN scopes. This is only possible at the top level, you can't nest a
  GN scope in a list, so this should be set to False for recursive calls.

  Raises GNException for strings containing a newline and for types with
  no GN equivalent.
  """
  if isinstance(value, basestring):
    if '\n' in value:
      raise GNException("Trying to print a string with a newline in it.")
    # Escape backslashes first so the escapes added below are not
    # themselves re-escaped.
    escaped = value.replace('\\', '\\\\').replace('"', '\\"')
    escaped = escaped.replace('$', '\\$')
    return '"%s"' % escaped

  if sys.version_info.major == 2 and isinstance(value, unicode):
    return ToGNString(value.encode('utf-8'))

  if isinstance(value, bool):
    return "true" if value else "false"

  # NOTE: some type handling removed compared to chromium/src copy.
  raise GNException("Unsupported type when printing to GN.")
class Hook(object):
  """Descriptor of command ran before/after sync or on demand."""

  def __init__(self, action, pattern=None, name=None, cwd=None, condition=None,
               variables=None, verbose=False, cwd_base=None):
    """Constructor.

    Arguments:
      action (list of basestring): argv of the command to run
      pattern (basestring regex): noop with git; deprecated
      name (basestring): optional name; no effect on operation
      cwd (basestring): working directory to use
      condition (basestring): condition when to run the hook
      variables (dict): variables for evaluating the condition
      verbose (bool): always print the hook's output header
      cwd_base (basestring): base directory that cwd is joined onto
    """
    self._action = gclient_utils.freeze(action)
    self._pattern = pattern
    self._name = name
    self._cwd = cwd
    self._condition = condition
    self._variables = variables
    self._verbose = verbose
    self._cwd_base = cwd_base

  @staticmethod
  def from_dict(d, variables=None, verbose=False, conditions=None,
                cwd_base=None):
    """Creates a Hook instance from a dict like in the DEPS file."""
    # Merge any local and inherited conditions.
    gclient_eval.UpdateCondition(d, 'and', conditions)
    return Hook(
        d['action'],
        d.get('pattern'),
        d.get('name'),
        d.get('cwd'),
        d.get('condition'),
        variables=variables,
        # Always print the header if not printing to a TTY.
        verbose=verbose or not setup_color.IS_TTY,
        cwd_base=cwd_base)

  @property
  def action(self):
    """Frozen argv of the hook command."""
    return self._action

  @property
  def pattern(self):
    """Deprecated file-match regex (a no-op with git)."""
    return self._pattern

  @property
  def name(self):
    """Optional hook name, used for grouping/overriding."""
    return self._name

  @property
  def condition(self):
    """Condition string controlling whether the hook runs, or None."""
    return self._condition

  @property
  def effective_cwd(self):
    """Working directory for the hook: cwd_base, plus cwd if given."""
    cwd = self._cwd_base
    if self._cwd:
      cwd = os.path.join(cwd, self._cwd)
    return cwd

  def matches(self, file_list):
    """Returns true if the pattern matches any of files in the list."""
    if not self._pattern:
      return True
    pattern = re.compile(self._pattern)
    return bool([f for f in file_list if pattern.search(f)])

  def run(self):
    """Executes the hook's command (provided the condition is met)."""
    if (self._condition and
        not gclient_eval.EvaluateCondition(self._condition, self._variables)):
      return

    cmd = list(self._action)

    # Route python hooks through vpython (the .bat wrapper on Windows).
    if cmd[0] == 'python':
      cmd[0] = 'vpython'
    if cmd[0] == 'vpython' and _detect_host_os() == 'win':
      cmd[0] += '.bat'

    exit_code = 2
    try:
      start_time = time.time()
      gclient_utils.CheckCallAndFilter(
          cmd, cwd=self.effective_cwd, print_stdout=True, show_header=True,
          always_show_header=self._verbose)
      exit_code = 0
    except (gclient_utils.Error, subprocess2.CalledProcessError) as e:
      # Use a discrete exit status code of 2 to indicate that a hook action
      # failed. Users of this script may wish to treat hook action failures
      # differently from VC failures.
      print('Error: %s' % str(e), file=sys.stderr)
      sys.exit(exit_code)
    finally:
      # Hook telemetry is collected whether the hook succeeded or not.
      elapsed_time = time.time() - start_time
      metrics.collector.add_repeated('hooks', {
        'action': gclient_utils.CommandToStr(cmd),
        'name': self._name,
        'cwd': os.path.relpath(
            os.path.normpath(self.effective_cwd),
            self._cwd_base),
        'condition': self._condition,
        'execution_time': elapsed_time,
        'exit_code': exit_code,
      })
      if elapsed_time > 10:
        print("Hook '%s' took %.2f secs" % (
            gclient_utils.CommandToStr(cmd), elapsed_time))
class DependencySettings(object):
  """Immutable configuration settings for a single dependency."""

  def __init__(
      self, parent, url, managed, custom_deps, custom_vars,
      custom_hooks, deps_file, should_process, relative, condition):
    # These are not mutable:
    self._parent = parent
    self._deps_file = deps_file
    self._url = url
    # The condition as string (or None). Useful to keep e.g. for flatten.
    self._condition = condition
    # 'managed' determines whether or not this dependency is synced/updated by
    # gclient after gclient checks it out initially. The difference between
    # 'managed' and 'should_process' is that the user specifies 'managed' via
    # the --unmanaged command-line flag or a .gclient config, where
    # 'should_process' is dynamically set by gclient if it goes over its
    # recursion limit and controls gclient's behavior so it does not misbehave.
    self._managed = managed
    self._should_process = should_process
    # If this is a recursed-upon sub-dependency, and the parent has
    # use_relative_paths set, then this dependency should check out its own
    # dependencies relative to that parent's path for this, rather than
    # relative to the .gclient file.
    self._relative = relative
    # This is a mutable value which has the list of 'target_os' OSes listed in
    # the current deps file.
    self.local_target_os = None

    # These are only set in .gclient and not in DEPS files.
    self._custom_vars = custom_vars or {}
    self._custom_deps = custom_deps or {}
    self._custom_hooks = custom_hooks or []

    # Post process the url to remove trailing slashes.
    if isinstance(self.url, basestring):
      # urls are sometime incorrectly written as proto://host/path/@rev. Replace
      # it to proto://host/path@rev.
      self.set_url(self.url.replace('/@', '@'))
    elif not isinstance(self.url, (None.__class__)):
      raise gclient_utils.Error(
          ('dependency url must be either string or None, '
           'instead of %s') % self.url.__class__.__name__)

    # Make any deps_file path platform-appropriate.
    if self._deps_file:
      for sep in ['/', '\\']:
        self._deps_file = self._deps_file.replace(sep, os.sep)

  @property
  def deps_file(self):
    """Path of the DEPS file for this dependency (platform separators)."""
    return self._deps_file

  @property
  def managed(self):
    """Whether gclient keeps this dependency synced after initial checkout."""
    return self._managed

  @property
  def parent(self):
    """The parent dependency (or the GClient root), or None at the root."""
    return self._parent

  @property
  def root(self):
    """Returns the root node, a GClient object."""
    if not self.parent:
      # This line is to signal pylint that it could be a GClient instance.
      return self or GClient(None, None)
    return self.parent.root

  @property
  def should_process(self):
    """True if this dependency should be processed, i.e. checked out."""
    return self._should_process

  @property
  def custom_vars(self):
    """Copy of the custom_vars dict (safe for the caller to mutate)."""
    return self._custom_vars.copy()

  @property
  def custom_deps(self):
    """Copy of the custom_deps dict (safe for the caller to mutate)."""
    return self._custom_deps.copy()

  @property
  def custom_hooks(self):
    """Copy of the custom_hooks list (safe for the caller to mutate)."""
    return self._custom_hooks[:]

  @property
  def url(self):
    """URL after variable expansion."""
    return self._url

  @property
  def condition(self):
    """The condition string from the DEPS file, or None."""
    return self._condition

  @property
  def target_os(self):
    """Tuple of target OSes: this dep's local ones unioned with the parent's."""
    if self.local_target_os is not None:
      return tuple(set(self.local_target_os).union(self.parent.target_os))
    return self.parent.target_os

  @property
  def target_cpu(self):
    """Target CPUs are inherited from the parent (set on the root)."""
    return self.parent.target_cpu

  def set_url(self, url):
    self._url = url

  def get_custom_deps(self, name, url):
    """Returns a custom deps if applicable."""
    if self.parent:
      url = self.parent.get_custom_deps(name, url)
    # None is a valid return value to disable a dependency.
    return self.custom_deps.get(name, url)
class Dependency(gclient_utils.WorkItem, DependencySettings):
"""Object that represents a dependency checkout."""
def __init__(self, parent, name, url, managed, custom_deps,
custom_vars, custom_hooks, deps_file, should_process,
should_recurse, relative, condition, print_outbuf=False):
gclient_utils.WorkItem.__init__(self, name)
DependencySettings.__init__(
self, parent, url, managed, custom_deps, custom_vars,
custom_hooks, deps_file, should_process, relative, condition)
# This is in both .gclient and DEPS files:
self._deps_hooks = []
self._pre_deps_hooks = []
# Calculates properties:
self._dependencies = []
self._vars = {}
# A cache of the files affected by the current operation, necessary for
# hooks.
self._file_list = []
# List of host names from which dependencies are allowed.
# Default is an empty set, meaning unspecified in DEPS file, and hence all
# hosts will be allowed. Non-empty set means allowlist of hosts.
# allowed_hosts var is scoped to its DEPS file, and so it isn't recursive.
self._allowed_hosts = frozenset()
self._gn_args_from = None
# Spec for .gni output to write (if any).
self._gn_args_file = None
self._gn_args = []
# If it is not set to True, the dependency wasn't processed for its child
# dependency, i.e. its DEPS wasn't read.
self._deps_parsed = False
# This dependency has been processed, i.e. checked out
self._processed = False
# This dependency had its pre-DEPS hooks run
self._pre_deps_hooks_ran = False
# This dependency had its hook run
self._hooks_ran = False
# This is the scm used to checkout self.url. It may be used by dependencies
# to get the datetime of the revision we checked out.
self._used_scm = None
self._used_revision = None
# The actual revision we ended up getting, or None if that information is
# unavailable
self._got_revision = None
# Whether this dependency should use relative paths.
self._use_relative_paths = False
# recursedeps is a mutable value that selectively overrides the default
# 'no recursion' setting on a dep-by-dep basis.
#
# It will be a dictionary of {deps_name: depfile_namee}
self.recursedeps = {}
# Whether we should process this dependency's DEPS file.
self._should_recurse = should_recurse
self._OverrideUrl()
# This is inherited from WorkItem. We want the URL to be a resource.
if self.url and isinstance(self.url, basestring):
# The url is usually given to gclient either as https://blah@123
# or just https://blah. The @123 portion is irrelevant.
self.resources.append(self.url.split('@')[0])
# Controls whether we want to print git's output when we first clone the
# dependency
self.print_outbuf = print_outbuf
if not self.name and self.parent:
raise gclient_utils.Error('Dependency without name')
def _OverrideUrl(self):
"""Resolves the parsed url from the parent hierarchy."""
parsed_url = self.get_custom_deps(
self._name.replace(os.sep, posixpath.sep) \
if self._name else self._name, self.url)
if parsed_url != self.url:
logging.info('Dependency(%s)._OverrideUrl(%s) -> %s', self._name,
self.url, parsed_url)
self.set_url(parsed_url)
return
if self.url is None:
logging.info('Dependency(%s)._OverrideUrl(None) -> None', self._name)
return
if not isinstance(self.url, basestring):
raise gclient_utils.Error('Unknown url type')
# self.url is a local path
path, at, rev = self.url.partition('@')
if os.path.isdir(path):
return
# self.url is a URL
parsed_url = urlparse.urlparse(self.url)
if parsed_url[0] or re.match(r'^\w+\@[\w\.-]+\:[\w\/]+', parsed_url[2]):
return
# self.url is relative to the parent's URL.
if not path.startswith('/'):
raise gclient_utils.Error(
'relative DEPS entry \'%s\' must begin with a slash' % self.url)
parent_url = self.parent.url
parent_path = self.parent.url.split('@')[0]
if os.path.isdir(parent_path):
# Parent's URL is a local path. Get parent's URL dirname and append
# self.url.
parent_path = os.path.dirname(parent_path)
parsed_url = parent_path + path.replace('/', os.sep) + at + rev
else:
# Parent's URL is a URL. Get parent's URL, strip from the last '/'
# (equivalent to unix dirname) and append self.url.
parsed_url = parent_url[:parent_url.rfind('/')] + self.url
logging.info('Dependency(%s)._OverrideUrl(%s) -> %s', self.name,
self.url, parsed_url)
self.set_url(parsed_url)
def PinToActualRevision(self):
"""Updates self.url to the revision checked out on disk."""
if self.url is None:
return
url = None
scm = self.CreateSCM()
if os.path.isdir(scm.checkout_path):
revision = scm.revinfo(None, None, None)
url = '%s@%s' % (gclient_utils.SplitUrlRevision(self.url)[0], revision)
self.set_url(url)
def ToLines(self):
s = []
condition_part = ([' "condition": %r,' % self.condition]
if self.condition else [])
s.extend([
' # %s' % self.hierarchy(include_url=False),
' "%s": {' % (self.name,),
' "url": "%s",' % (self.url,),
] + condition_part + [
' },',
'',
])
return s
@property
def requirements(self):
"""Calculate the list of requirements."""
requirements = set()
# self.parent is implicitly a requirement. This will be recursive by
# definition.
if self.parent and self.parent.name:
requirements.add(self.parent.name)
# For a tree with at least 2 levels*, the leaf node needs to depend
# on the level higher up in an orderly way.
# This becomes messy for >2 depth as the DEPS file format is a dictionary,
# thus unsorted, while the .gclient format is a list thus sorted.
#
# Interestingly enough, the following condition only works in the case we
# want: self is a 2nd level node. 3rd level node wouldn't need this since
# they already have their parent as a requirement.
if self.parent and self.parent.parent and not self.parent.parent.parent:
requirements |= set(i.name for i in self.root.dependencies if i.name)
if self.name:
requirements |= set(
obj.name for obj in self.root.subtree(False)
if (obj is not self
and obj.name and
self.name.startswith(posixpath.join(obj.name, ''))))
requirements = tuple(sorted(requirements))
logging.info('Dependency(%s).requirements = %s' % (self.name, requirements))
return requirements
@property
def should_recurse(self):
return self._should_recurse
def verify_validity(self):
"""Verifies that this Dependency is fine to add as a child of another one.
Returns True if this entry should be added, False if it is a duplicate of
another entry.
"""
logging.info('Dependency(%s).verify_validity()' % self.name)
if self.name in [s.name for s in self.parent.dependencies]:
raise gclient_utils.Error(
'The same name "%s" appears multiple times in the deps section' %
self.name)
if not self.should_process:
# Return early, no need to set requirements.
return not any(d.name == self.name for d in self.root.subtree(True))
# This require a full tree traversal with locks.
siblings = [d for d in self.root.subtree(False) if d.name == self.name]
for sibling in siblings:
# Allow to have only one to be None or ''.
if self.url != sibling.url and bool(self.url) == bool(sibling.url):
raise gclient_utils.Error(
('Dependency %s specified more than once:\n'
' %s [%s]\n'
'vs\n'
' %s [%s]') % (
self.name,
sibling.hierarchy(),
sibling.url,
self.hierarchy(),
self.url))
# In theory we could keep it as a shadow of the other one. In
# practice, simply ignore it.
logging.warning("Won't process duplicate dependency %s" % sibling)
return False
return True
def _postprocess_deps(self, deps, rel_prefix):
"""Performs post-processing of deps compared to what's in the DEPS file."""
# Make sure the dict is mutable, e.g. in case it's frozen.
deps = dict(deps)
# If a line is in custom_deps, but not in the solution, we want to append
# this line to the solution.
for dep_name, dep_info in self.custom_deps.items():
if dep_name not in deps:
deps[dep_name] = {'url': dep_info, 'dep_type': 'git'}
# Make child deps conditional on any parent conditions. This ensures that,
# when flattened, recursed entries have the correct restrictions, even if
# not explicitly set in the recursed DEPS file. For instance, if
# "src/ios_foo" is conditional on "checkout_ios=True", then anything
# recursively included by "src/ios_foo/DEPS" should also require
# "checkout_ios=True".
if self.condition:
for value in deps.values():
gclient_eval.UpdateCondition(value, 'and', self.condition)
if rel_prefix:
logging.warning('use_relative_paths enabled.')
rel_deps = {}
for d, url in deps.items():
# normpath is required to allow DEPS to use .. in their
# dependency local path.
rel_deps[os.path.normpath(os.path.join(rel_prefix, d))] = url
logging.warning('Updating deps by prepending %s.', rel_prefix)
deps = rel_deps
return deps
def _deps_to_objects(self, deps, use_relative_paths):
"""Convert a deps dict to a dict of Dependency objects."""
deps_to_add = []
cached_conditions = {}
for name, dep_value in deps.items():
should_process = self.should_process
if dep_value is None:
continue
condition = dep_value.get('condition')
dep_type = dep_value.get('dep_type')
if condition and not self._get_option('process_all_deps', False):
if condition not in cached_conditions:
cached_conditions[condition] = gclient_eval.EvaluateCondition(
condition, self.get_vars())
should_process = should_process and cached_conditions[condition]
# The following option is only set by the 'revinfo' command.
if self._get_option('ignore_dep_type', None) == dep_type:
continue
if dep_type == 'cipd':
cipd_root = self.GetCipdRoot()
for package in dep_value.get('packages', []):
deps_to_add.append(
CipdDependency(
parent=self,
name=name,
dep_value=package,
cipd_root=cipd_root,
custom_vars=self.custom_vars,
should_process=should_process,
relative=use_relative_paths,
condition=condition))
else:
url = dep_value.get('url')
deps_to_add.append(
GitDependency(
parent=self,
name=name,
url=url,
managed=True,
custom_deps=None,
custom_vars=self.custom_vars,
custom_hooks=None,
deps_file=self.recursedeps.get(name, self.deps_file),
should_process=should_process,
should_recurse=name in self.recursedeps,
relative=use_relative_paths,
condition=condition))
deps_to_add.sort(key=lambda x: x.name)
return deps_to_add
def ParseDepsFile(self):
"""Parses the DEPS file for this dependency."""
assert not self.deps_parsed
assert not self.dependencies
deps_content = None
# First try to locate the configured deps file. If it's missing, fallback
# to DEPS.
deps_files = [self.deps_file]
if 'DEPS' not in deps_files:
deps_files.append('DEPS')
for deps_file in deps_files:
filepath = os.path.join(self.root.root_dir, self.name, deps_file)
if os.path.isfile(filepath):
logging.info(
'ParseDepsFile(%s): %s file found at %s', self.name, deps_file,
filepath)
break
logging.info(
'ParseDepsFile(%s): No %s file found at %s', self.name, deps_file,
filepath)
if os.path.isfile(filepath):
deps_content = gclient_utils.FileRead(filepath)
logging.debug('ParseDepsFile(%s) read:\n%s', self.name, deps_content)
local_scope = {}
if deps_content:
try:
local_scope = gclient_eval.Parse(
deps_content, filepath, self.get_vars(), self.get_builtin_vars())
except SyntaxError as e:
gclient_utils.SyntaxErrorToError(filepath, e)
if 'allowed_hosts' in local_scope:
try:
self._allowed_hosts = frozenset(local_scope.get('allowed_hosts'))
except TypeError: # raised if non-iterable
pass
if not self._allowed_hosts:
logging.warning("allowed_hosts is specified but empty %s",
self._allowed_hosts)
raise gclient_utils.Error(
'ParseDepsFile(%s): allowed_hosts must be absent '
'or a non-empty iterable' % self.name)
self._gn_args_from = local_scope.get('gclient_gn_args_from')
self._gn_args_file = local_scope.get('gclient_gn_args_file')
self._gn_args = local_scope.get('gclient_gn_args', [])
# It doesn't make sense to set all of these, since setting gn_args_from to
# another DEPS will make gclient ignore any other local gn_args* settings.
assert not (self._gn_args_from and self._gn_args_file), \
'Only specify one of "gclient_gn_args_from" or ' \
'"gclient_gn_args_file + gclient_gn_args".'
self._vars = local_scope.get('vars', {})
if self.parent:
for key, value in self.parent.get_vars().items():
if key in self._vars:
self._vars[key] = value
# Since we heavily post-process things, freeze ones which should
# reflect original state of DEPS.
self._vars = gclient_utils.freeze(self._vars)
# If use_relative_paths is set in the DEPS file, regenerate
# the dictionary using paths relative to the directory containing
# the DEPS file. Also update recursedeps if use_relative_paths is
# enabled.
# If the deps file doesn't set use_relative_paths, but the parent did
# (and therefore set self.relative on this Dependency object), then we
# want to modify the deps and recursedeps by prepending the parent
# directory of this dependency.
self._use_relative_paths = local_scope.get('use_relative_paths', False)
rel_prefix = None
if self._use_relative_paths:
rel_prefix = self.name
elif self._relative:
rel_prefix = os.path.dirname(self.name)
if 'recursion' in local_scope:
logging.warning(
'%s: Ignoring recursion = %d.', self.name, local_scope['recursion'])
if 'recursedeps' in local_scope:
for ent in local_scope['recursedeps']:
if isinstance(ent, basestring):
self.recursedeps[ent] = self.deps_file
else: # (depname, depsfilename)
self.recursedeps[ent[0]] = ent[1]
logging.warning('Found recursedeps %r.', repr(self.recursedeps))
if rel_prefix:
logging.warning('Updating recursedeps by prepending %s.', rel_prefix)
rel_deps = {}
for depname, options in self.recursedeps.items():
rel_deps[
os.path.normpath(os.path.join(rel_prefix, depname))] = options
self.recursedeps = rel_deps
# To get gn_args from another DEPS, that DEPS must be recursed into.
if self._gn_args_from:
assert self.recursedeps and self._gn_args_from in self.recursedeps, \
'The "gclient_gn_args_from" value must be in recursedeps.'
# If present, save 'target_os' in the local_target_os property.
if 'target_os' in local_scope:
self.local_target_os = local_scope['target_os']
deps = local_scope.get('deps', {})
deps_to_add = self._deps_to_objects(
self._postprocess_deps(deps, rel_prefix), self._use_relative_paths)
# compute which working directory should be used for hooks
if local_scope.get('use_relative_hooks', False):
print('use_relative_hooks is deprecated, please remove it from DEPS. ' +
'(it was merged in use_relative_paths)', file=sys.stderr)
hooks_cwd = self.root.root_dir
if self._use_relative_paths:
hooks_cwd = os.path.join(hooks_cwd, self.name)
logging.warning('Updating hook base working directory to %s.',
hooks_cwd)
# override named sets of hooks by the custom hooks
hooks_to_run = []
hook_names_to_suppress = [c.get('name', '') for c in self.custom_hooks]
for hook in local_scope.get('hooks', []):
if hook.get('name', '') not in hook_names_to_suppress:
hooks_to_run.append(hook)
# add the replacements and any additions
for hook in self.custom_hooks:
if 'action' in hook:
hooks_to_run.append(hook)
if self.should_recurse:
self._pre_deps_hooks = [
Hook.from_dict(hook, variables=self.get_vars(), verbose=True,
conditions=self.condition, cwd_base=hooks_cwd)
for hook in local_scope.get('pre_deps_hooks', [])
]
self.add_dependencies_and_close(deps_to_add, hooks_to_run,
hooks_cwd=hooks_cwd)
logging.info('ParseDepsFile(%s) done' % self.name)
def _get_option(self, attr, default):
obj = self
while not hasattr(obj, '_options'):
obj = obj.parent
return getattr(obj._options, attr, default)
def add_dependencies_and_close(self, deps_to_add, hooks, hooks_cwd=None):
"""Adds the dependencies, hooks and mark the parsing as done."""
if hooks_cwd == None:
hooks_cwd = self.root.root_dir
for dep in deps_to_add:
if dep.verify_validity():
self.add_dependency(dep)
self._mark_as_parsed([
Hook.from_dict(
h, variables=self.get_vars(), verbose=self.root._options.verbose,
conditions=self.condition, cwd_base=hooks_cwd)
for h in hooks
])
def findDepsFromNotAllowedHosts(self):
"""Returns a list of dependencies from not allowed hosts.
If allowed_hosts is not set, allows all hosts and returns empty list.
"""
if not self._allowed_hosts:
return []
bad_deps = []
for dep in self._dependencies:
# Don't enforce this for custom_deps.
if dep.name in self._custom_deps:
continue
if isinstance(dep.url, basestring):
parsed_url = urlparse.urlparse(dep.url)
if parsed_url.netloc and parsed_url.netloc not in self._allowed_hosts:
bad_deps.append(dep)
return bad_deps
def FuzzyMatchUrl(self, candidates):
"""Attempts to find this dependency in the list of candidates.
It looks first for the URL of this dependency in the list of
candidates. If it doesn't succeed, and the URL ends in '.git', it will try
looking for the URL minus '.git'. Finally it will try to look for the name
of the dependency.
Args:
candidates: list, dict. The list of candidates in which to look for this
dependency. It can contain URLs as above, or dependency names like
"src/some/dep".
Returns:
If this dependency is not found in the list of candidates, returns None.
Otherwise, it returns under which name did we find this dependency:
- Its parsed url: "https://example.com/src.git'
- Its parsed url minus '.git': "https://example.com/src"
- Its name: "src"
"""
if self.url:
origin, _ = gclient_utils.SplitUrlRevision(self.url)
if origin in candidates:
return origin
if origin.endswith('.git') and origin[:-len('.git')] in candidates:
return origin[:-len('.git')]
if origin + '.git' in candidates:
return origin + '.git'
if self.name in candidates:
return self.name
return None
# Arguments number differs from overridden method
# pylint: disable=arguments-differ
def run(self, revision_overrides, command, args, work_queue, options,
patch_refs, target_branches):
"""Runs |command| then parse the DEPS file."""
logging.info('Dependency(%s).run()' % self.name)
assert self._file_list == []
if not self.should_process:
return
# When running runhooks, there's no need to consult the SCM.
# All known hooks are expected to run unconditionally regardless of working
# copy state, so skip the SCM status check.
run_scm = command not in (
'flatten', 'runhooks', 'recurse', 'validate', None)
file_list = [] if not options.nohooks else None
revision_override = revision_overrides.pop(
self.FuzzyMatchUrl(revision_overrides), None)
if not revision_override and not self.managed:
revision_override = 'unmanaged'
if run_scm and self.url:
# Create a shallow copy to mutate revision.
options = copy.copy(options)
options.revision = revision_override
self._used_revision = options.revision
self._used_scm = self.CreateSCM(out_cb=work_queue.out_cb)
if command != 'update' or self.GetScmName() != 'git':
self._got_revision = self._used_scm.RunCommand(command, options, args,
file_list)
else:
try:
start = time.time()
sync_status = metrics_utils.SYNC_STATUS_FAILURE
self._got_revision = self._used_scm.RunCommand(command, options, args,
file_list)
sync_status = metrics_utils.SYNC_STATUS_SUCCESS
finally:
url, revision = gclient_utils.SplitUrlRevision(self.url)
metrics.collector.add_repeated('git_deps', {
'path': self.name,
'url': url,
'revision': revision,
'execution_time': time.time() - start,
'sync_status': sync_status,
})
patch_repo = self.url.split('@')[0]
patch_ref = patch_refs.pop(self.FuzzyMatchUrl(patch_refs), None)
target_branch = target_branches.pop(
self.FuzzyMatchUrl(target_branches), None)
if command == 'update' and patch_ref is not None:
self._used_scm.apply_patch_ref(patch_repo, patch_ref, target_branch,
options, file_list)
if file_list:
file_list = [os.path.join(self.name, f.strip()) for f in file_list]
# TODO(phajdan.jr): We should know exactly when the paths are absolute.
# Convert all absolute paths to relative.
for i in range(len(file_list or [])):
# It depends on the command being executed (like runhooks vs sync).
if not os.path.isabs(file_list[i]):
continue
prefix = os.path.commonprefix(
[self.root.root_dir.lower(), file_list[i].lower()])
file_list[i] = file_list[i][len(prefix):]
# Strip any leading path separators.
while file_list[i].startswith(('\\', '/')):
file_list[i] = file_list[i][1:]
if self.should_recurse:
self.ParseDepsFile()
self._run_is_done(file_list or [])
if self.should_recurse:
if command in ('update', 'revert') and not options.noprehooks:
self.RunPreDepsHooks()
# Parse the dependencies of this dependency.
for s in self.dependencies:
if s.should_process:
work_queue.enqueue(s)
if command == 'recurse':
# Skip file only checkout.
scm = self.GetScmName()
if not options.scm or scm in options.scm:
cwd = os.path.normpath(os.path.join(self.root.root_dir, self.name))
# Pass in the SCM type as an env variable. Make sure we don't put
# unicode strings in the environment.
env = os.environ.copy()
if scm:
env['GCLIENT_SCM'] = str(scm)
if self.url:
env['GCLIENT_URL'] = str(self.url)
env['GCLIENT_DEP_PATH'] = str(self.name)
if options.prepend_dir and scm == 'git':
print_stdout = False
def filter_fn(line):
"""Git-specific path marshaling. It is optimized for git-grep."""
def mod_path(git_pathspec):
match = re.match('^(\\S+?:)?([^\0]+)$', git_pathspec)
modified_path = os.path.join(self.name, match.group(2))
branch = match.group(1) or ''
return '%s%s' % (branch, modified_path)
match = re.match('^Binary file ([^\0]+) matches$', line)
if match:
print('Binary file %s matches\n' % mod_path(match.group(1)))
return
items = line.split('\0')
if len(items) == 2 and items[1]:
print('%s : %s' % (mod_path(items[0]), items[1]))
elif len(items) >= 2:
# Multiple null bytes or a single trailing null byte indicate
# git is likely displaying filenames only (such as with -l)
print('\n'.join(mod_path(path) for path in items if path))
else:
print(line)
else:
print_stdout = True
filter_fn = None
if self.url is None:
print('Skipped omitted dependency %s' % cwd, file=sys.stderr)
elif os.path.isdir(cwd):
try:
gclient_utils.CheckCallAndFilter(
args, cwd=cwd, env=env, print_stdout=print_stdout,
filter_fn=filter_fn,
)
except subprocess2.CalledProcessError:
if not options.ignore:
raise
else:
print('Skipped missing %s' % cwd, file=sys.stderr)
def GetScmName(self):
raise NotImplementedError()
def CreateSCM(self, out_cb=None):
raise NotImplementedError()
def HasGNArgsFile(self):
return self._gn_args_file is not None
def WriteGNArgsFile(self):
lines = ['# Generated from %r' % self.deps_file]
variables = self.get_vars()
for arg in self._gn_args:
value = variables[arg]
if isinstance(value, gclient_eval.ConstantString):
value = value.value
elif isinstance(value, basestring):
value = gclient_eval.EvaluateCondition(value, variables)
lines.append('%s = %s' % (arg, ToGNString(value)))
# When use_relative_paths is set, gn_args_file is relative to this DEPS
path_prefix = self.root.root_dir
if self._use_relative_paths:
path_prefix = os.path.join(path_prefix, self.name)
with open(os.path.join(path_prefix, self._gn_args_file), 'wb') as f:
f.write('\n'.join(lines).encode('utf-8', 'replace'))
@gclient_utils.lockedmethod
def _run_is_done(self, file_list):
# Both these are kept for hooks that are run as a separate tree traversal.
self._file_list = file_list
self._processed = True
def GetHooks(self, options):
"""Evaluates all hooks, and return them in a flat list.
RunOnDeps() must have been called before to load the DEPS.
"""
result = []
if not self.should_process or not self.should_recurse:
# Don't run the hook when it is above recursion_limit.
return result
# If "--force" was specified, run all hooks regardless of what files have
# changed.
if self.deps_hooks:
# TODO(maruel): If the user is using git, then we don't know
# what files have changed so we always run all hooks. It'd be nice to fix
# that.
result.extend(self.deps_hooks)
for s in self.dependencies:
result.extend(s.GetHooks(options))
return result
def RunHooksRecursively(self, options, progress):
assert self.hooks_ran == False
self._hooks_ran = True
hooks = self.GetHooks(options)
if progress:
progress._total = len(hooks)
for hook in hooks:
if progress:
progress.update(extra=hook.name or '')
hook.run()
if progress:
progress.end()
def RunPreDepsHooks(self):
assert self.processed
assert self.deps_parsed
assert not self.pre_deps_hooks_ran
assert not self.hooks_ran
for s in self.dependencies:
assert not s.processed
self._pre_deps_hooks_ran = True
for hook in self.pre_deps_hooks:
hook.run()
def GetCipdRoot(self):
if self.root is self:
# Let's not infinitely recurse. If this is root and isn't an
# instance of GClient, do nothing.
return None
return self.root.GetCipdRoot()
def subtree(self, include_all):
"""Breadth first recursion excluding root node."""
dependencies = self.dependencies
for d in dependencies:
if d.should_process or include_all:
yield d
for d in dependencies:
for i in d.subtree(include_all):
yield i
@gclient_utils.lockedmethod
def add_dependency(self, new_dep):
self._dependencies.append(new_dep)
@gclient_utils.lockedmethod
def _mark_as_parsed(self, new_hooks):
self._deps_hooks.extend(new_hooks)
self._deps_parsed = True
@property
@gclient_utils.lockedmethod
def dependencies(self):
return tuple(self._dependencies)
@property
@gclient_utils.lockedmethod
def deps_hooks(self):
return tuple(self._deps_hooks)
@property
@gclient_utils.lockedmethod
def pre_deps_hooks(self):
return tuple(self._pre_deps_hooks)
@property
@gclient_utils.lockedmethod
def deps_parsed(self):
"""This is purely for debugging purposes. It's not used anywhere."""
return self._deps_parsed
@property
@gclient_utils.lockedmethod
def processed(self):
return self._processed
@property
@gclient_utils.lockedmethod
def pre_deps_hooks_ran(self):
return self._pre_deps_hooks_ran
@property
@gclient_utils.lockedmethod
def hooks_ran(self):
return self._hooks_ran
@property
@gclient_utils.lockedmethod
def allowed_hosts(self):
return self._allowed_hosts
@property
@gclient_utils.lockedmethod
def file_list(self):
return tuple(self._file_list)
@property
def used_scm(self):
"""SCMWrapper instance for this dependency or None if not processed yet."""
return self._used_scm
@property
@gclient_utils.lockedmethod
def got_revision(self):
return self._got_revision
@property
def file_list_and_children(self):
result = list(self.file_list)
for d in self.dependencies:
result.extend(d.file_list_and_children)
return tuple(result)
def __str__(self):
out = []
for i in ('name', 'url', 'custom_deps',
'custom_vars', 'deps_hooks', 'file_list', 'should_process',
'processed', 'hooks_ran', 'deps_parsed', 'requirements',
'allowed_hosts'):
# First try the native property if it exists.
if hasattr(self, '_' + i):
value = getattr(self, '_' + i, False)
else:
value = getattr(self, i, False)
if value:
out.append('%s: %s' % (i, value))
for d in self.dependencies:
out.extend([' ' + x for x in str(d).splitlines()])
out.append('')
return '\n'.join(out)
def __repr__(self):
return '%s: %s' % (self.name, self.url)
def hierarchy(self, include_url=True):
"""Returns a human-readable hierarchical reference to a Dependency."""
def format_name(d):
if include_url:
return '%s(%s)' % (d.name, d.url)
return d.name
out = format_name(self)
i = self.parent
while i and i.name:
out = '%s -> %s' % (format_name(i), out)
i = i.parent
return out
def hierarchy_data(self):
"""Returns a machine-readable hierarchical reference to a Dependency."""
d = self
out = []
while d and d.name:
out.insert(0, (d.name, d.url))
d = d.parent
return tuple(out)
def get_builtin_vars(self):
return {
'checkout_android': 'android' in self.target_os,
'checkout_chromeos': 'chromeos' in self.target_os,
'checkout_fuchsia': 'fuchsia' in self.target_os,
'checkout_ios': 'ios' in self.target_os,
'checkout_linux': 'unix' in self.target_os,
'checkout_mac': 'mac' in self.target_os,
'checkout_win': 'win' in self.target_os,
'host_os': _detect_host_os(),
'checkout_arm': 'arm' in self.target_cpu,
'checkout_arm64': 'arm64' in self.target_cpu,
'checkout_x86': 'x86' in self.target_cpu,
'checkout_mips': 'mips' in self.target_cpu,
'checkout_mips64': 'mips64' in self.target_cpu,
'checkout_ppc': 'ppc' in self.target_cpu,
'checkout_s390': 's390' in self.target_cpu,
'checkout_x64': 'x64' in self.target_cpu,
'host_cpu': detect_host_arch.HostArch(),
}
def get_vars(self):
"""Returns a dictionary of effective variable values
(DEPS file contents with applied custom_vars overrides)."""
# Variable precedence (last has highest):
# - DEPS vars
# - parents, from first to last
# - built-in
# - custom_vars overrides
result = {}
result.update(self._vars)
if self.parent:
merge_vars(result, self.parent.get_vars())
# Provide some built-in variables.
result.update(self.get_builtin_vars())
merge_vars(result, self.custom_vars)
return result
_PLATFORM_MAPPING = {
'cygwin': 'win',
'darwin': 'mac',
'linux2': 'linux',
'linux': 'linux',
'win32': 'win',
'aix6': 'aix',
}
def merge_vars(result, new_vars):
for k, v in new_vars.items():
if k in result:
if isinstance(result[k], gclient_eval.ConstantString):
if isinstance(v, gclient_eval.ConstantString):
result[k] = v
else:
result[k].value = v
else:
result[k] = v
else:
result[k] = v
def _detect_host_os():
return _PLATFORM_MAPPING[sys.platform]
class GitDependency(Dependency):
"""A Dependency object that represents a single git checkout."""
#override
def GetScmName(self):
"""Always 'git'."""
return 'git'
#override
def CreateSCM(self, out_cb=None):
"""Create a Wrapper instance suitable for handling this git dependency."""
return gclient_scm.GitWrapper(
self.url, self.root.root_dir, self.name, self.outbuf, out_cb,
print_outbuf=self.print_outbuf)
class GClient(GitDependency):
"""Object that represent a gclient checkout. A tree of Dependency(), one per
solution or DEPS entry."""
DEPS_OS_CHOICES = {
"aix6": "unix",
"win32": "win",
"win": "win",
"cygwin": "win",
"darwin": "mac",
"mac": "mac",
"unix": "unix",
"linux": "unix",
"linux2": "unix",
"linux3": "unix",
"android": "android",
"ios": "ios",
"fuchsia": "fuchsia",
"chromeos": "chromeos",
}
DEFAULT_CLIENT_FILE_TEXT = ("""\
solutions = [
{ "name" : %(solution_name)r,
"url" : %(solution_url)r,
"deps_file" : %(deps_file)r,
"managed" : %(managed)r,
"custom_deps" : {
},
"custom_vars": %(custom_vars)r,
},
]
""")
DEFAULT_CLIENT_CACHE_DIR_TEXT = ("""\
cache_dir = %(cache_dir)r
""")
DEFAULT_SNAPSHOT_FILE_TEXT = ("""\
# Snapshot generated with gclient revinfo --snapshot
solutions = %(solution_list)s
""")
def __init__(self, root_dir, options):
# Do not change previous behavior. Only solution level and immediate DEPS
# are processed.
self._recursion_limit = 2
super(GClient, self).__init__(
parent=None,
name=None,
url=None,
managed=True,
custom_deps=None,
custom_vars=None,
custom_hooks=None,
deps_file='unused',
should_process=True,
should_recurse=True,
relative=None,
condition=None,
print_outbuf=True)
self._options = options
if options.deps_os:
enforced_os = options.deps_os.split(',')
else:
enforced_os = [self.DEPS_OS_CHOICES.get(sys.platform, 'unix')]
if 'all' in enforced_os:
enforced_os = self.DEPS_OS_CHOICES.values()
self._enforced_os = tuple(set(enforced_os))
self._enforced_cpu = (detect_host_arch.HostArch(), )
self._root_dir = root_dir
self._cipd_root = None
self.config_content = None
def _CheckConfig(self):
"""Verify that the config matches the state of the existing checked-out
solutions."""
for dep in self.dependencies:
if dep.managed and dep.url:
scm = dep.CreateSCM()
actual_url = scm.GetActualRemoteURL(self._options)
if actual_url and not scm.DoesRemoteURLMatch(self._options):
mirror = scm.GetCacheMirror()
if mirror:
mirror_string = '%s (exists=%s)' % (mirror.mirror_path,
mirror.exists())
else:
mirror_string = 'not used'
raise gclient_utils.Error(
'''
Your .gclient file seems to be broken. The requested URL is different from what
is actually checked out in %(checkout_path)s.
The .gclient file contains:
URL: %(expected_url)s (%(expected_scm)s)
Cache mirror: %(mirror_string)s
The local checkout in %(checkout_path)s reports:
%(actual_url)s (%(actual_scm)s)
You should ensure that the URL listed in .gclient is correct and either change
it or fix the checkout.
''' % {
'checkout_path': os.path.join(self.root_dir, dep.name),
'expected_url': dep.url,
'expected_scm': dep.GetScmName(),
'mirror_string': mirror_string,
'actual_url': actual_url,
'actual_scm': dep.GetScmName()
})
  def SetConfig(self, content):
    """Parses a .gclient file's content and populates the solution list.

    Args:
      content: str - the raw Python text of a .gclient file.

    Raises:
      gclient_utils.Error: on syntax errors, incomplete solutions, or
          inconsistent target_os/target_cpu settings.
    """
    assert not self.dependencies
    config_dict = {}
    self.config_content = content
    # NOTE: the .gclient file is executed as Python; it is trusted input
    # authored by the user running gclient.
    try:
      exec(content, config_dict)
    except SyntaxError as e:
      gclient_utils.SyntaxErrorToError('.gclient', e)

    # Append any target OS that is not already being enforced to the tuple.
    target_os = config_dict.get('target_os', [])
    if config_dict.get('target_os_only', False):
      self._enforced_os = tuple(set(target_os))
    else:
      self._enforced_os = tuple(set(self._enforced_os).union(target_os))

    # Append any target CPU that is not already being enforced to the tuple.
    target_cpu = config_dict.get('target_cpu', [])
    if config_dict.get('target_cpu_only', False):
      self._enforced_cpu = tuple(set(target_cpu))
    else:
      self._enforced_cpu = tuple(set(self._enforced_cpu).union(target_cpu))

    cache_dir = config_dict.get('cache_dir', UNSET_CACHE_DIR)
    if cache_dir is not UNSET_CACHE_DIR:
      # A relative cache_dir is interpreted relative to the checkout root.
      if cache_dir:
        cache_dir = os.path.join(self.root_dir, cache_dir)
        cache_dir = os.path.abspath(cache_dir)

      git_cache.Mirror.SetCachePath(cache_dir)

    if not target_os and config_dict.get('target_os_only', False):
      raise gclient_utils.Error('Can\'t use target_os_only if target_os is '
                                'not specified')

    if not target_cpu and config_dict.get('target_cpu_only', False):
      raise gclient_utils.Error('Can\'t use target_cpu_only if target_cpu is '
                                'not specified')

    deps_to_add = []
    for s in config_dict.get('solutions', []):
      try:
        deps_to_add.append(GitDependency(
            parent=self,
            name=s['name'],
            url=s['url'],
            managed=s.get('managed', True),
            custom_deps=s.get('custom_deps', {}),
            custom_vars=s.get('custom_vars', {}),
            custom_hooks=s.get('custom_hooks', []),
            deps_file=s.get('deps_file', 'DEPS'),
            should_process=True,
            should_recurse=True,
            relative=None,
            condition=None,
            print_outbuf=True))
      except KeyError:
        # 'name' and 'url' are the only mandatory keys per solution.
        raise gclient_utils.Error('Invalid .gclient file. Solution is '
                                  'incomplete: %s' % s)
    # Record which well-known projects this checkout maps to, for metrics.
    metrics.collector.add(
        'project_urls',
        [
            dep.FuzzyMatchUrl(metrics_utils.KNOWN_PROJECT_URLS)
            for dep in deps_to_add
            if dep.FuzzyMatchUrl(metrics_utils.KNOWN_PROJECT_URLS)
        ]
    )

    self.add_dependencies_and_close(deps_to_add, config_dict.get('hooks', []))
    logging.info('SetConfig() done')
def SaveConfig(self):
gclient_utils.FileWrite(os.path.join(self.root_dir,
self._options.config_filename),
self.config_content)
  @staticmethod
  def LoadCurrentConfig(options):
    """Searches for and loads a .gclient file relative to the current working
    dir. Returns a GClient object.

    Returns None when no configuration file can be found and --spec was not
    given. Also warns (to stderr) when bare --revision values are ambiguous
    because multiple solutions are configured.
    """
    if options.spec:
      # --spec provides the config inline; root the client at the cwd.
      client = GClient('.', options)
      client.SetConfig(options.spec)
    else:
      if options.verbose:
        print('Looking for %s starting from %s\n' % (
            options.config_filename, os.getcwd()))
      path = gclient_paths.FindGclientRoot(os.getcwd(), options.config_filename)
      if not path:
        if options.verbose:
          print('Couldn\'t find configuration file.')
        return None
      client = GClient(path, options)
      client.SetConfig(gclient_utils.FileRead(
          os.path.join(path, options.config_filename)))

    # A bare revision (no 'solution@') is applied positionally, which is
    # ambiguous with more than one solution; tell the user how to fix it.
    if (options.revisions and
        len(client.dependencies) > 1 and
        any('@' not in r for r in options.revisions)):
      print(
          ('You must specify the full solution name like --revision %s@%s\n'
           'when you have multiple solutions setup in your .gclient file.\n'
           'Other solutions present are: %s.') % (
              client.dependencies[0].name,
              options.revisions[0],
              ', '.join(s.name for s in client.dependencies[1:])),
          file=sys.stderr)
    return client
def SetDefaultConfig(self, solution_name, deps_file, solution_url,
managed=True, cache_dir=UNSET_CACHE_DIR,
custom_vars=None):
text = self.DEFAULT_CLIENT_FILE_TEXT
format_dict = {
'solution_name': solution_name,
'solution_url': solution_url,
'deps_file': deps_file,
'managed': managed,
'custom_vars': custom_vars or {},
}
if cache_dir is not UNSET_CACHE_DIR:
text += self.DEFAULT_CLIENT_CACHE_DIR_TEXT
format_dict['cache_dir'] = cache_dir
self.SetConfig(text % format_dict)
def _SaveEntries(self):
"""Creates a .gclient_entries file to record the list of unique checkouts.
The .gclient_entries file lives in the same directory as .gclient.
"""
# Sometimes pprint.pformat will use {', sometimes it'll use { ' ... It
# makes testing a bit too fun.
result = 'entries = {\n'
for entry in self.root.subtree(False):
result += ' %s: %s,\n' % (pprint.pformat(entry.name),
pprint.pformat(entry.url))
result += '}\n'
file_path = os.path.join(self.root_dir, self._options.entries_filename)
logging.debug(result)
gclient_utils.FileWrite(file_path, result)
def _ReadEntries(self):
"""Read the .gclient_entries file for the given client.
Returns:
A sequence of solution names, which will be empty if there is the
entries file hasn't been created yet.
"""
scope = {}
filename = os.path.join(self.root_dir, self._options.entries_filename)
if not os.path.exists(filename):
return {}
try:
exec(gclient_utils.FileRead(filename), scope)
except SyntaxError as e:
gclient_utils.SyntaxErrorToError(filename, e)
return scope.get('entries', {})
def _EnforceRevisions(self):
"""Checks for revision overrides."""
revision_overrides = {}
if self._options.head:
return revision_overrides
if not self._options.revisions:
return revision_overrides
solutions_names = [s.name for s in self.dependencies]
index = 0
for revision in self._options.revisions:
if not '@' in revision:
# Support for --revision 123
revision = '%s@%s' % (solutions_names[index], revision)
name, rev = revision.split('@', 1)
revision_overrides[name] = rev
index += 1
return revision_overrides
def _EnforcePatchRefsAndBranches(self):
"""Checks for patch refs."""
patch_refs = {}
target_branches = {}
if not self._options.patch_refs:
return patch_refs, target_branches
for given_patch_ref in self._options.patch_refs:
patch_repo, _, patch_ref = given_patch_ref.partition('@')
if not patch_repo or not patch_ref or ':' not in patch_ref:
raise gclient_utils.Error(
'Wrong revision format: %s should be of the form '
'patch_repo@target_branch:patch_ref.' % given_patch_ref)
target_branch, _, patch_ref = patch_ref.partition(':')
target_branches[patch_repo] = target_branch
patch_refs[patch_repo] = patch_ref
return patch_refs, target_branches
  def _RemoveUnversionedGitDirs(self):
    """Remove directories that are no longer part of the checkout.

    Notify the user if there is an orphaned entry in their working copy.
    Only delete the directory if there are no changes in it, and
    delete_unversioned_trees is set to true.
    """
    # Names (and absolute paths) of every dependency that is still wanted.
    entries = [i.name for i in self.root.subtree(False) if i.url]
    full_entries = [os.path.join(self.root_dir, e.replace('/', os.path.sep))
                    for e in entries]

    # Compare against what was checked out last time (.gclient_entries).
    for entry, prev_url in self._ReadEntries().items():
      if not prev_url:
        # entry must have been overridden via .gclient custom_deps
        continue
      # Fix path separator on Windows.
      entry_fixed = entry.replace('/', os.path.sep)
      e_dir = os.path.join(self.root_dir, entry_fixed)
      # Use entry and not entry_fixed there.
      if (entry not in entries and
          (not any(path.startswith(entry + '/') for path in entries)) and
          os.path.exists(e_dir)):
        # The entry has been removed from DEPS.
        scm = gclient_scm.GitWrapper(
            prev_url, self.root_dir, entry_fixed, self.outbuf)

        # Check to see if this directory is now part of a higher-up checkout.
        scm_root = None
        try:
          scm_root = gclient_scm.scm.GIT.GetCheckoutRoot(scm.checkout_path)
        except subprocess2.CalledProcessError:
          pass
        if not scm_root:
          logging.warning('Could not find checkout root for %s. Unable to '
                          'determine whether it is part of a higher-level '
                          'checkout, so not removing.' % entry)
          continue

        # This is to handle the case of third_party/WebKit migrating from
        # being a DEPS entry to being part of the main project.
        # If the subproject is a Git project, we need to remove its .git
        # folder. Otherwise git operations on that folder will have different
        # effects depending on the current working directory.
        if os.path.abspath(scm_root) == os.path.abspath(e_dir):
          e_par_dir = os.path.join(e_dir, os.pardir)
          if gclient_scm.scm.GIT.IsInsideWorkTree(e_par_dir):
            par_scm_root = gclient_scm.scm.GIT.GetCheckoutRoot(e_par_dir)
            # rel_e_dir : relative path of entry w.r.t. its parent repo.
            rel_e_dir = os.path.relpath(e_dir, par_scm_root)
            if gclient_scm.scm.GIT.IsDirectoryVersioned(
                par_scm_root, rel_e_dir):
              # Preserve the old .git (with all local branches) instead of
              # destroying it outright.
              save_dir = scm.GetGitBackupDirPath()
              # Remove any eventual stale backup dir for the same project.
              if os.path.exists(save_dir):
                gclient_utils.rmtree(save_dir)
              os.rename(os.path.join(e_dir, '.git'), save_dir)
              # When switching between the two states (entry/ is a subproject
              # -> entry/ is part of the outer project), it is very likely
              # that some files are changed in the checkout, unless we are
              # jumping *exactly* across the commit which changed just DEPS.
              # In such case we want to cleanup any eventual stale files
              # (coming from the old subproject) in order to end up with a
              # clean checkout.
              gclient_scm.scm.GIT.CleanupDir(par_scm_root, rel_e_dir)
              assert not os.path.exists(os.path.join(e_dir, '.git'))
              print('\nWARNING: \'%s\' has been moved from DEPS to a higher '
                    'level checkout. The git folder containing all the local'
                    ' branches has been saved to %s.\n'
                    'If you don\'t care about its state you can safely '
                    'remove that folder to free up space.' % (entry, save_dir))
              continue

        if scm_root in full_entries:
          logging.info('%s is part of a higher level checkout, not removing',
                       scm.GetCheckoutRoot())
          continue

        # Only delete when the tree is pristine (or --force was given).
        file_list = []
        scm.status(self._options, [], file_list)
        modified_files = file_list != []
        if (not self._options.delete_unversioned_trees or
            (modified_files and not self._options.force)):
          # There are modified files in this entry. Keep warning until
          # removed.
          self.add_dependency(
              GitDependency(
                  parent=self,
                  name=entry,
                  url=prev_url,
                  managed=False,
                  custom_deps={},
                  custom_vars={},
                  custom_hooks=[],
                  deps_file=None,
                  should_process=True,
                  should_recurse=False,
                  relative=None,
                  condition=None))
          if modified_files and self._options.delete_unversioned_trees:
            print('\nWARNING: \'%s\' is no longer part of this client.\n'
                  'Despite running \'gclient sync -D\' no action was taken '
                  'as there are modifications.\nIt is recommended you revert '
                  'all changes or run \'gclient sync -D --force\' next '
                  'time.' % entry_fixed)
          else:
            print('\nWARNING: \'%s\' is no longer part of this client.\n'
                  'It is recommended that you manually remove it or use '
                  '\'gclient sync -D\' next time.' % entry_fixed)
        else:
          # Delete the entry
          print('\n________ deleting \'%s\' in \'%s\'' % (
              entry_fixed, self.root_dir))
          gclient_utils.rmtree(e_dir)
    # record the current list of entries for next time
    self._SaveEntries()
  def RunOnDeps(self, command, args, ignore_requirements=False, progress=True):
    """Runs a command on each dependency in a client and its dependencies.

    Args:
      command: The command to use (e.g., 'status' or 'diff')
      args: list of str - extra arguments to add to the command line.
      ignore_requirements: bool - passed through to the ExecutionQueue so
          dependency ordering constraints can be bypassed (used by 'recurse').
      progress: bool - whether a progress bar may be shown (tty only).

    Returns:
      0 on success.
    """
    if not self.dependencies:
      raise gclient_utils.Error('No solution specified')

    revision_overrides = {}
    patch_refs = {}
    target_branches = {}
    # It's unnecessary to check for revision overrides for 'recurse'.
    # Save a few seconds by not calling _EnforceRevisions() in that case.
    if command not in ('diff', 'recurse', 'runhooks', 'status', 'revert',
                       'validate'):
      self._CheckConfig()
      revision_overrides = self._EnforceRevisions()

    if command == 'update':
      patch_refs, target_branches = self._EnforcePatchRefsAndBranches()
    # Disable progress for non-tty stdout.
    should_show_progress = (
        setup_color.IS_TTY and not self._options.verbose and progress)
    pm = None
    if should_show_progress:
      if command in ('update', 'revert'):
        pm = Progress('Syncing projects', 1)
      elif command in ('recurse', 'validate'):
        pm = Progress(' '.join(args), 1)
    work_queue = gclient_utils.ExecutionQueue(
        self._options.jobs, pm, ignore_requirements=ignore_requirements,
        verbose=self._options.verbose)
    for s in self.dependencies:
      if s.should_process:
        work_queue.enqueue(s)
    # flush() drains the queue; entries consume revision_overrides/patch_refs
    # as they are applied, so leftovers below indicate unused flags.
    work_queue.flush(revision_overrides, command, args, options=self._options,
                     patch_refs=patch_refs, target_branches=target_branches)

    if revision_overrides:
      print('Please fix your script, having invalid --revision flags will soon '
            'be considered an error.', file=sys.stderr)

    if patch_refs:
      raise gclient_utils.Error(
          'The following --patch-ref flags were not used. Please fix it:\n%s' %
          ('\n'.join(
              patch_repo + '@' + patch_ref
              for patch_repo, patch_ref in patch_refs.items())))

    # Once all the dependencies have been processed, it's now safe to write
    # out the gn_args_file and run the hooks.
    if command == 'update':
      gn_args_dep = self.dependencies[0]
      # gclient_gn_args_from redirects which dependency owns the GN args.
      if gn_args_dep._gn_args_from:
        deps_map = {dep.name: dep for dep in gn_args_dep.dependencies}
        gn_args_dep = deps_map.get(gn_args_dep._gn_args_from)
      if gn_args_dep and gn_args_dep.HasGNArgsFile():
        gn_args_dep.WriteGNArgsFile()

      self._RemoveUnversionedGitDirs()

    # Sync CIPD dependencies once removed deps are deleted. In case a git
    # dependency was moved to CIPD, we want to remove the old git directory
    # first and then sync the CIPD dep.
    if self._cipd_root:
      self._cipd_root.run(command)

    if not self._options.nohooks:
      if should_show_progress:
        pm = Progress('Running hooks', 1)
      self.RunHooksRecursively(self._options, pm)

    return 0
  def PrintRevInfo(self):
    """Prints the revision of each dependency.

    Output format is controlled by the options: --snapshot emits a pinned
    .gclient-style configuration, --actual pins to the checked-out revision,
    --filter restricts which deps are printed, and --output-json selects
    JSON output ('-' for stdout).
    """
    if not self.dependencies:
      raise gclient_utils.Error('No solution specified')

    # Load all the settings.
    work_queue = gclient_utils.ExecutionQueue(
        self._options.jobs, None, False, verbose=self._options.verbose)
    for s in self.dependencies:
      if s.should_process:
        work_queue.enqueue(s)
    work_queue.flush({}, None, [], options=self._options, patch_refs=None,
                     target_branches=None)

    def ShouldPrintRevision(dep):
      # With no --filter everything is printed.
      return (not self._options.filter
              or dep.FuzzyMatchUrl(self._options.filter))

    if self._options.snapshot:
      json_output = []
      # First level at .gclient
      for d in self.dependencies:
        entries = {}
        def GrabDeps(dep):
          """Recursively grab dependencies."""
          for rec_d in dep.dependencies:
            # Pin every transitive dep to its exact checked-out revision.
            rec_d.PinToActualRevision()
            if ShouldPrintRevision(rec_d):
              entries[rec_d.name] = rec_d.url
            GrabDeps(rec_d)

        GrabDeps(d)
        json_output.append({
            'name': d.name,
            'solution_url': d.url,
            'deps_file': d.deps_file,
            'managed': d.managed,
            'custom_deps': entries,
        })
      if self._options.output_json == '-':
        print(json.dumps(json_output, indent=2, separators=(',', ': ')))
      elif self._options.output_json:
        with open(self._options.output_json, 'w') as f:
          json.dump(json_output, f)
      else:
        # Print the snapshot configuration file
        print(self.DEFAULT_SNAPSHOT_FILE_TEXT % {
            'solution_list': pprint.pformat(json_output, indent=2),
        })
    else:
      entries = {}
      for d in self.root.subtree(False):
        if self._options.actual:
          d.PinToActualRevision()
        if ShouldPrintRevision(d):
          entries[d.name] = d.url
      if self._options.output_json:
        json_output = {
            name: {
                'url': rev.split('@')[0] if rev else None,
                'rev': rev.split('@')[1] if rev and '@' in rev else None,
            }
            for name, rev in entries.items()
        }
        if self._options.output_json == '-':
          print(json.dumps(json_output, indent=2, separators=(',', ': ')))
        else:
          with open(self._options.output_json, 'w') as f:
            json.dump(json_output, f)
      else:
        keys = sorted(entries.keys())
        for x in keys:
          print('%s: %s' % (x, entries[x]))
    logging.info(str(self))
  def ParseDepsFile(self):
    """No DEPS to parse for a .gclient file."""
    # The root GClient node deliberately has no DEPS file (deps_file='unused');
    # reaching this method indicates a programming error.
    raise gclient_utils.Error('Internal error')
def PrintLocationAndContents(self):
# Print out the .gclient file. This is longer than if we just printed the
# client dict, but more legible, and it might contain helpful comments.
print('Loaded .gclient config in %s:\n%s' % (
self.root_dir, self.config_content))
def GetCipdRoot(self):
if not self._cipd_root:
self._cipd_root = gclient_scm.CipdRoot(
self.root_dir,
# TODO(jbudorick): Support other service URLs as necessary.
# Service URLs should be constant over the scope of a cipd
# root, so a var per DEPS file specifying the service URL
# should suffice.
'https://chrome-infra-packages.appspot.com')
return self._cipd_root
  @property
  def root_dir(self):
    """Root directory of gclient checkout (the directory holding .gclient)."""
    return self._root_dir
  @property
  def enforced_os(self):
    """Tuple of deps_os entries that are to be parsed."""
    return self._enforced_os
  @property
  def target_os(self):
    """Alias of enforced_os: the OSes used when evaluating DEPS conditions."""
    return self._enforced_os
  @property
  def target_cpu(self):
    """Tuple of CPU architectures used when evaluating DEPS conditions."""
    return self._enforced_cpu
class CipdDependency(Dependency):
  """A Dependency object that represents a single CIPD package."""

  def __init__(
      self, parent, name, dep_value, cipd_root,
      custom_vars, should_process, relative, condition):
    """Args:
      parent: Dependency - the dep whose DEPS file declared this package.
      name: str - checkout-relative directory the package is installed into.
      dep_value: dict - the DEPS entry; must contain 'package' and 'version'.
      cipd_root: gclient_scm.CipdRoot - shared root all packages ensure into.
      custom_vars, should_process, relative, condition: see Dependency.
    """
    package = dep_value['package']
    version = dep_value['version']
    url = urlparse.urljoin(
        cipd_root.service_url, '%s@%s' % (package, version))
    # The ':package' suffix keeps names unique when several packages are
    # installed into the same directory.
    super(CipdDependency, self).__init__(
        parent=parent,
        name=name + ':' + package,
        url=url,
        managed=None,
        custom_deps=None,
        custom_vars=custom_vars,
        custom_hooks=None,
        deps_file=None,
        should_process=should_process,
        should_recurse=False,
        relative=relative,
        condition=condition)
    self._cipd_package = None
    self._cipd_root = cipd_root
    # CIPD wants /-separated paths, even on Windows.
    native_subdir_path = os.path.relpath(
        os.path.join(self.root.root_dir, name), cipd_root.root_dir)
    self._cipd_subdir = posixpath.join(*native_subdir_path.split(os.sep))
    self._package_name = package
    self._package_version = version

  #override
  def run(self, revision_overrides, command, args, work_queue, options,
          patch_refs, target_branches):
    """Runs |command| then parse the DEPS file."""
    logging.info('CipdDependency(%s).run()' % self.name)
    if not self.should_process:
      return
    # Register the package with the root before the base class dispatches the
    # command, so the subsequent 'ensure' covers it.
    self._CreatePackageIfNecessary()
    super(CipdDependency, self).run(revision_overrides, command, args,
                                    work_queue, options, patch_refs,
                                    target_branches)

  def _CreatePackageIfNecessary(self):
    # We lazily create the CIPD package to make sure that only packages
    # that we want (as opposed to all packages defined in all DEPS files
    # we parse) get added to the root and subsequently ensured.
    if not self._cipd_package:
      self._cipd_package = self._cipd_root.add_package(
          self._cipd_subdir, self._package_name, self._package_version)

  def ParseDepsFile(self):
    """CIPD dependencies are not currently allowed to have nested deps."""
    self.add_dependencies_and_close([], [])

  #override
  def verify_validity(self):
    """CIPD dependencies allow duplicate name for packages in same directory."""
    logging.info('Dependency(%s).verify_validity()' % self.name)
    return True

  #override
  def GetScmName(self):
    """Always 'cipd'."""
    return 'cipd'

  #override
  def CreateSCM(self, out_cb=None):
    """Create a Wrapper instance suitable for handling this CIPD dependency."""
    self._CreatePackageIfNecessary()
    return gclient_scm.CipdWrapper(
        self.url, self.root.root_dir, self.name, self.outbuf, out_cb,
        root=self._cipd_root, package=self._cipd_package)

  def hierarchy(self, include_url=False):
    # Report the install subdir rather than the name:package composite.
    return self.parent.hierarchy(include_url) + ' -> ' + self._cipd_subdir

  def ToLines(self):
    """Return a list of lines representing this in a DEPS file."""
    def escape_cipd_var(package):
      # Literal braces in package names must be doubled so they survive
      # CIPD's ${...} template expansion.
      return package.replace('{', '{{').replace('}', '}}')

    s = []
    self._CreatePackageIfNecessary()
    if self._cipd_package.authority_for_subdir:
      condition_part = (['    "condition": %r,' % self.condition]
                        if self.condition else [])
      s.extend([
          '  # %s' % self.hierarchy(include_url=False),
          '  "%s": {' % (self.name.split(':')[0],),
          '    "packages": [',
      ])
      # One package entry per package installed into this subdir, emitted in
      # deterministic (sorted) order.
      for p in sorted(
          self._cipd_root.packages(self._cipd_subdir),
          key=lambda x: x.name):
        s.extend([
            '      {',
            '        "package": "%s",' % escape_cipd_var(p.name),
            '        "version": "%s",' % p.version,
            '      },',
        ])

      s.extend([
          '    ],',
          '    "dep_type": "cipd",',
      ] + condition_part + [
          '  },',
          '',
      ])
    return s
#### gclient commands.
@subcommand.usage('[command] [args ...]')
@metrics.collector.collect_metrics('gclient recurse')
def CMDrecurse(parser, args):
  """Operates [command args ...] on all the dependencies.

  Runs a shell command on all entries.
  Sets GCLIENT_DEP_PATH environment variable as the dep's relative location to
  root directory of the checkout.

  Returns 0 on success, 1 on usage/configuration errors.
  """
  # Stop parsing at the first non-arg so that these go through to the command
  parser.disable_interspersed_args()
  parser.add_option('-s', '--scm', action='append', default=[],
                    help='Choose scm types to operate upon.')
  parser.add_option('-i', '--ignore', action='store_true',
                    help='Ignore non-zero return codes from subcommands.')
  parser.add_option('--prepend-dir', action='store_true',
                    help='Prepend relative dir for use with git <cmd> --null.')
  parser.add_option('--no-progress', action='store_true',
                    help='Disable progress bar that shows sub-command updates')
  options, args = parser.parse_args(args)
  if not args:
    print('Need to supply a command!', file=sys.stderr)
    return 1
  root_and_entries = gclient_utils.GetGClientRootAndEntries()
  if not root_and_entries:
    print(
        'You need to run gclient sync at least once to use \'recurse\'.\n'
        'This is because .gclient_entries needs to exist and be up to date.',
        file=sys.stderr)
    return 1

  # Normalize options.scm to a set()
  scm_set = set()
  for scm in options.scm:
    scm_set.update(scm.split(','))
  options.scm = scm_set

  options.nohooks = True
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')
  return client.RunOnDeps('recurse', args, ignore_requirements=True,
                          progress=not options.no_progress)
@subcommand.usage('[args ...]')
@metrics.collector.collect_metrics('gclient fetch')
def CMDfetch(parser, args):
  """Fetches upstream commits for all modules.

  Completely git-specific. Simply runs 'git fetch [args ...]' for each module.
  """
  (options, args) = parser.parse_args(args)
  # Delegate to 'gclient recurse' so the fetch runs in every git checkout.
  recurse_args = ['--jobs=%d' % options.jobs, '--scm=git', 'git', 'fetch']
  return CMDrecurse(OptionParser(), recurse_args + args)
class Flattener(object):
  """Flattens a gclient solution."""

  def __init__(self, client, pin_all_deps=False):
    """Constructor.

    Arguments:
      client (GClient): client to flatten
      pin_all_deps (bool): whether to pin all deps, even if they're not pinned
          in DEPS
    """
    self._client = client

    # Set by _flatten(): the final flattened DEPS text.
    self._deps_string = None
    # Set of (url, deps_file, hierarchy_data) for every DEPS file consumed.
    self._deps_files = set()

    # Accumulators merged across every visited dependency.
    self._allowed_hosts = set()
    self._deps = {}
    self._hooks = []
    self._pre_deps_hooks = []
    self._vars = {}

    self._flatten(pin_all_deps=pin_all_deps)

  @property
  def deps_string(self):
    # Only valid after _flatten() has run (done in __init__).
    assert self._deps_string is not None
    return self._deps_string

  @property
  def deps_files(self):
    return self._deps_files

  def _pin_dep(self, dep):
    """Pins a dependency to specific full revision sha.

    Arguments:
      dep (Dependency): dependency to process
    """
    if dep.url is None:
      return

    # Make sure the revision is always fully specified (a hash),
    # as opposed to refs or tags which might change. Similarly,
    # shortened shas might become ambiguous; make sure to always
    # use full one for pinning.
    revision = gclient_utils.SplitUrlRevision(dep.url)[1]
    if not revision or not gclient_utils.IsFullGitSha(revision):
      dep.PinToActualRevision()

  def _flatten(self, pin_all_deps=False):
    """Runs the flattener. Saves resulting DEPS string.

    Arguments:
      pin_all_deps (bool): whether to pin all deps, even if they're not pinned
          in DEPS
    """
    for solution in self._client.dependencies:
      self._add_dep(solution)
      self._flatten_dep(solution)

    if pin_all_deps:
      for dep in self._deps.values():
        self._pin_dep(dep)

    def add_deps_file(dep):
      # Only include DEPS files referenced by recursedeps.
      if not dep.should_recurse:
        return
      deps_file = dep.deps_file
      deps_path = os.path.join(self._client.root_dir, dep.name, deps_file)
      if not os.path.exists(deps_path):
        # gclient has a fallback that if deps_file doesn't exist, it'll try
        # DEPS. Do the same here.
        deps_file = 'DEPS'
        deps_path = os.path.join(self._client.root_dir, dep.name, deps_file)
        if not os.path.exists(deps_path):
          return
      assert dep.url
      self._deps_files.add((dep.url, deps_file, dep.hierarchy_data()))
    for dep in self._deps.values():
      add_deps_file(dep)

    # The GN args come from the solution itself, or from the dependency
    # pointed at by gclient_gn_args_from if set.
    gn_args_dep = self._deps.get(self._client.dependencies[0]._gn_args_from,
                                 self._client.dependencies[0])
    self._deps_string = '\n'.join(
        _GNSettingsToLines(gn_args_dep._gn_args_file, gn_args_dep._gn_args) +
        _AllowedHostsToLines(self._allowed_hosts) +
        _DepsToLines(self._deps) +
        _HooksToLines('hooks', self._hooks) +
        _HooksToLines('pre_deps_hooks', self._pre_deps_hooks) +
        _VarsToLines(self._vars) +
        ['# %s, %s' % (url, deps_file)
         for url, deps_file, _ in sorted(self._deps_files)] +
        [''])  # Ensure newline at end of file.

  def _add_dep(self, dep):
    """Helper to add a dependency to flattened DEPS.

    Arguments:
      dep (Dependency): dependency to add
    """
    # The same name may only be added twice if it refers to the same dep.
    assert dep.name not in self._deps or self._deps.get(dep.name) == dep, (
        dep.name, self._deps.get(dep.name))
    if dep.url:
      self._deps[dep.name] = dep

  def _flatten_dep(self, dep):
    """Visits a dependency in order to flatten it (see CMDflatten).

    Arguments:
      dep (Dependency): dependency to process
    """
    logging.debug('_flatten_dep(%s)', dep.name)

    assert dep.deps_parsed, (
        "Attempted to flatten %s but it has not been processed." % dep.name)

    self._allowed_hosts.update(dep.allowed_hosts)

    # Only include vars explicitly listed in the DEPS files or gclient solution,
    # not automatic, local overrides (i.e. not all of dep.get_vars()).
    hierarchy = dep.hierarchy(include_url=False)
    for key, value in dep._vars.items():
      # Make sure there are no conflicting variables. It is fine however
      # to use same variable name, as long as the value is consistent.
      assert key not in self._vars or self._vars[key][1] == value, (
          "dep:%s key:%s value:%s != %s" % (
              dep.name, key, value, self._vars[key][1]))
      self._vars[key] = (hierarchy, value)
    # Override explicit custom variables.
    for key, value in dep.custom_vars.items():
      # Do custom_vars that don't correspond to DEPS vars ever make sense? DEPS
      # conditionals shouldn't be using vars that aren't also defined in the
      # DEPS (presubmit actually disallows this), so any new custom_var must be
      # unused in the DEPS, so no need to add it to the flattened output either.
      if key not in self._vars:
        continue
      # Don't "override" existing vars if it's actually the same value.
      if self._vars[key][1] == value:
        continue
      # Anything else is overriding a default value from the DEPS.
      self._vars[key] = (hierarchy + ' [custom_var override]', value)

    self._pre_deps_hooks.extend([(dep, hook) for hook in dep.pre_deps_hooks])
    self._hooks.extend([(dep, hook) for hook in dep.deps_hooks])

    for sub_dep in dep.dependencies:
      self._add_dep(sub_dep)

    # Recurse only into deps marked recursedeps; others are leaves.
    for d in dep.dependencies:
      if d.should_recurse:
        self._flatten_dep(d)
@metrics.collector.collect_metrics('gclient flatten')
def CMDflatten(parser, args):
  """Flattens the solutions into a single DEPS file.

  Returns 0 on success, otherwise the non-zero exit code from RunOnDeps.
  """
  parser.add_option('--output-deps', help='Path to the output DEPS file')
  parser.add_option(
      '--output-deps-files',
      help=('Path to the output metadata about DEPS files referenced by '
            'recursedeps.'))
  parser.add_option(
      '--pin-all-deps', action='store_true',
      help=('Pin all deps, even if not pinned in DEPS. CAVEAT: only does so '
            'for checked out deps, NOT deps_os.'))
  options, args = parser.parse_args(args)

  options.nohooks = True
  # Every DEPS (including conditional ones) must be parsed to flatten fully.
  options.process_all_deps = True
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')

  # Only print progress if we're writing to a file. Otherwise, progress updates
  # could obscure intended output.
  code = client.RunOnDeps('flatten', args, progress=options.output_deps)
  if code != 0:
    return code

  flattener = Flattener(client, pin_all_deps=options.pin_all_deps)

  if options.output_deps:
    with open(options.output_deps, 'w') as f:
      f.write(flattener.deps_string)
  else:
    print(flattener.deps_string)

  deps_files = [{'url': d[0], 'deps_file': d[1], 'hierarchy': d[2]}
                for d in sorted(flattener.deps_files)]
  if options.output_deps_files:
    with open(options.output_deps_files, 'w') as f:
      json.dump(deps_files, f)

  return 0
def _GNSettingsToLines(gn_args_file, gn_args):
  """Returns flattened-DEPS lines describing the gclient GN args settings.

  Returns an empty list when no gn_args_file is configured.
  """
  if not gn_args_file:
    return []
  return [
      'gclient_gn_args_file = "%s"' % gn_args_file,
      'gclient_gn_args = %r' % gn_args,
  ]
def _AllowedHostsToLines(allowed_hosts):
"""Converts |allowed_hosts| set to list of lines for output."""
if not allowed_hosts:
return []
s = ['allowed_hosts = [']
for h in sorted(allowed_hosts):
s.append(' "%s",' % h)
s.extend([']', ''])
return s
def _DepsToLines(deps):
"""Converts |deps| dict to list of lines for output."""
if not deps:
return []
s = ['deps = {']
for _, dep in sorted(deps.items()):
s.extend(dep.ToLines())
s.extend(['}', ''])
return s
def _DepsOsToLines(deps_os):
"""Converts |deps_os| dict to list of lines for output."""
if not deps_os:
return []
s = ['deps_os = {']
for dep_os, os_deps in sorted(deps_os.items()):
s.append(' "%s": {' % dep_os)
for name, dep in sorted(os_deps.items()):
condition_part = ([' "condition": %r,' % dep.condition]
if dep.condition else [])
s.extend([
' # %s' % dep.hierarchy(include_url=False),
' "%s": {' % (name,),
' "url": "%s",' % (dep.url,),
] + condition_part + [
' },',
'',
])
s.extend([' },', ''])
s.extend(['}', ''])
return s
def _HooksToLines(name, hooks):
"""Converts |hooks| list to list of lines for output."""
if not hooks:
return []
s = ['%s = [' % name]
for dep, hook in hooks:
s.extend([
' # %s' % dep.hierarchy(include_url=False),
' {',
])
if hook.name is not None:
s.append(' "name": "%s",' % hook.name)
if hook.pattern is not None:
s.append(' "pattern": "%s",' % hook.pattern)
if hook.condition is not None:
s.append(' "condition": %r,' % hook.condition)
# Flattened hooks need to be written relative to the root gclient dir
cwd = os.path.relpath(os.path.normpath(hook.effective_cwd))
s.extend(
[' "cwd": "%s",' % cwd] +
[' "action": ['] +
[' "%s",' % arg for arg in hook.action] +
[' ]', ' },', '']
)
s.extend([']', ''])
return s
def _HooksOsToLines(hooks_os):
"""Converts |hooks| list to list of lines for output."""
if not hooks_os:
return []
s = ['hooks_os = {']
for hook_os, os_hooks in hooks_os.items():
s.append(' "%s": [' % hook_os)
for dep, hook in os_hooks:
s.extend([
' # %s' % dep.hierarchy(include_url=False),
' {',
])
if hook.name is not None:
s.append(' "name": "%s",' % hook.name)
if hook.pattern is not None:
s.append(' "pattern": "%s",' % hook.pattern)
if hook.condition is not None:
s.append(' "condition": %r,' % hook.condition)
# Flattened hooks need to be written relative to the root gclient dir
cwd = os.path.relpath(os.path.normpath(hook.effective_cwd))
s.extend(
[' "cwd": "%s",' % cwd] +
[' "action": ['] +
[' "%s",' % arg for arg in hook.action] +
[' ]', ' },', '']
)
s.extend([' ],', ''])
s.extend(['}', ''])
return s
def _VarsToLines(variables):
"""Converts |variables| dict to list of lines for output."""
if not variables:
return []
s = ['vars = {']
for key, tup in sorted(variables.items()):
hierarchy, value = tup
s.extend([
' # %s' % hierarchy,
' "%s": %r,' % (key, value),
'',
])
s.extend(['}', ''])
return s
@metrics.collector.collect_metrics('gclient grep')
def CMDgrep(parser, args):
  """Greps through git repos managed by gclient.

  Runs 'git grep [args...]' for each module.
  """
  # We can't use optparse because it will try to parse arguments sent
  # to git grep and throw an error. :-(
  if not args or re.match('(-h|--help)$', args[0]):
    print(
        'Usage: gclient grep [-j <N>] git-grep-args...\n\n'
        'Example: "gclient grep -j10 -A2 RefCountedBase" runs\n"git grep '
        '-A2 RefCountedBase" on each of gclient\'s git\nrepos with up to '
        '10 jobs.\n\nBonus: page output by appending "|& less -FRSX" to the'
        ' end of your query.',
        file=sys.stderr)
    return 1

  # Hand-parse an optional leading -j/--jobs flag; everything else is passed
  # verbatim to git grep.
  jobs_arg = ['--jobs=1']
  if re.match(r'(-j|--jobs=)\d+$', args[0]):
    # '-jN' / '--jobs=N': the job count is fused into a single token.
    jobs_arg, args = args[:1], args[1:]
  elif re.match(r'(-j|--jobs)$', args[0]):
    # '-j N' / '--jobs N': the count is the next token.
    jobs_arg, args = args[:2], args[2:]

  return CMDrecurse(
      parser,
      jobs_arg + ['--ignore', '--prepend-dir', '--no-progress', '--scm=git',
                  'git', 'grep', '--null', '--color=Always'] + args)
@metrics.collector.collect_metrics('gclient root')
def CMDroot(parser, args):
  """Outputs the solution root (or current dir if there isn't one)."""
  (options, args) = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)
  # Fall back to the current directory when no .gclient file is found.
  target = client.root_dir if client else '.'
  print(os.path.abspath(target))
@subcommand.usage('[url]')
@metrics.collector.collect_metrics('gclient config')
def CMDconfig(parser, args):
  """Creates a .gclient file in the current directory.

  This specifies the configuration for further commands. After update/sync,
  top-level DEPS files in each module are read to determine dependent
  modules to operate on as well. If optional [url] parameter is
  provided, then configuration is read from a specified Subversion server
  URL.

  Returns 0 on success; exits via parser.error() on bad arguments.
  """
  # We do a little dance with the --gclientfile option.  'gclient config' is the
  # only command where it's acceptable to have both '--gclientfile' and '--spec'
  # arguments.  So, we temporarily stash any --gclientfile parameter into
  # options.output_config_file until after the (gclientfile xor spec) error
  # check.
  parser.remove_option('--gclientfile')
  parser.add_option('--gclientfile', dest='output_config_file',
                    help='Specify an alternate .gclient file')
  parser.add_option('--name',
                    help='overrides the default name for the solution')
  parser.add_option('--deps-file', default='DEPS',
                    help='overrides the default name for the DEPS file for the '
                         'main solutions and all sub-dependencies')
  parser.add_option('--unmanaged', action='store_true', default=False,
                    help='overrides the default behavior to make it possible '
                         'to have the main solution untouched by gclient '
                         '(gclient will check out unmanaged dependencies but '
                         'will never sync them)')
  parser.add_option('--cache-dir', default=UNSET_CACHE_DIR,
                    help='Cache all git repos into this dir and do shared '
                         'clones from the cache, instead of cloning directly '
                         'from the remote. Pass "None" to disable cache, even '
                         'if globally enabled due to $GIT_CACHE_PATH.')
  parser.add_option('--custom-var', action='append', dest='custom_vars',
                    default=[],
                    help='overrides variables; key=value syntax')
  parser.set_defaults(config_filename=None)
  (options, args) = parser.parse_args(args)
  if options.output_config_file:
    # Restore the stashed --gclientfile value now that parsing is done.
    setattr(options, 'config_filename', getattr(options, 'output_config_file'))
  if ((options.spec and args) or len(args) > 2 or
      (not options.spec and not args)):
    parser.error('Inconsistent arguments. Use either --spec or one or 2 args')
  if (options.cache_dir is not UNSET_CACHE_DIR
      and options.cache_dir.lower() == 'none'):
    # The literal string "None" (any case) explicitly disables the cache.
    options.cache_dir = None
  custom_vars = {}
  for arg in options.custom_vars:
    kv = arg.split('=', 1)
    if len(kv) != 2:
      parser.error('Invalid --custom-var argument: %r' % arg)
    # Values are evaluated as gclient condition expressions, not raw strings.
    custom_vars[kv[0]] = gclient_eval.EvaluateCondition(kv[1], {})
  client = GClient('.', options)
  if options.spec:
    client.SetConfig(options.spec)
  else:
    base_url = args[0].rstrip('/')
    if not options.name:
      # Derive the checkout directory name from the URL's last component.
      name = base_url.split('/')[-1]
      if name.endswith('.git'):
        name = name[:-4]
    else:
      # specify an alternate relpath for the given URL.
      name = options.name
    # Reject names that would escape (or sit at) the current directory.
    if not os.path.abspath(os.path.join(os.getcwd(), name)).startswith(
        os.getcwd()):
      parser.error('Do not pass a relative path for --name.')
    if any(x in ('..', '.', '/', '\\') for x in name.split(os.sep)):
      parser.error('Do not include relative path components in --name.')
    deps_file = options.deps_file
    client.SetDefaultConfig(name, deps_file, base_url,
                            managed=not options.unmanaged,
                            cache_dir=options.cache_dir,
                            custom_vars=custom_vars)
  client.SaveConfig()
  return 0
@subcommand.epilog("""Example:
  gclient pack > patch.txt
    generate simple patch for configured client and dependences
""")
@metrics.collector.collect_metrics('gclient pack')
def CMDpack(parser, args):
  """Generates a patch which can be applied at the root of the tree.

  Internally, runs 'git diff' on each checked out module and
  dependencies, and performs minimal postprocessing of the output. The
  resulting patch is printed to stdout and can be applied to a freshly
  checked out tree via 'patch -p0 < patchfile'.

  Returns the exit status of RunOnDeps('pack', ...).
  """
  parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                    help='override deps for the specified (comma-separated) '
                         'platform(s); \'all\' will process all deps_os '
                         'references')
  parser.remove_option('--jobs')
  (options, args) = parser.parse_args(args)
  # Force jobs to 1 so the stdout is not annotated with the thread ids
  options.jobs = 1
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')
  if options.verbose:
    client.PrintLocationAndContents()
  return client.RunOnDeps('pack', args)
@metrics.collector.collect_metrics('gclient status')
def CMDstatus(parser, args):
  """Shows modification status for every dependencies."""
  parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                    help='override deps for the specified (comma-separated) '
                         'platform(s); \'all\' will process all deps_os '
                         'references')
  (options, args) = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')
  if options.verbose:
    client.PrintLocationAndContents()
  # Fans the 'status' operation out to every checked-out dependency.
  return client.RunOnDeps('status', args)
@subcommand.epilog("""Examples:
  gclient sync
      update files from SCM according to current configuration,
      *for modules which have changed since last update or sync*
  gclient sync --force
      update files from SCM according to current configuration, for
      all modules (useful for recovering files deleted from local copy)
  gclient sync --revision src@GIT_COMMIT_OR_REF
      update src directory to GIT_COMMIT_OR_REF

JSON output format:
If the --output-json option is specified, the following document structure will
be emitted to the provided file. 'null' entries may occur for subprojects which
are present in the gclient solution, but were not processed (due to custom_deps,
os_deps, etc.)

{
  "solutions" : {
    "<name>": {  # <name> is the posix-normalized path to the solution.
      "revision": [<git id hex string>|null],
      "scm": ["git"|null],
    }
  }
}
""")
@metrics.collector.collect_metrics('gclient sync')
def CMDsync(parser, args):
  """Checkout/update all modules.

  Returns the exit status of RunOnDeps('update', ...); optionally writes
  a per-solution summary JSON file (see epilog above).
  """
  parser.add_option('-f', '--force', action='store_true',
                    help='force update even for unchanged modules')
  parser.add_option('-n', '--nohooks', action='store_true',
                    help='don\'t run hooks after the update is complete')
  parser.add_option('-p', '--noprehooks', action='store_true',
                    help='don\'t run pre-DEPS hooks', default=False)
  parser.add_option('-r', '--revision', action='append',
                    dest='revisions', metavar='REV', default=[],
                    help='Enforces git ref/hash for the solutions with the '
                         'format src@rev. The src@ part is optional and can be '
                         'skipped. You can also specify URLs instead of paths '
                         'and gclient will find the solution corresponding to '
                         'the given URL. If a path is also specified, the URL '
                         'takes precedence. -r can be used multiple times when '
                         '.gclient has multiple solutions configured, and will '
                         'work even if the src@ part is skipped. Revision '
                         'numbers (e.g. 31000 or r31000) are not supported.')
  parser.add_option('--patch-ref', action='append',
                    dest='patch_refs', metavar='GERRIT_REF', default=[],
                    help='Patches the given reference with the format '
                         'dep@target-ref:patch-ref. '
                         'For |dep|, you can specify URLs as well as paths, '
                         'with URLs taking preference. '
                         '|patch-ref| will be applied to |dep|, rebased on top '
                         'of what |dep| was synced to, and a soft reset will '
                         'be done. Use --no-rebase-patch-ref and '
                         '--no-reset-patch-ref to disable this behavior. '
                         '|target-ref| is the target branch against which a '
                         'patch was created, it is used to determine which '
                         'commits from the |patch-ref| actually constitute a '
                         'patch.')
  parser.add_option('--with_branch_heads', action='store_true',
                    help='Clone git "branch_heads" refspecs in addition to '
                         'the default refspecs. This adds about 1/2GB to a '
                         'full checkout. (git only)')
  parser.add_option('--with_tags', action='store_true',
                    help='Clone git tags in addition to the default refspecs.')
  parser.add_option('-H', '--head', action='store_true',
                    help='DEPRECATED: only made sense with safesync urls.')
  parser.add_option('-D', '--delete_unversioned_trees', action='store_true',
                    help='Deletes from the working copy any dependencies that '
                         'have been removed since the last sync, as long as '
                         'there are no local modifications. When used with '
                         '--force, such dependencies are removed even if they '
                         'have local modifications. When used with --reset, '
                         'all untracked directories are removed from the '
                         'working copy, excluding those which are explicitly '
                         'ignored in the repository.')
  parser.add_option('-R', '--reset', action='store_true',
                    help='resets any local changes before updating (git only)')
  parser.add_option('-M', '--merge', action='store_true',
                    help='merge upstream changes instead of trying to '
                         'fast-forward or rebase')
  parser.add_option('-A', '--auto_rebase', action='store_true',
                    help='Automatically rebase repositories against local '
                         'checkout during update (git only).')
  parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                    help='override deps for the specified (comma-separated) '
                         'platform(s); \'all\' will process all deps_os '
                         'references')
  parser.add_option('--process-all-deps', action='store_true',
                    help='Check out all deps, even for different OS-es, '
                         'or with conditions evaluating to false')
  parser.add_option('--upstream', action='store_true',
                    help='Make repo state match upstream branch.')
  parser.add_option('--output-json',
                    help='Output a json document to this path containing '
                         'summary information about the sync.')
  parser.add_option('--no-history', action='store_true',
                    help='GIT ONLY - Reduces the size/time of the checkout at '
                         'the cost of no history. Requires Git 1.9+')
  parser.add_option('--shallow', action='store_true',
                    help='GIT ONLY - Do a shallow clone into the cache dir. '
                         'Requires Git 1.9+')
  parser.add_option('--no_bootstrap', '--no-bootstrap',
                    action='store_true',
                    help='Don\'t bootstrap from Google Storage.')
  parser.add_option('--ignore_locks',
                    action='store_true',
                    help='No longer used.')
  parser.add_option('--break_repo_locks',
                    action='store_true',
                    help='No longer used.')
  parser.add_option('--lock_timeout', type='int', default=5000,
                    help='GIT ONLY - Deadline (in seconds) to wait for git '
                         'cache lock to become available. Default is %default.')
  parser.add_option('--no-rebase-patch-ref', action='store_false',
                    dest='rebase_patch_ref', default=True,
                    help='Bypass rebase of the patch ref after checkout.')
  parser.add_option('--no-reset-patch-ref', action='store_false',
                    dest='reset_patch_ref', default=True,
                    help='Bypass calling reset after patching the ref.')
  (options, args) = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)

  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')

  # The two lock flags are kept only so old command lines don't break.
  if options.ignore_locks:
    print('Warning: ignore_locks is no longer used. Please remove its usage.')

  if options.break_repo_locks:
    print('Warning: break_repo_locks is no longer used. Please remove its '
          'usage.')

  if options.revisions and options.head:
    # TODO(maruel): Make it a parser.error if it doesn't break any builder.
    print('Warning: you cannot use both --head and --revision')

  if options.verbose:
    client.PrintLocationAndContents()
  ret = client.RunOnDeps('update', args)
  if options.output_json:
    # Emit one entry per processed subtree, keyed by posix-normalized path
    # ending in '/' (see the epilog for the schema).
    slns = {}
    for d in client.subtree(True):
      normed = d.name.replace('\\', '/').rstrip('/') + '/'
      slns[normed] = {
          'revision': d.got_revision,
          'scm': d.used_scm.name if d.used_scm else None,
          'url': str(d.url) if d.url else None,
          'was_processed': d.should_process,
      }
    with open(options.output_json, 'w') as f:
      json.dump({'solutions': slns}, f)
  return ret


# 'gclient update' is an alias for 'gclient sync'.
CMDupdate = CMDsync
@metrics.collector.collect_metrics('gclient validate')
def CMDvalidate(parser, args):
  """Validates the .gclient and DEPS syntax."""
  options, args = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')
  rv = client.RunOnDeps('validate', args)
  # Zero exit status from the 'validate' operation means the files parsed.
  print('validate: SUCCESS' if rv == 0 else 'validate: FAILURE')
  return rv
@metrics.collector.collect_metrics('gclient diff')
def CMDdiff(parser, args):
  """Displays local diff for every dependencies."""
  parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                    help='override deps for the specified (comma-separated) '
                         'platform(s); \'all\' will process all deps_os '
                         'references')
  (options, args) = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')
  if options.verbose:
    client.PrintLocationAndContents()
  # Extra command-line args are forwarded to the per-dependency diff.
  return client.RunOnDeps('diff', args)
@metrics.collector.collect_metrics('gclient revert')
def CMDrevert(parser, args):
  """Reverts all modifications in every dependencies.

  That's the nuclear option to get back to a 'clean' state. It removes anything
  that shows up in git status."""
  parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                    help='override deps for the specified (comma-separated) '
                         'platform(s); \'all\' will process all deps_os '
                         'references')
  parser.add_option('-n', '--nohooks', action='store_true',
                    help='don\'t run hooks after the revert is complete')
  parser.add_option('-p', '--noprehooks', action='store_true',
                    help='don\'t run pre-DEPS hooks', default=False)
  parser.add_option('--upstream', action='store_true',
                    help='Make repo state match upstream branch.')
  parser.add_option('--break_repo_locks',
                    action='store_true',
                    help='No longer used.')
  (options, args) = parser.parse_args(args)
  if options.break_repo_locks:
    print('Warning: break_repo_locks is no longer used. Please remove its ' +
          'usage.')

  # --force is implied.
  # These attributes are read by the 'revert' operation even though this
  # subcommand does not define the corresponding flags.
  options.force = True
  options.reset = False
  options.delete_unversioned_trees = False
  options.merge = False
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')
  return client.RunOnDeps('revert', args)
@metrics.collector.collect_metrics('gclient runhooks')
def CMDrunhooks(parser, args):
  """Runs hooks for files that have been modified in the local working copy."""
  parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                    help='override deps for the specified (comma-separated) '
                         'platform(s); \'all\' will process all deps_os '
                         'references')
  parser.add_option('-f', '--force', action='store_true', default=True,
                    help='Deprecated. No effect.')
  (options, args) = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')
  if options.verbose:
    client.PrintLocationAndContents()
  # Hooks always run for this subcommand, regardless of the deprecated -f.
  options.force = True
  options.nohooks = False
  return client.RunOnDeps('runhooks', args)
@metrics.collector.collect_metrics('gclient revinfo')
def CMDrevinfo(parser, args):
  """Outputs revision info mapping for the client and its dependencies.

  This allows the capture of an overall 'revision' for the source tree that
  can be used to reproduce the same tree in the future. It is only useful for
  'unpinned dependencies', i.e. DEPS/deps references without a git hash.
  A git branch name isn't 'pinned' since the actual commit can change.

  Returns 0; the mapping itself is printed by client.PrintRevInfo().
  """
  parser.add_option('--deps', dest='deps_os', metavar='OS_LIST',
                    help='override deps for the specified (comma-separated) '
                         'platform(s); \'all\' will process all deps_os '
                         'references')
  parser.add_option('-a', '--actual', action='store_true',
                    help='gets the actual checked out revisions instead of the '
                         'ones specified in the DEPS and .gclient files')
  parser.add_option('-s', '--snapshot', action='store_true',
                    help='creates a snapshot .gclient file of the current '
                         'version of all repositories to reproduce the tree, '
                         'implies -a')
  parser.add_option('--filter', action='append', dest='filter',
                    help='Display revision information only for the specified '
                         'dependencies (filtered by URL or path).')
  parser.add_option('--output-json',
                    help='Output a json document to this path containing '
                         'information about the revisions.')
  parser.add_option('--ignore-dep-type', choices=['git', 'cipd'],
                    help='Specify to skip processing of a certain type of dep.')
  (options, args) = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')
  # PrintRevInfo reads the options set above from |options| directly.
  client.PrintRevInfo()
  return 0
@metrics.collector.collect_metrics('gclient getdep')
def CMDgetdep(parser, args):
  """Gets revision information and variable values from a DEPS file.

  Prints one line per requested --var and per requested -r/--revision.
  Raises gclient_utils.Error when the DEPS file does not exist.
  """
  parser.add_option('--var', action='append',
                    dest='vars', metavar='VAR', default=[],
                    help='Gets the value of a given variable.')
  parser.add_option('-r', '--revision', action='append',
                    dest='getdep_revisions', metavar='DEP', default=[],
                    help='Gets the revision/version for the given dependency. '
                         'If it is a git dependency, dep must be a path. If it '
                         'is a CIPD dependency, dep must be of the form '
                         'path:package.')
  parser.add_option('--deps-file', default='DEPS',
                    # TODO(ehmaldonado): Try to find the DEPS file pointed by
                    # .gclient first.
                    help='The DEPS file to be edited. Defaults to the DEPS '
                         'file in the current directory.')
  (options, args) = parser.parse_args(args)

  if not os.path.isfile(options.deps_file):
    raise gclient_utils.Error(
        'DEPS file %s does not exist.' % options.deps_file)
  with open(options.deps_file) as f:
    contents = f.read()
  # A .gclient config is optional here; it only supplies built-in variables
  # for DEPS evaluation. Without it we fall back to builtin_vars=None.
  client = GClient.LoadCurrentConfig(options)
  if client is not None:
    builtin_vars = client.get_builtin_vars()
  else:
    logging.warning(
        'Couldn\'t find a valid gclient config. Will attempt to parse the DEPS '
        'file without support for built-in variables.')
    builtin_vars = None
  local_scope = gclient_eval.Exec(contents, options.deps_file,
                                  builtin_vars=builtin_vars)

  for var in options.vars:
    print(gclient_eval.GetVar(local_scope, var))

  for name in options.getdep_revisions:
    if ':' in name:
      # 'path:package' form addresses a CIPD package within a dep.
      name, _, package = name.partition(':')
      if not name or not package:
        parser.error(
            'Wrong CIPD format: %s:%s should be of the form path:pkg.'
            % (name, package))
      print(gclient_eval.GetCIPD(local_scope, name, package))
    else:
      print(gclient_eval.GetRevision(local_scope, name))
@metrics.collector.collect_metrics('gclient setdep')
def CMDsetdep(parser, args):
  """Modifies dependency revisions and variable values in a DEPS file.

  Rewrites the DEPS file in place after applying every --var and
  -r/--revision edit. Raises gclient_utils.Error when the DEPS file does
  not exist; exits via parser.error() on malformed arguments.
  """
  parser.add_option('--var', action='append',
                    dest='vars', metavar='VAR=VAL', default=[],
                    help='Sets a variable to the given value with the format '
                         'name=value.')
  parser.add_option('-r', '--revision', action='append',
                    dest='setdep_revisions', metavar='DEP@REV', default=[],
                    help='Sets the revision/version for the dependency with '
                         'the format dep@rev. If it is a git dependency, dep '
                         'must be a path and rev must be a git hash or '
                         'reference (e.g. src/dep@deadbeef). If it is a CIPD '
                         'dependency, dep must be of the form path:package and '
                         'rev must be the package version '
                         '(e.g. src/pkg:chromium/pkg@2.1-cr0).')
  parser.add_option('--deps-file', default='DEPS',
                    # TODO(ehmaldonado): Try to find the DEPS file pointed by
                    # .gclient first.
                    help='The DEPS file to be edited. Defaults to the DEPS '
                         'file in the current directory.')
  (options, args) = parser.parse_args(args)
  if args:
    parser.error('Unused arguments: "%s"' % '" "'.join(args))
  if not options.setdep_revisions and not options.vars:
    parser.error(
        'You must specify at least one variable or revision to modify.')

  if not os.path.isfile(options.deps_file):
    raise gclient_utils.Error(
        'DEPS file %s does not exist.' % options.deps_file)
  with open(options.deps_file) as f:
    contents = f.read()

  # As in CMDgetdep, the .gclient config only supplies built-in variables.
  client = GClient.LoadCurrentConfig(options)
  if client is not None:
    builtin_vars = client.get_builtin_vars()
  else:
    logging.warning(
        'Couldn\'t find a valid gclient config. Will attempt to parse the DEPS '
        'file without support for built-in variables.')
    builtin_vars = None

  local_scope = gclient_eval.Exec(contents, options.deps_file,
                                  builtin_vars=builtin_vars)

  for var in options.vars:
    name, _, value = var.partition('=')
    if not name or not value:
      parser.error(
          'Wrong var format: %s should be of the form name=value.' % var)
    # Update existing variables in place; append new ones.
    if name in local_scope['vars']:
      gclient_eval.SetVar(local_scope, name, value)
    else:
      gclient_eval.AddVar(local_scope, name, value)

  for revision in options.setdep_revisions:
    name, _, value = revision.partition('@')
    if not name or not value:
      parser.error(
          'Wrong dep format: %s should be of the form dep@rev.' % revision)
    if ':' in name:
      # 'path:package@version' form pins a CIPD package within a dep.
      name, _, package = name.partition(':')
      if not name or not package:
        parser.error(
            'Wrong CIPD format: %s:%s should be of the form path:pkg@version.'
            % (name, package))
      gclient_eval.SetCIPD(local_scope, name, package, value)
    else:
      gclient_eval.SetRevision(local_scope, name, value)

  # Re-render the whole DEPS file with the edits applied.
  with open(options.deps_file, 'wb') as f:
    f.write(gclient_eval.RenderDEPSFile(local_scope).encode('utf-8'))
@metrics.collector.collect_metrics('gclient verify')
def CMDverify(parser, args):
  """Verifies the DEPS file deps are only from allowed_hosts.

  Returns 0 when every first-level dependency's deps come from allowed
  hosts; raises gclient_utils.Error otherwise.
  """
  (options, args) = parser.parse_args(args)
  client = GClient.LoadCurrentConfig(options)
  if not client:
    raise gclient_utils.Error('client not configured; see \'gclient config\'')
  client.RunOnDeps(None, [])
  # Look at each first-level dependency of this gclient only.
  # Report every offending dependency before failing, rather than raising on
  # the first one, so a single run shows the full picture.
  found_disallowed = False
  for dep in client.dependencies:
    bad_deps = dep.findDepsFromNotAllowedHosts()
    if not bad_deps:
      continue
    found_disallowed = True
    print("There are deps from not allowed hosts in file %s" % dep.deps_file)
    for bad_dep in bad_deps:
      print("\t%s at %s" % (bad_dep.name, bad_dep.url))
    print("allowed_hosts:", ', '.join(dep.allowed_hosts))
    sys.stdout.flush()
  if found_disallowed:
    raise gclient_utils.Error(
        'dependencies from disallowed hosts; check your DEPS file.')
  return 0
@subcommand.epilog("""For more information on what metrics are we collecting and
why, please read metrics.README.md or visit https://bit.ly/2ufRS4p""")
@metrics.collector.collect_metrics('gclient metrics')
def CMDmetrics(parser, args):
  """Reports, and optionally modifies, the status of metric collection.

  Returns 0 in all cases.
  """
  parser.add_option('--opt-in', action='store_true', dest='enable_metrics',
                    help='Opt-in to metrics collection.',
                    default=None)
  parser.add_option('--opt-out', action='store_false', dest='enable_metrics',
                    help='Opt-out of metrics collection.')
  options, args = parser.parse_args(args)
  if args:
    parser.error('Unused arguments: "%s"' % '" "'.join(args))
  if not metrics.collector.config.is_googler:
    print("You're not a Googler. Metrics collection is disabled for you.")
    return 0

  # enable_metrics is None when neither --opt-in nor --opt-out was passed:
  # in that case just report the current state without changing it.
  if options.enable_metrics is not None:
    metrics.collector.config.opted_in = options.enable_metrics

  if metrics.collector.config.opted_in is None:
    print("You haven't opted in or out of metrics collection.")
  elif metrics.collector.config.opted_in:
    print("You have opted in. Thanks!")
  else:
    print("You have opted out. Please consider opting in.")
  return 0
class OptionParser(optparse.OptionParser):
  """Option parser shared by every gclient subcommand.

  Adds the global flags (--jobs, --verbose, --gclientfile, --spec,
  --no-nag-max) and normalizes the parsed options so all subcommands see a
  consistent set of attributes.
  """

  # Default .gclient file name; overridable via the GCLIENT_FILE env var.
  gclientfile_default = os.environ.get('GCLIENT_FILE', '.gclient')

  def __init__(self, **kwargs):
    optparse.OptionParser.__init__(
        self, version='%prog ' + __version__, **kwargs)

    # Some arm boards have issues with parallel sync.
    if platform.machine().startswith('arm'):
      jobs = 1
    else:
      jobs = max(8, gclient_utils.NumLocalCpus())

    self.add_option(
        '-j', '--jobs', default=jobs, type='int',
        help='Specify how many SCM commands can run in parallel; defaults to '
             '%default on this machine')
    self.add_option(
        '-v', '--verbose', action='count', default=0,
        help='Produces additional output for diagnostics. Can be used up to '
             'three times for more logging info.')
    self.add_option(
        '--gclientfile', dest='config_filename',
        help='Specify an alternate %s file' % self.gclientfile_default)
    self.add_option(
        '--spec',
        help='create a gclient file containing the provided string. Due to '
             'Cygwin/Python brokenness, it can\'t contain any newlines.')
    self.add_option(
        '--no-nag-max', default=False, action='store_true',
        help='Ignored for backwards compatibility.')

  def parse_args(self, args=None, _values=None):
    """Integrates standard options processing.

    Returns (options, args) where |options| merges the defaults with the
    flags actually passed, and is guaranteed to carry the attributes every
    subcommand expects (revisions, head, nohooks, noprehooks, deps_os,
    force).
    """
    # Create an optparse.Values object that will store only the actual passed
    # options, without the defaults.
    actual_options = optparse.Values()
    _, args = optparse.OptionParser.parse_args(self, args, actual_options)
    # Create an optparse.Values object with the default options.
    options = optparse.Values(self.get_default_values().__dict__)
    # Update it with the options passed by the user.
    options._update_careful(actual_options.__dict__)
    # Store the options passed by the user in an _actual_options attribute.
    # We store only the keys, and not the values, since the values can contain
    # arbitrary information, which might be PII.
    metrics.collector.add('arguments', list(actual_options.__dict__))

    levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
    logging.basicConfig(
        level=levels[min(options.verbose, len(levels) - 1)],
        format='%(module)s(%(lineno)d) %(funcName)s:%(message)s')
    if options.config_filename and options.spec:
      self.error('Cannot specify both --gclientfile and --spec')
    if (options.config_filename and
        options.config_filename != os.path.basename(options.config_filename)):
      self.error('--gclientfile target must be a filename, not a path')
    if not options.config_filename:
      options.config_filename = self.gclientfile_default
    options.entries_filename = options.config_filename + '_entries'
    if options.jobs < 1:
      self.error('--jobs must be 1 or higher')

    # These hacks need to die.
    if not hasattr(options, 'revisions'):
      # GClient.RunOnDeps expects it even if not applicable.
      options.revisions = []
    if not hasattr(options, 'head'):
      options.head = None
    if not hasattr(options, 'nohooks'):
      options.nohooks = True
    if not hasattr(options, 'noprehooks'):
      options.noprehooks = True
    if not hasattr(options, 'deps_os'):
      options.deps_os = None
    if not hasattr(options, 'force'):
      options.force = None
    return (options, args)
def disable_buffering():
  """Wrap sys.stdout so it auto-flushes and annotates lines with thread ids.

  Buildbot kills jobs that stay silent for too long, and Python has a
  strong tendency to buffer sys.stdout during lengthy operations.
  """
  auto_flushing = gclient_utils.MakeFileAutoFlush(sys.stdout)
  sys.stdout = gclient_utils.MakeFileAnnotated(auto_flushing)
def path_contains_tilde():
  """Returns True if depot_tools appears in $PATH as a literal '~' entry."""
  def _resolved(entry):
    return os.path.abspath(os.path.realpath(os.path.expanduser(entry)))
  return any(
      entry.startswith('~') and _resolved(entry) == DEPOT_TOOLS_DIR
      for entry in os.environ['PATH'].split(os.pathsep))
def can_run_gclient_and_helpers():
  """Sanity-checks the environment before running any gclient command.

  Returns False (after printing a diagnostic to stderr) when the Python
  version is too old, the interpreter path cannot be determined, or PATH
  contains a literal '~' entry pointing at depot_tools; True otherwise.
  """
  if sys.hexversion < 0x02060000:
    # 0x02060000 == Python 2.6.0.
    print(
        '\nYour python version %s is unsupported, please upgrade.\n' %
        sys.version.split(' ', 1)[0],
        file=sys.stderr)
    return False
  if not sys.executable:
    print(
        '\nPython cannot find the location of it\'s own executable.\n',
        file=sys.stderr)
    return False
  if path_contains_tilde():
    print(
        '\nYour PATH contains a literal "~", which works in some shells ' +
        'but will break when python tries to run subprocesses. ' +
        'Replace the "~" with $HOME.\n' +
        'See https://crbug.com/952865.\n',
        file=sys.stderr)
    return False
  return True
def main(argv):
  """Doesn't parse the arguments here, just find the right subcommand to
  execute.

  Returns the subcommand's exit status, 1 on a handled error, or 2 when
  the environment cannot run gclient at all.
  """
  if not can_run_gclient_and_helpers():
    return 2
  fix_encoding.fix_encoding()
  disable_buffering()
  setup_color.init()
  dispatcher = subcommand.CommandDispatcher(__name__)
  try:
    return dispatcher.execute(OptionParser(), argv)
  except KeyboardInterrupt:
    gclient_utils.GClientChildren.KillAllRemainingChildren()
    raise
  except (gclient_utils.Error, subprocess2.CalledProcessError) as e:
    print('Error: %s' % str(e), file=sys.stderr)
    return 1
  finally:
    gclient_utils.PrintWarnings()
  # NOTE(review): unreachable — every path in the try block above returns
  # or raises.
  return 0
# Script entry point: show the metrics-collection notice (when applicable)
# and exit with main()'s status.
if '__main__' == __name__:
  with metrics.collector.print_notice_and_exit():
    sys.exit(main(sys.argv[1:]))

# vim: ts=2:sw=2:tw=80:et:
|
CoherentLabs/depot_tools
|
gclient.py
|
Python
|
bsd-3-clause
| 121,375
|
#!/usr/bin/env python
'''
===============================================================================
Interactive Image Segmentation using GrabCut algorithm.
This sample shows interactive image segmentation using grabcut algorithm.
USAGE :
python grabcut.py <filename>
README FIRST:
Two windows will show up, one for input and one for output.
At first, in input window, draw a rectangle around the object using
mouse right button. Then press 'n' to segment the object (once or a few times)
For any finer touch-ups, you can press any of the keys below and draw lines on
the areas you want. Then again press 'n' for updating the output.
Key '0' - To select areas of sure background
Key '1' - To select areas of sure foreground
Key '2' - To select areas of probable background
Key '3' - To select areas of probable foreground
Key 'n' - To update the segmentation
Key 'r' - To reset the setup
Key 's' - To save the results
===============================================================================
'''
import numpy as np
import cv2
import sys
# Drawing colors (BGR order, as used by OpenCV).
BLUE = [255,0,0]        # rectangle color
RED = [0,0,255]         # PR BG
GREEN = [0,255,0]       # PR FG
BLACK = [0,0,0]         # sure BG
WHITE = [255,255,255]   # sure FG

# Brush modes: 'color' is painted on the display image, 'val' on the
# grabCut mask. NOTE(review): the numeric vals appear to match cv2.GC_BGD=0,
# GC_FGD=1, GC_PR_BGD=2, GC_PR_FGD=3 — confirm against the cv2 constants.
DRAW_BG = {'color' : BLACK, 'val' : 0}
DRAW_FG = {'color' : WHITE, 'val' : 1}
DRAW_PR_FG = {'color' : GREEN, 'val' : 3}
DRAW_PR_BG = {'color' : RED, 'val' : 2}

# setting up flags
rect = (0,0,1,1)        # init rectangle (x, y, w, h)
drawing = False         # flag for drawing curves
rectangle = False       # flag for drawing rect
rect_over = False       # flag to check if rect drawn
rect_or_mask = 100      # flag for selecting rect or mask mode
value = DRAW_FG         # drawing initialized to FG
thickness = 3           # brush thickness
def onmouse(event,x,y,flags,param):
global img,img2,drawing,value,mask,rectangle,rect,rect_or_mask,ix,iy,rect_over
# Draw Rectangle
if event == cv2.EVENT_RBUTTONDOWN:
rectangle = True
ix,iy = x,y
elif event == cv2.EVENT_MOUSEMOVE:
if rectangle == True:
img = img2.copy()
cv2.rectangle(img,(ix,iy),(x,y),BLUE,2)
rect = (ix,iy,abs(ix-x),abs(iy-y))
rect_or_mask = 0
elif event == cv2.EVENT_RBUTTONUP:
rectangle = False
rect_over = True
cv2.rectangle(img,(ix,iy),(x,y),BLUE,2)
rect = (ix,iy,abs(ix-x),abs(iy-y))
rect_or_mask = 0
print " Now press the key 'n' a few times until no further change \n"
# draw touchup curves
if event == cv2.EVENT_LBUTTONDOWN:
if rect_over == False:
print "first draw rectangle \n"
else:
drawing = True
cv2.circle(img,(x,y),thickness,value['color'],-1)
cv2.circle(mask,(x,y),thickness,value['val'],-1)
elif event == cv2.EVENT_MOUSEMOVE:
if drawing == True:
cv2.circle(img,(x,y),thickness,value['color'],-1)
cv2.circle(mask,(x,y),thickness,value['val'],-1)
elif event == cv2.EVENT_LBUTTONUP:
if drawing == True:
drawing = False
cv2.circle(img,(x,y),thickness,value['color'],-1)
cv2.circle(mask,(x,y),thickness,value['val'],-1)
# print documentation
print __doc__

# Loading images
if len(sys.argv) == 2:
    filename = sys.argv[1] # for drawing purposes
else:
    print "No input image given, so loading default image, lena.jpg \n"
    print "Correct Usage : python grabcut.py <filename> \n"
    filename = '../cpp/lena.jpg'

img = cv2.imread(filename)
img2 = img.copy() # a copy of original image
mask = np.zeros(img.shape[:2],dtype = np.uint8) # mask initialized to PR_BG
output = np.zeros(img.shape,np.uint8) # output image to be shown

# input and output windows
cv2.namedWindow('output')
cv2.namedWindow('input')
cv2.setMouseCallback('input',onmouse)
cv2.moveWindow('input',img.shape[1]+10,90)

print " Instructions : \n"
print " Draw a rectangle around the object using right mouse button \n"

# Main event loop: redraw both windows and dispatch on key presses.
while(1):
    cv2.imshow('output',output)
    cv2.imshow('input',img)
    k = 0xFF & cv2.waitKey(1)

    # key bindings
    if k == 27: # esc to exit
        break
    elif k == ord('0'): # BG drawing
        print " mark background regions with left mouse button \n"
        value = DRAW_BG
    elif k == ord('1'): # FG drawing
        print " mark foreground regions with left mouse button \n"
        value = DRAW_FG
    elif k == ord('2'): # PR_BG drawing
        value = DRAW_PR_BG
    elif k == ord('3'): # PR_FG drawing
        value = DRAW_PR_FG
    elif k == ord('s'): # save image
        # Save input, annotated, and segmented images side by side,
        # separated by 5px black bars.
        bar = np.zeros((img.shape[0],5,3),np.uint8)
        res = np.hstack((img2,bar,img,bar,output))
        cv2.imwrite('grabcut_output.png',res)
        print " Result saved as image \n"
    elif k == ord('r'): # reset everything
        print "resetting \n"
        rect = (0,0,1,1)
        drawing = False
        rectangle = False
        rect_or_mask = 100
        rect_over = False
        value = DRAW_FG
        img = img2.copy()
        mask = np.zeros(img.shape[:2],dtype = np.uint8) # mask initialized to PR_BG
        output = np.zeros(img.shape,np.uint8) # output image to be shown
    elif k == ord('n'): # segment the image
        print """ For finer touchups, mark foreground and background after pressing keys 0-3
        and again press 'n' \n"""
        # First 'n' after drawing the rect runs grabCut in rect mode; later
        # presses (after touch-ups) run it in mask mode.
        if (rect_or_mask == 0):         # grabcut with rect
            bgdmodel = np.zeros((1,65),np.float64)
            fgdmodel = np.zeros((1,65),np.float64)
            cv2.grabCut(img2,mask,rect,bgdmodel,fgdmodel,1,cv2.GC_INIT_WITH_RECT)
            rect_or_mask = 1
        elif rect_or_mask == 1:         # grabcut with mask
            bgdmodel = np.zeros((1,65),np.float64)
            fgdmodel = np.zeros((1,65),np.float64)
            cv2.grabCut(img2,mask,rect,bgdmodel,fgdmodel,1,cv2.GC_INIT_WITH_MASK)

    # Pixels labeled 1 or 3 in the mask (the DRAW_FG/DRAW_PR_FG vals)
    # become the foreground of the displayed output.
    mask2 = np.where((mask==1) + (mask==3),255,0).astype('uint8')
    output = cv2.bitwise_and(img2,img2,mask=mask2)

cv2.destroyAllWindows()
|
grace-/opencv-3.0.0-cvpr
|
opencv/samples/python2/grabcut.py
|
Python
|
bsd-3-clause
| 6,057
|
# Django settings for the django-ratings example/test project.
# Databases live in the system temp dir so the test run needs no setup.
from tempfile import gettempdir
from os.path import join, dirname

import example_project

ADMINS = (
)

MANAGERS = ADMINS

# Debug flags (DISABLE_CACHE_TEMPLATE follows DEBUG so templates are
# never cached while developing).  NOTE: these were previously assigned
# twice with identical values; the duplicates are removed.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
DISABLE_CACHE_TEMPLATE = DEBUG

DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = join(gettempdir(), 'django_ratings_example_project.db')
TEST_DATABASE_NAME = join(gettempdir(), 'test_django_ratings_example_project.db')
DATABASE_USER = ''
DATABASE_PASSWORD = ''
DATABASE_HOST = ''
DATABASE_PORT = ''

TIME_ZONE = 'Europe/Prague'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True

# Make this unique, and don't share it with anybody.
SECRET_KEY = '88b-01f^x4lh$-s5-hdccnicekg07)niir2g6)93!0#k(=mfv$'

EMAIL_SUBJECT_PREFIX = 'Example project admin: '

# TODO: Fix logging — configure via a logger.ini file in this settings package.

# we want to reset whole cache in test
# until we do that, don't use cache
CACHE_BACKEND = 'dummy://'

# session expire
SESSION_EXPIRE_AT_BROWSER_CLOSE = True

# disable double render in admin
# DOUBLE_RENDER = False

MEDIA_ROOT = join(dirname(example_project.__file__), 'static')
MEDIA_URL = '/static/'
ADMIN_MEDIA_PREFIX = '/static/admin_media/'
|
ella/django-ratings
|
tests/example_project/settings/config.py
|
Python
|
bsd-3-clause
| 1,465
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-22 13:40
from __future__ import unicode_literals
import django.contrib.auth.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: aligns User.username with Django 1.10's
    # contract (150-char limit, UnicodeUsernameValidator, unique).

    dependencies = [
        ('users', '0005_user_resume'),
    ]

    operations = [
        migrations.AlterField(
            model_name='user',
            name='username',
            field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username'),
        ),
    ]
|
TexasLAN/texaslan.org
|
texaslan/users/migrations/0006_auto_20160822_0840.py
|
Python
|
mit
| 745
|
#!/usr/bin/env python2
import dropbox, sys, os
prefix = "travis-kernel-ci"
d = os.getenv("TRAVIS_BUILD_ID")
if not d:
d = "trash"
n = os.getenv("KNAME")
if not n:
n = "undef"
access_token = os.getenv("DROPBOX_TOKEN")
client = dropbox.client.DropboxClient(access_token)
f = open(sys.argv[1])
fname = os.path.basename(sys.argv[1])
dname = sys.argv[2]
response = client.put_file(os.path.join(prefix, d, n, dname, fname), f)
print 'uploaded: ', response
#print "=====================", fname, "======================"
#print client.share(fname)['url']
#print "=====================", len(fname) * "=", "======================"
|
0x7f454c46/travis-kernel-ci
|
dropbox_upload.py
|
Python
|
gpl-2.0
| 642
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from shoop.admin.module_registry import replace_modules
from shoop.admin.modules.products import ProductModule
from shoop.admin.modules.products.views.edit import ProductEditView
from shoop.admin.utils.urls import get_model_url
from shoop.admin.views.search import get_search_results
from shoop.core.models import ProductVisibility
from shoop_tests.admin.utils import admin_only_urls
from shoop_tests.utils import empty_iterable
from shoop.testing.factories import get_default_product, get_default_shop, create_product
from shoop.testing.utils import apply_request_middleware
@pytest.mark.django_db
def test_product_module_search(rf, admin_user):
    """Admin search finds a product by SKU and prefix, but not by very short queries."""
    get_default_shop()
    request = apply_request_middleware(rf.get("/"), user=admin_user)
    # replace_modules/admin_only_urls must wrap the URL lookups below
    with replace_modules([ProductModule]):
        with admin_only_urls():
            default_product = get_default_product()
            model_url = get_model_url(default_product)
            sku = default_product.sku
            assert any(sr.url == model_url for sr in get_search_results(request, query=sku))  # Queries work
            assert any(sr.is_action for sr in get_search_results(request, query=sku[:5]))  # Actions work
            assert empty_iterable(get_search_results(request, query=sku[:2]))  # Short queries don't
@pytest.mark.django_db
def test_product_edit_view_works_at_all(rf, admin_user):
    """ProductEditView renders both for an existing product and in "new" mode."""
    shop = get_default_shop()
    product = create_product("test-product", shop, default_price=200)
    shop_product = product.get_shop_instance(shop)
    # restrict visibility to exercise the visibility-limited rendering path
    shop_product.visibility_limit = ProductVisibility.VISIBLE_TO_GROUPS
    shop_product.save()
    request = apply_request_middleware(rf.get("/"), user=admin_user)
    with replace_modules([ProductModule]):
        with admin_only_urls():
            view_func = ProductEditView.as_view()
            response = view_func(request, pk=product.pk)
            assert (product.sku in response.rendered_content)  # it's probable the SKU is there
            response = view_func(request, pk=None)  # "new mode"
            assert response.rendered_content  # yeah, something gets rendered
@pytest.mark.django_db
def test_product_edit_view_with_params(rf, admin_user):
    """The new-product form pre-fills name and sku passed as GET parameters."""
    get_default_shop()
    initial = {"name": "test name", "sku": "test-sku"}
    request = apply_request_middleware(rf.get("/", initial), user=admin_user)
    with replace_modules([ProductModule]):
        with admin_only_urls():
            response = ProductEditView.as_view()(request)
            content = response.rendered_content
            assert initial["sku"] in content   # it's probable the SKU is there
            assert initial["name"] in content  # it's probable the name is there
|
jorge-marques/shoop
|
shoop_tests/admin/test_product_module.py
|
Python
|
agpl-3.0
| 2,947
|
# -*- coding: utf-8 -*-
# This code is part of Amoco
# Copyright (C) 2006-2011 Axel Tillequin (bdcht3@gmail.com)
# published under GPLv2 license
"""
render.py
=========
This module implements amoco's pygments interface to allow pretty printed
outputs of tables of tokens built from amoco's expressions and instructions.
The rendered texts are used as main inputs for graphic engines to build
their own views' objects.
A token is a tuple (t,s) where t is a Token type and s is a python string.
The highlight method uses the Token type to decorate the string s such that
the targeted renderer is able to show the string with foreground/background
colors and bold/underline/etc stroke attributes.
The API of this module is essentially the vltable class which implements its
str interface by calls to the highlight function, wrapping the pygments formatters
to allow colored output.
Note that more specialized formatting like HTML tables or even LaTeX blobs is
also possible.
If the pygments package is not found, all output default to a kind of
"NullFormatter" that will just ignore input tokens' types and just assemble lines
into undercorated unicode strings.
"""
from io import BytesIO as StringIO
from amoco.config import conf
from amoco.logger import Log

# module-level logger for the render subsystem
logger = Log(__name__)
logger.debug("loading module")

import re
# Import pygments if present; otherwise install minimal stand-ins so the
# rest of the module (Token tags, Formats table) keeps working undecorated.
try:
    from pygments.token import Token
    from pygments.style import Style
    from pygments.lexer import RegexLexer
    from pygments.formatters import *
except ImportError:
    logger.verbose("pygments package not found, no renderer defined")
    has_pygments = False

    # metaclass definition, with a syntax compatible with python2 and python3
    class TokenType(type):
        # any attribute access on the Token class returns the attribute name,
        # mimicking pygments' Token.X hierarchy just enough for tagging
        def __getattr__(cls, key):
            return key

    Token_base = TokenType("Token_base", (), {})

    class Token(Token_base):
        pass

    class NullFormatter(object):
        # fallback formatter: ignores token types, writes raw utf-8 text
        def __init__(self, **options):
            self.options = options

        def format(self, tokensource, outfile):
            for t, v in tokensource:
                outfile.write(v.encode("utf-8"))

    # only the undecorated output format is available without pygments
    Formats = {
        "Null": NullFormatter(),
    }
else:
    logger.verbose("pygments package imported")
    has_pygments = True
    # define default dark style:
    dark = {
        Token.Literal: "#fff",
        Token.Address: "#fb0",
        Token.Orange: "#fb0",
        Token.Constant: "#f30",
        Token.Red: "#f30",
        Token.Prefix: "#fff",
        Token.Mnemonic: "bold",
        Token.Register: "#33f",
        Token.Memory: "#3ff",
        Token.String: "#3f3",
        Token.Segment: "#888",
        Token.Comment: "#f8f",
        Token.Green: "#8f8",
        Token.Good: "bold #8f8",
        Token.Name: "bold",
        Token.Alert: "bold #f00",
        Token.Column: "#000",
    }
    S = {}
    # define sub-tokens with Mark/Taint/Hide attribute,
    # allowing to set tokens types like Token.Register.Taint
    for k in dark.keys():
        S[getattr(k, 'Mark')] = "bg:#224"
        S[getattr(k, 'Taint')] = "bg:#422"
        S[getattr(k, 'Hide')] = "noinherit #222"
    dark.update(S)

    class DarkStyle(Style):
        default_style = ""
        styles = dark

    # define default light style:
    light = {
        Token.Literal: "",
        Token.Address: "#c30",
        Token.Orange: "#c30",
        Token.Constant: "#d00",
        Token.Red: "#d00",
        Token.Prefix: "#000",
        Token.Mnemonic: "bold",
        Token.Register: "#00f",
        Token.Memory: "#00c0c0",
        Token.String: "#008800",
        Token.Segment: "#888",
        Token.Comment: "#a3a",
        Token.Green: "#008800",
        Token.Good: "bold #008800",
        Token.Name: "bold",
        Token.Alert: "bold #f00",
        Token.Column: "#fff",
    }
    S = {}
    for k in light.keys():
        S[getattr(k, 'Mark')] = "bg:#aaaaff"
        S[getattr(k, 'Taint')] = "bg:#ffaaaa"
        S[getattr(k, 'Hide')] = "noinherit #fff"
    light.update(S)

    class LightStyle(Style):
        default_style = ""
        styles = light

    # the default style is dark:
    DefaultStyle = DarkStyle

    # define supported formatters:
    Formats = {
        "Null": NullFormatter(encoding="utf-8"),
        "Terminal": TerminalFormatter(style=DefaultStyle, encoding="utf-8"),
        "Terminal256": Terminal256Formatter(style=DefaultStyle, encoding="utf-8"),
        "TerminalDark": Terminal256Formatter(style=DarkStyle, encoding="utf-8"),
        "TerminalLight": Terminal256Formatter(style=LightStyle, encoding="utf-8"),
        "Html": HtmlFormatter(style=LightStyle, encoding="utf-8"),
        "HtmlDark": HtmlFormatter(style=DarkStyle, encoding="utf-8"),
    }
def highlight(toks, formatter=None, outfile=None):
    """Render a list of (Token, str) pairs into a decorated unicode string.

    Falls back to the formatter named in the amoco configuration (or "Null")
    when none is given, and to a local StringIO when no output buffer is
    provided.  A formatter given by name is resolved through Formats.
    """
    chosen = formatter or Formats.get(conf.UI.formatter, "Null")
    if isinstance(chosen, str):
        chosen = Formats[chosen]
    sink = outfile or StringIO()
    chosen.format(toks, sink)
    return sink.getvalue().decode("utf-8")
def TokenListJoin(j, lst):
    """Interleave the join token *j* between the elements of *lst*.

    A plain string *j* is promoted to a (Token.Literal, str) tuple first.
    If lst[0] is itself a list it is extended in place and returned;
    otherwise a brand-new list is built and returned.
    """
    sep = (Token.Literal, j) if isinstance(j, str) else j
    if lst and isinstance(lst[0], list):
        out = lst[0]
    else:
        out = list(lst[:1])
    for elem in lst[1:]:
        out.append(sep)
        if isinstance(elem, list):
            out.extend(elem)
        else:
            out.append(elem)
    return out
def LambdaTokenListJoin(j, f):
    """Build a callable i -> TokenListJoin(j, f(i)).

    Convenience wrapper used to post-process an instruction *i* through
    *f* and join the resulting token lists with *j*.
    """
    def joined(i):
        return TokenListJoin(j, f(i))
    return joined
class vltable(object):
    """
    A variable length table relies on pygments to pretty print tabulated data.

    Arguments:
        rows (list): optional argument with initial list of tokenrows.
        formatter (Formatter): optional pygment's formatter to use
          (defaults to conf.UI.formatter.)
        outfile (file): optional output file passed to the formatter
          (defaults to StringIO.)

    Attributes:
        rows (list of tokenrow): lines of the table, with tabulated data.
        rowparams (dict): parameters associated with a line.
        maxlength: maximum number of lines (default to infinity).
        hidden_r (set): rows that should be hidden.
        squash_r (bool): row is removed if True or empty if False.
        hidden_c (set): columns that should be hidden.
        squash_c (bool): column is removed if True or empty if False.
        colsize (dict): mapping column index to its required width.
        width (int): total width of the table.
        height (int): total height of the table.
        nrows (int): total number of rows (lines).
        ncols (int): total number of columns.
        header (str): table header line (empty by default).
        footer (str): table footer line (empty by default).
    """

    def __init__(self, rows=None, formatter=None, outfile=None):
        if rows is None:
            rows = []
        self.rows = rows
        # rowparams is shared with every row's show(); hidden_c and colsize
        # below deliberately alias entries of this dict
        self.rowparams = {
            "colsize": {},
            "hidden_c": set(),
            "squash_c": True,
            "formatter": formatter,
            "outfile": outfile,
        }
        self.maxlength = float("inf")
        self.hidden_r = set()
        self.hidden_c = self.rowparams["hidden_c"]
        self.squash_r = True
        self.colsize = self.rowparams["colsize"]
        self.update()
        self.header = ""
        self.footer = ""

    def update(self, *rr):
        "recompute the column width over rr range of rows, and update colsize array"
        for c in range(self.ncols):
            # with an explicit row range, grow from the current width;
            # with no range, recompute from scratch
            cz = self.colsize.get(c, 0) if len(rr) > 0 else 0
            self.colsize[c] = max(cz, self.getcolsize(c, rr, squash=False))

    def getcolsize(self, c, rr=None, squash=True):
        "compute the given column width (over rr list of row indices if not None.)"
        cz = 0
        if not rr:
            rr = range(self.nrows)
        for i in rr:
            # hidden rows are skipped only when squashing is requested
            if self.rowparams["squash_c"] and (i in self.hidden_r):
                if squash:
                    continue
            cz = max(cz, self.rows[i].colsize(c))
        return cz

    @property
    def width(self):
        # total width = sum of column widths plus one separator per column
        sep = self.rowparams.get("sep", "")
        cs = self.ncols * len(sep)
        return sum(self.colsize.values(), cs)

    def setcolsize(self, c, value):
        "set column size to value (c may be a negative index)"
        i = range(self.ncols)[c]
        self.colsize[i] = value

    def addcolsize(self, c, value):
        "add value to the column size (c may be a negative index)"
        i = range(self.ncols)[c]
        self.colsize[i] += value

    def addrow(self, toks):
        "add row of given list of tokens and update table"
        self.rows.append(tokenrow(toks))
        self.update()
        return self

    def addcolumn(self, lot, c=None):
        "add column with provided toks (before index c if given) and update table"
        if c is None:
            c = self.ncols
        for ir, toks in enumerate(lot):
            if ir < self.nrows:
                r = self.rows[ir]
                # pad the row with empty columns up to insertion point c
                for _ in range(r.ncols, c):
                    r.cols.append([(Token.Column, "")])
                toks.insert(0, (Token.Column, ""))
                r.cols.insert(c, toks)
            else:
                logger.warning("addcolumn: to much rows in provided list of tokens")
                break
        self.update()
        return self

    def hiderow(self, n):
        "hide given row"
        self.hidden_r.add(n)

    def showrow(self, n):
        "show given row"
        self.hidden_r.remove(n)

    def hidecolumn(self, n):
        "hide given column"
        self.hidden_c.add(n)

    def showcolumn(self, n):
        "show given column"
        self.hidden_c.remove(n)

    def showall(self):
        "remove all hidden rows/cols"
        self.hidden_r = set()
        self.rowparams["hidden_c"] = set()
        self.hidden_c = self.rowparams["hidden_c"]
        return self

    def grep(self, regex, col=None, invert=False):
        "search for a regular expression in the table; hides non-matching rows (matching ones if invert)"
        L = set()
        R = range(self.nrows)
        for i in R:
            if i in self.hidden_r:
                continue
            C = self.rows[i].rawcols(col)
            for c, s in enumerate(C):
                if c in self.hidden_c:
                    continue
                if re.search(regex, s):
                    L.add(i)
                    break
        if not invert:
            # keep matches visible: hide the complement
            L = set(R) - L
        for n in L:
            self.hiderow(n)
        return self

    @property
    def nrows(self):
        return len(self.rows)

    @property
    def ncols(self):
        if self.nrows > 0:
            return max((r.ncols for r in self.rows))
        else:
            return 0

    def __str__(self):
        s = []
        formatter = self.rowparams["formatter"]
        outfile = self.rowparams["outfile"]
        for i in range(self.nrows):
            if i in self.hidden_r:
                # non-squashed hidden rows are rendered with the Hide token
                if not self.squash_r:
                    s.append(
                        highlight(
                            [
                                (
                                    Token.Hide,
                                    self.rows[i].show(raw=True, **self.rowparams),
                                )
                            ],
                            formatter,
                            outfile,
                        )
                    )
            else:
                s.append(self.rows[i].show(**self.rowparams))
        # truncate long tables and append an ellipsis line
        if len(s) > self.maxlength:
            s = s[: self.maxlength - 1]
            s.append(highlight([(Token.Literal, icons.dots)], formatter, outfile))
        if self.header:
            s.insert(0, self.header)
        if self.footer:
            s.append(self.footer)
        return "\n".join(s)
class tokenrow(object):
    """
    A vltable row (line) of tabulated data tokens.

    Attributes:
        toks (list): list of tokens tuple (Token.Type, str).
        maxwidth: maximum authorized width of this row.
        align (str): left/center/right alignment indicator (default to "<" left).
        fill (str): fill character used for padding to required size.
        separator (str): character used for separation of columns.
        cols (list): list of columns of tokens.
        ncols (int): number of columns in this row.
    """

    def __init__(self, toks=None):
        if toks is None:
            toks = []
        self.maxwidth = float("inf")
        self.align = "<"
        self.fill = " "
        self.separator = ""
        # normalize every token value to str
        toks = [(t, "%s" % s) for (t, s) in toks]
        self.cols = self.cut(toks)

    def cut(self, toks):
        "cut the raw list of tokens into a list of column of tokens"
        C = []
        c = []
        for t in toks:
            c.append(t)
            # a Token.Column token terminates the current column (and stays in it)
            if t[0] == Token.Column:
                C.append(c)
                c = []
        C.append(c)
        return C

    def colsize(self, c):
        "return the column size (width), ignoring the Token.Column markers"
        if c >= len(self.cols):
            return 0
        return sum((len(t[1]) for t in self.cols[c] if t[0] != Token.Column))

    @property
    def ncols(self):
        return len(self.cols)

    def rawcols(self, j=None):
        "return the raw (undecorated) string of this row (j-th column if given)"
        r = []
        cols = self.cols
        if j is not None:
            cols = self.cols[j : j + 1]
        for c in cols:
            r.append("".join([t[1] for t in c]))
        return r

    def show(self, raw=False, **params):
        "highlight the row with optional parameters"
        formatter = params.get("formatter", None)
        outfile = params.get("outfile", None)
        align = params.get("align", self.align)
        fill = params.get("fill", self.fill)
        sep = params.get("sep", self.separator)
        width = params.get("maxwidth", self.maxwidth)
        colsz = params.get("colsize")
        hidden_c = params.get("hidden_c", set())
        squash_c = params.get("squash_c", True)
        head = params.get("head", "")
        tail = params.get("tail", "")
        if raw:
            # raw mode: undecorated text, local buffer
            formatter = "Null"
            outfile = None
        r = [head]
        tz = 0  # running total width, used to clip against maxwidth
        for i, c in enumerate(self.cols):
            toks = []
            sz = 0
            mz = colsz[i]
            tz += mz
            if tz > width:
                # shrink this column so the row fits within maxwidth
                mz = mz - (tz - width)
            skip = False
            for tt, tv in c:
                if tt == Token.Column:
                    break
                if skip:
                    continue
                toks.append([tt, "%s" % tv])
                sz += len(tv)
                if sz > mz:
                    # truncate the overflowing token and mark with "###"
                    q = (sz - mz) + 3
                    toks[-1][1] = tv[0:-q] + "###"
                    skip = True
            if sz < mz:
                # pad to the column width on the chosen side
                pad = fill * (mz - sz)
                if align == "<":
                    toks[-1][1] += pad
                elif align == ">":
                    toks[0][1] = pad + toks[0][1]
            if i in hidden_c:
                if not squash_c:
                    toks = [(Token.Hide, highlight(toks, "Null", None))]
                else:
                    toks = []
            r.append(highlight(toks, formatter, outfile))
            # tt still holds the last token type of this column
            if tt == Token.Column and sep:
                r.append(sep)
        r.append(tail)
        return "".join(r)
# Icon glyphs used by the UI.  Each ascii attribute has a u-prefixed unicode
# twin; __getattribute__ transparently redirects to the unicode variant when
# conf.UI.unicode is enabled (do NOT add attributes without a 'u' twin).
class Icons:
    sep = ' | '
    dots = '...'
    tri = ' > '
    lar = ' <- '
    dbl = '='
    hor = '-'
    ver = '|'
    top = 'T'
    bot = '_'
    usep = ' \u2502 '
    udots = '\u2504 '
    utri = ' \u25b6 '
    ular = ' \u21fd '
    udbl = '\u2550'
    uhor = '\u2500'
    uver = '\u2502'
    utop = '\u22A4'
    ubot = '\u22A5'
    # operator -> unicode symbol table, filled at module level below class
    mop = {}

    def __getattribute__(self, a):
        # 'mop' and 'op' are looked up directly; everything else gets the
        # 'u' prefix when unicode output is configured
        if a not in ('mop', 'op') and conf.UI.unicode:
            return super().__getattribute__('u' + a)
        else:
            return super().__getattribute__(a)

    def op(self, symbol):
        # map an operator string to its unicode symbol (identity fallback);
        # NOTE(review): gated on conf.Cas.unicode while attributes use
        # conf.UI.unicode — confirm this asymmetry is intended
        if conf.Cas.unicode:
            return self.mop.get(symbol, symbol)
        else:
            return symbol
# module-wide singleton used by all renderers
icons = Icons()

# define operator unicode symbols (consulted by Icons.op):
icons.mop["-"] = "\u2212"
icons.mop["**"] = "\u2217"
icons.mop["&"] = "\u2227"
icons.mop["|"] = "\u2228"
icons.mop["^"] = "\u2295"
icons.mop["~"] = "\u2310"
icons.mop["=="] = "\u225f"
icons.mop["!="] = "\u2260"
icons.mop["<="] = "\u2264"
icons.mop[">="] = "\u2265"
icons.mop[">=."] = "\u22DD"
icons.mop["<."] = "\u22D6"
icons.mop["<<"] = "\u226a"
icons.mop[">>"] = "\u226b"
icons.mop[".>>"] = "\u00B1\u226b"
icons.mop["<<<"] = "\u22d8"
icons.mop[">>>"] = "\u22d9"
def replace_mnemonic_token(l, value):
    """Overwrite the text of every Mnemonic token in row *l* in place.

    The replacement is left-justified to the old text's width so the
    table layout is preserved.
    """
    for idx, (tt, tv) in enumerate(l):
        if tt == Token.Mnemonic:
            l[idx] = (tt, value.ljust(len(tv)))
def replace_opn_token(l, n, value):
    """Replace operand *n* of token row *l* in place.

    The row layout is assumed to be [mnemonic, opn0, sep, opn1, sep, ...],
    so operand n lives at index 2*n + 1.  A value of None removes the
    operand together with its trailing separator (if any); a tuple replaces
    the whole token; any other value replaces only the token's text.
    """
    pos = 2 * n + 1
    if value is None:
        if pos + 1 < len(l):
            l.pop(pos + 1)  # drop the separator following the operand
        l.pop(pos)
    elif isinstance(value, tuple):
        l[pos] = value
    else:
        l[pos] = (l[pos][0], value)
|
bdcht/amoco
|
amoco/ui/render.py
|
Python
|
gpl-2.0
| 18,256
|
# coding=utf-8
"""Dialog test.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'pnakis@hotmail.com'
__date__ = '2017-06-04'
__copyright__ = 'Copyright 2017, Panagiotis Nakis'

import unittest

from PyQt4.QtGui import QDialogButtonBox, QDialog

from vector_transform_dialog import VectorTranformationDialog
from utilities import get_qgis_app

# a QGIS application instance must exist before any dialog is constructed
QGIS_APP = get_qgis_app()
class VectorTranformationDialogTest(unittest.TestCase):
    """Checks that the dialog's OK/Cancel buttons produce the expected results."""

    def setUp(self):
        """Create a fresh dialog before each test."""
        self.dialog = VectorTranformationDialog(None)

    def tearDown(self):
        """Release the dialog after each test."""
        self.dialog = None

    def test_dialog_ok(self):
        """Clicking OK accepts the dialog."""
        button = self.dialog.button_box.button(QDialogButtonBox.Ok)
        button.click()
        result = self.dialog.result()
        self.assertEqual(result, QDialog.Accepted)

    def test_dialog_cancel(self):
        """Clicking Cancel rejects the dialog."""
        button = self.dialog.button_box.button(QDialogButtonBox.Cancel)
        button.click()
        result = self.dialog.result()
        self.assertEqual(result, QDialog.Rejected)
if __name__ == "__main__":
    # Run the dialog tests with verbose output when executed directly.
    test_suite = unittest.makeSuite(VectorTranformationDialogTest)
    unittest.TextTestRunner(verbosity=2).run(test_suite)
|
pnakis/qgis_vector_transform
|
test/test_vector_transform_dialog.py
|
Python
|
gpl-3.0
| 1,563
|
#############################################################################
# local_volume.py
# this file is part of GEOCUBIT #
# #
# Created by Emanuele Casarotti #
# Copyright (c) 2008 Istituto Nazionale di Geofisica e Vulcanologia #
# #
#############################################################################
# #
# This program is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., #
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. #
# #
#############################################################################
# Bring cubit into scope: prefer the project's start wrapper, fall back to a
# direct import.  NOTE(review): if the first import of `start` fails, the
# `start.start_numpy()` call below will still raise NameError — confirm
# `start` is always importable in supported deployments.
try:
    import start as start
    cubit = start.start_cubit()
except:
    try:
        import cubit
    except:
        print 'error importing cubit, check if cubit is installed'
        pass

numpy = start.start_numpy()
def check_orientation(grdfileNAME):
    """Return 'SOUTH2NORTH' or 'NORTH2SOUTH' for an x,y,z grid file.

    Scans the first grid row (while x keeps increasing), then compares the
    first y of the next row against the file's first y: increasing y means
    the rows run south to north.
    """
    try:
        grdfile = open(grdfileNAME, 'r')
        print 'reading ', grdfileNAME
    except:
        txt = 'check_orintation ->error reading: ' + str(grdfileNAME)
        raise Exception(txt)
    diff = 1
    txt = grdfile.readline()
    x0, y0, z = map(float, txt.split())
    # walk along the first row until x stops increasing (row boundary)
    while diff > 0:
        try:
            txt = grdfile.readline()
        except:
            # NOTE(review): readline rarely raises; a truncated single-row
            # file would instead fail in map(float, ...) below — confirm
            break
        x, y, z = map(float, txt.split())
        diff = x - x0
        x0 = x
    # y of the second row vs. y of the first point decides the orientation
    diff = y - y0
    if diff > 0:
        orientation = 'SOUTH2NORTH'
    else:
        orientation = 'NORTH2SOUTH'
    grdfile.close()
    return orientation
def read_irregular_surf(filename):
    """Load an irregular x,y,z grid file.

    Returns (gridpoints, z) where gridpoints is an (N, 2) array of x,y
    coordinates and z the matching (N,) elevation vector.
    """
    try:
        xyz = numpy.loadtxt(filename)
    except:
        raise Exception('error reading ' + filename)
    return xyz[:, 0:2], xyz[:, 2]
def get_interpolated_elevation(point, gridpoints, z, k=1):
    """for x0 and y0 return the interpolated z point of a irregular x,y,z grid
    K=1 nearest
    K>1 number of points used in a inverse distance weighted interpolation
    point=(x0,y0)
    gridpoints=numpy.array([[x1,y1],[x2,y2],...)

    Fix: when `point` coincides exactly with a grid node, the 1/distance
    weight used to become inf and the weighted sum nan; that node's
    elevation is now returned directly.
    """
    # squared euclidean distance to every grid point
    dist = numpy.sum((point - gridpoints)**2, axis=1)
    zindex = dist.argsort()[:k]
    kmindist = dist[zindex]
    if kmindist[0] == 0:
        # exact hit on a grid node: avoid the 1/0 weight
        return z[zindex[0]]
    # inverse-distance weights, normalized to sum to 1
    w = 1 / kmindist
    w /= w.sum()
    zi = numpy.dot(w.T, z[zindex])
    return zi
def create_grid(xmin, xmax, ymin, ymax, xstep, ystep):
    """Build a regular grid over [xmin, xmax] x [ymin, ymax], bounds included.

    Returns (x, y, gridpoints) where x and y are the mgrid coordinate
    matrices and gridpoints the flattened (N, 2) list of x,y pairs.
    """
    # half-step padding on the stop value makes mgrid include the upper bound
    grid_x, grid_y = numpy.mgrid[xmin:xmax + xstep / 2.:xstep,
                                 ymin:ymax + ystep / 2.:ystep]
    pts = numpy.vstack([grid_x.ravel(), grid_y.ravel()]).T
    return grid_x, grid_y, pts
def process_surfacefiles(iproc, nx, ny, nstep, grdfile, unit, lat_orientation):
from utilities import geo2utm
numpy = start.start_numpy()
elev = numpy.zeros([nx, ny], float)
coordx = numpy.zeros([nx, ny], float)
coordy = numpy.zeros([nx, ny], float)
icoord = 0
lat_orientation = check_orientation(grdfile)
try:
grdfile = open(grdfile, 'r')
# print 'reading ',grdfile
except:
txt = 'error reading: ' + str(grdfile)
raise Exception(txt)
if lat_orientation is 'SOUTH2NORTH':
rangey = range(0, ny)
else:
rangey = range(ny - 1, -1, -1)
lat_orientation = 'NORTH2SOUTH'
print lat_orientation
for iy in rangey:
for ix in range(0, nx):
txt = grdfile.readline()
try:
if len(txt) != 0:
x, y, z = map(float, txt.split())
if iy % nstep == 0 and ix % nstep == 0:
icoord = icoord + 1
x_current, y_current = geo2utm(x, y, unit)
jx = min(nx - 1, ix / nstep)
jy = min(ny - 1, iy / nstep)
coordx[jx, jy] = x_current
coordy[jx, jy] = y_current
elev[jx, jy] = z
except:
print 'error reading point ', iy * nx + ix, txt, \
grdfile.name, ' proc '
raise NameError('error reading point')
if (nx) * (ny) != icoord:
if iproc == 0:
print 'error in the surface file ' + grdfile.name
if iproc == 0:
print 'x points ' + str(nx) + ' y points ' + str(ny) + \
' tot points ' + str((nx) * (ny))
if iproc == 0:
print 'points read in ' + grdfile.name + ': ' + str(icoord)
raise NameError
grdfile.close()
return coordx, coordy, elev
def process_irregular_surfacefiles(iproc, nx, ny, xmin, xmax, ymin, ymax,
                                   xstep, ystep, grdfile):
    """Resample an irregular surface file onto a regular nx-by-ny grid.

    Scattered x,y,z samples are read from grdfile and interpolated onto
    the regular grid by inverse-distance weighting of the 4 nearest
    samples.  Returns (coordx, coordy, elev), each shaped (nx, ny).
    """
    scattered_xy, scattered_z = read_irregular_surf(grdfile)
    coordx, coordy, targets = create_grid(xmin, xmax, ymin, ymax, xstep, ystep)
    elev = numpy.empty([len(targets)])
    for idx, pt in enumerate(targets):
        elev[idx] = get_interpolated_elevation(pt, scattered_xy, scattered_z, k=4)
    coordx.shape = (nx, ny)
    coordy.shape = (nx, ny)
    elev.shape = (nx, ny)
    return coordx, coordy, elev
def read_grid(filename=None):
    """Build the (nx, ny, nz) elevation stack from the configured surface files.

    Grid dimensions come from the cfg (either nx/ny with optional nstep
    subsampling, or derived from lat/lon extent and step).  Bottom and top
    surfaces may be flat; intermediate surfaces are read from files, on a
    regular or irregular grid.  Returns (coordx, coordy, elev, nx, ny).
    """
    import start as start
    mpiflag, iproc, numproc, mpi = start.start_mpi()
    #
    numpy = start.start_numpy()
    cfg = start.start_cfg(filename=filename)
    # if cfg.irregulargridded_surf==True then cfg.nx and cfg.ny are the
    # desired number of point along the axis....
    if cfg.nx and cfg.ny:
        nx = cfg.nx
        ny = cfg.ny
        if cfg.nstep:
            # subsampled dimensions when reading every nstep-th point
            nx = min(cfg.nx, int(cfg.nx / cfg.nstep) + 1)
            ny = min(cfg.ny, int(cfg.ny / cfg.nstep) + 1)
            nstep = cfg.nstep
        else:
            nstep = 1
    else:
        # derive grid size from the geographic extent and step
        try:
            xstep = cfg.step
            ystep = cfg.step
        except:
            xstep = cfg.xstep
            ystep = cfg.ystep
        nx = int((cfg.longitude_max - cfg.longitude_min) / xstep) + 1
        ny = int((cfg.latitude_max - cfg.latitude_min) / ystep) + 1
        nstep = 1
    #
    if cfg.irregulargridded_surf:
        # recompute the effective steps for the regular target grid
        xt, xstep = numpy.linspace(cfg.xmin, cfg.xmax, num=nx, retstep=True)
        yt, ystep = numpy.linspace(cfg.ymin, cfg.ymax, num=ny, retstep=True)
    elev = numpy.zeros([nx, ny, cfg.nz], float)
    #
    if cfg.bottomflat:
        # flat bottom: constant depth, file list starts at surface 1
        elev[:, :, 0] = cfg.depth_bottom
        bottomsurface = 1
    else:
        bottomsurface = 0
    # intermediate surfaces (all but the last) come from files
    for inz in range(bottomsurface, cfg.nz - 1):
        grdfilename = cfg.filename[inz - bottomsurface]
        if cfg.irregulargridded_surf:
            coordx, coordy, elev_1 = process_irregular_surfacefiles(
                iproc, nx, ny, cfg.xmin, cfg.xmax, cfg.ymin, cfg.ymax,
                xstep, ystep, grdfilename)
        else:
            coordx, coordy, elev_1 = process_surfacefiles(
                iproc, nx, ny, nstep, grdfilename, cfg.unit,
                cfg.lat_orientation)
        elev[:, :, inz] = elev_1[:, :]
    #
    inz = cfg.nz - 1  # last surface
    if cfg.sea:
        # sea surface duplicates the surface below it
        elev[:, :, inz] = elev[:, :, inz - 1]
    else:
        if cfg.topflat:
            elev[:, :, inz] = cfg.depth_top
        else:
            grdfile = cfg.filename[inz - bottomsurface]
            print 'reading ', cfg.filename[inz - bottomsurface]
            if cfg.irregulargridded_surf:
                coordx, coordy, elev_1 = process_irregular_surfacefiles(
                    iproc, nx, ny, cfg.xmin, cfg.xmax, cfg.ymin, cfg.ymax,
                    xstep, ystep, grdfile)
            else:
                coordx, coordy, elev_1 = process_surfacefiles(
                    iproc, nx, ny, nstep, grdfile, cfg.unit,
                    cfg.lat_orientation)
            elev[:, :, inz] = elev_1[:, :]
            if cfg.subduction:
                # enforce a minimum separation between top surface and slab
                print 'subduction'
                top = elev[:, :, inz]
                slab = elev[:, :, inz - 1]
                subcrit = numpy.abs(top - slab) < cfg.subduction_thres
                top[subcrit] = slab[subcrit] + cfg.subduction_thres
                print len(top[subcrit])
                elev[:, :, inz] = top
    # NOTE(review): coordx/coordy are only bound if at least one surface was
    # read from a file; all-flat configurations would raise NameError here.
    return coordx, coordy, elev, nx, ny
|
casarotti/GEOCUBIT--experimental
|
geocubitlib/local_volume.py
|
Python
|
gpl-3.0
| 9,241
|
"""empty message
Revision ID: a9cb6e2602a8
Revises: 347c8c1b97d6
Create Date: 2016-03-24 17:26:15.753303
"""
# revision identifiers, used by Alembic.
# revision identifiers, used by Alembic to order this migration.
revision = 'a9cb6e2602a8'
down_revision = '347c8c1b97d6'

from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add the nullable `usage` column (VARCHAR(200)) to the translation table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('translation', sa.Column('usage', sa.String(length=200), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Drop the `usage` column from the translation table (reverts upgrade)."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('translation', 'usage')
    ### end Alembic commands ###
|
itsankoff/wordrunner
|
migrations/versions/a9cb6e2602a8_.py
|
Python
|
gpl-3.0
| 622
|
import os
import pwd
class NoSuchUser(Exception):
    """Raised when no passwd entry exists for the given uid or username."""
    pass
class User:
    """A local account as seen by Pegasus, with its per-user service paths."""

    def __init__(self, uid, gid, username, homedir):
        self.uid = uid
        self.gid = gid
        self.username = username
        self.homedir = homedir

    def get_pegasus_dir(self):
        """Per-user Pegasus state directory (~/.pegasus)."""
        return os.path.join(self.homedir, ".pegasus")

    def get_ensembles_dir(self):
        """Directory holding the user's ensembles."""
        return os.path.join(self.get_pegasus_dir(), "ensembles")

    def get_master_db(self):
        """Path of the user's master workflow database."""
        return os.path.join(self.get_pegasus_dir(), "workflow.db")

    def get_master_db_url(self):
        """SQLAlchemy URL for the master workflow database."""
        return "sqlite:///%s" % self.get_master_db()
def __user_from_pwd(pw):
    """Build a User from a pwd struct entry."""
    return User(uid=pw.pw_uid, gid=pw.pw_gid,
                username=pw.pw_name, homedir=pw.pw_dir)
def get_user_by_uid(uid):
    """Look up a system account by numeric uid.

    Raises NoSuchUser when the uid has no passwd entry.
    """
    try:
        entry = pwd.getpwuid(uid)
    except KeyError:
        raise NoSuchUser(uid)
    return __user_from_pwd(entry)
def get_user_by_username(username):
    """Look up a system account by login name.

    Raises NoSuchUser when the name has no passwd entry or is of the
    wrong type (TypeError from getpwnam).
    """
    try:
        entry = pwd.getpwnam(username)
    except (KeyError, TypeError):
        raise NoSuchUser(username)
    return __user_from_pwd(entry)
|
pegasus-isi/pegasus
|
packages/pegasus-python/src/Pegasus/user.py
|
Python
|
apache-2.0
| 1,084
|
from flask import g, jsonify
def register_token_endpoint(app, auth):
    """Attach the /generate_token route to *app*, protected by *auth*.

    The inner function's name becomes the Flask endpoint name
    ("get_auth_token"), so it must not be renamed.
    """
    @app.route('/generate_token')
    @auth.login_required
    def get_auth_token():
        # g.user is populated by the auth extension's verification callback;
        # the token bytes are decoded so the JSON payload is a plain string
        token = g.user.generate_auth_token()
        return jsonify({'token': token.decode('ascii')})
|
totokaka/Powerfulperms-web
|
backend/backend/token_endpoint.py
|
Python
|
gpl-3.0
| 258
|
#!/usr/bin/python
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all cities available to target.
A full list of available tables can be found at
https://developers.google.com/doubleclick-publishers/docs/reference/v201306/PublisherQueryLanguageService
"""
__author__ = 'api.shamjeff@gmail.com (Jeff Sham)'

# Locate the client library. If module was installed via "setup.py" script, then
# the following two lines are not needed.
import os
import sys
sys.path.insert(0, os.path.join('..', '..', '..', '..'))

# Import appropriate classes from the client library.
from adspygoogle import DfpClient

# Initialize client object (reads auth/config relative to the library root).
client = DfpClient(path=os.path.join('..', '..', '..', '..'))

# Initialize appropriate service.
pql_service = client.GetService(
    'PublisherQueryLanguageService', version='v201306')

# Create statement to select all targetable cities.
# A limit of 500 is set here. You may want to page through such a large
# result set.
# For criteria that do not have a "targetable" property, that predicate
# may be left off, i.e. just "SELECT * FROM Browser_Groups LIMIT 500"
select_statement = {'query':
                    'SELECT * FROM City WHERE targetable = true LIMIT 500'}

# Get cities by statement (Select returns a list; element 0 is the result set).
result_set = pql_service.Select(select_statement)[0]

# Display results: one header line, then one line of values per row.
if result_set:
    column_labels = [label['labelName'] for label in result_set['columnTypes']]
    print 'Columns are: %s' % ', '.join(column_labels)
    for row in result_set['rows']:
        values = [value.get('value', '') for value in row['values']]
        print 'Values are: %s' % ', '.join(values).encode('utf-8')
else:
    print 'No results found.'
|
lociii/googleads-python-lib
|
examples/adspygoogle/dfp/v201306/get_all_cities.py
|
Python
|
apache-2.0
| 2,215
|
#!/usr/bin/env python
# Copyright (c) 2014, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree. An additional grant
# of patent rights can be found in the PATENTS file in the same directory.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
import subprocess
import unittest
# osquery-specific testing utils
import test_base
from utils import platform
def allowed_platform(qp):
    """Return True if platform spec *qp* applies to the host platform.

    *qp* is a pack/query "platform" value: "" (unset), "all", "any",
    or a string expected to contain the host platform name.
    """
    if qp in ("all", "any") or not qp:
        return True
    # Substring match, equivalent to the original qp.find(platform()) >= 0.
    return platform() in qp
class ReleaseTests(test_base.QueryTester):
    """Release-readiness checks run against a built osquery binary."""

    def test_pack_queries(self):
        # Load every query pack in the source tree and execute the queries
        # that apply to the current platform.
        packs = {}
        # NOTE(review): SOURCE_DIR is a module global assigned in the
        # __main__ block below; this test assumes it has been set.
        PACKS_DIR = SOURCE_DIR + "/packs"
        for root, dirs, files in os.walk(PACKS_DIR):
            for name in files:
                with open(os.path.join(PACKS_DIR, name), 'r') as fh:
                    packs[name] = json.loads(fh.read())
        for name, pack in packs.items():
            if "queries" not in pack:
                continue
            if "platform" in pack and not allowed_platform(pack["platform"]):
                continue
            queries = []
            for query_name, query in pack["queries"].items():
                # Per-query platform filter; empty means "any platform".
                qp = query["platform"] if "platform" in query else ""
                if allowed_platform(qp):
                    queries.append(query["query"])
            self._execute_set(queries)

    def test_no_avx_instructions(self):
        # Disassemble the binary and grep for an AVX instruction;
        # grep exiting with 1 means "no match found".
        if platform() == "darwin":
            tool = "otool -tV"
        else:
            tool = "objdump -d"
        proc = subprocess.call(
            "%s %s | grep vxorps" % (tool, self.binary), shell=True)
        # Require no AVX instructions
        self.assertEqual(proc, 1)

    def test_no_local_link(self):
        # List dynamic library dependencies and require none resolve
        # under /usr/local/ (non-portable local installs).
        if platform() == "darwin":
            tool = "otool -L"
        else:
            tool = "ldd"
        proc = subprocess.call(
            "%s %s | grep /usr/local/" % (tool, self.binary), shell=True)
        # Require no local dynamic dependent links.
        self.assertEqual(proc, 1)
if __name__ == '__main__':
    # Resolve the repository root relative to this script so packs can be
    # located regardless of the current working directory.
    SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
    SOURCE_DIR = os.path.abspath(SCRIPT_DIR + "/../../")
    module = test_base.Tester()
    # Find and import the thrift-generated python interface
    test_base.loadThriftFromBuild(test_base.ARGS.build)
    module.run()
|
kk9599/osquery
|
tools/tests/test_release.py
|
Python
|
bsd-3-clause
| 2,527
|
# -*- coding: ibm850 -*-
# C++ code templates expanded by make_version() below.  The template
# language uses $...$ macro spans: "argc" -> arity, "ifret"/"ifnoret",
# "ifconst"/"ifnoconst", "ifargs", "ifretargs", "iftempl" emit their
# payload conditionally, and "arg,"/"arg"/"noarg" repeat their payload
# per argument index (with "@" replaced by the index).
#
# Typed binder: used when TYPED_METHOD_BIND is defined; the MethodBind
# is templated on the actual class T.
template_typed = """
#ifdef TYPED_METHOD_BIND
template<class T $ifret ,class R$ $ifargs ,$ $arg, class P@$>
class MethodBind$argc$$ifret R$$ifconst C$ : public MethodBind {
public:
$ifret R$ $ifnoret void$ (T::*method)($arg, P@$) $ifconst const$;
#ifdef DEBUG_METHODS_ENABLED
virtual Variant::Type _gen_argument_type(int p_arg) const { return _get_argument_type(p_arg); }
virtual GodotTypeInfo::Metadata get_argument_meta(int p_arg) const {
$ifret if (p_arg==-1) return GetTypeInfo<R>::METADATA;$
$arg if (p_arg==(@-1)) return GetTypeInfo<P@>::METADATA;
$
return GodotTypeInfo::METADATA_NONE;
}
Variant::Type _get_argument_type(int p_argument) const {
$ifret if (p_argument==-1) return (Variant::Type)GetTypeInfo<R>::VARIANT_TYPE;$
$arg if (p_argument==(@-1)) return (Variant::Type)GetTypeInfo<P@>::VARIANT_TYPE;
$
return Variant::NIL;
}
virtual PropertyInfo _gen_argument_type_info(int p_argument) const {
$ifret if (p_argument==-1) return GetTypeInfo<R>::get_class_info();$
$arg if (p_argument==(@-1)) return GetTypeInfo<P@>::get_class_info();
$
return PropertyInfo();
}
#endif
virtual String get_instance_class() const {
return T::get_class_static();
}
virtual Variant call(Object* p_object,const Variant** p_args,int p_arg_count, Callable::CallError& r_error) {
T *instance=Object::cast_to<T>(p_object);
r_error.error=Callable::CallError::CALL_OK;
#ifdef DEBUG_METHODS_ENABLED
ERR_FAIL_COND_V(!instance,Variant());
if (p_arg_count>get_argument_count()) {
r_error.error=Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
r_error.argument=get_argument_count();
return Variant();
}
if (p_arg_count<(get_argument_count()-get_default_argument_count())) {
r_error.error=Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
r_error.argument=get_argument_count()-get_default_argument_count();
return Variant();
}
$arg CHECK_ARG(@);
$
#endif
$ifret Variant ret = $(instance->*method)($arg, _VC(@)$);
$ifret return Variant(ret);$
$ifnoret return Variant();$
}
#ifdef PTRCALL_ENABLED
virtual void ptrcall(Object*p_object,const void** p_args,void *r_ret) {
T *instance=Object::cast_to<T>(p_object);
$ifret PtrToArg<R>::encode( $ (instance->*method)($arg, PtrToArg<P@>::convert(p_args[@-1])$) $ifret ,r_ret)$ ;
}
#endif
MethodBind$argc$$ifret R$$ifconst C$ () {
#ifdef DEBUG_METHODS_ENABLED
_set_const($ifconst true$$ifnoconst false$);
_generate_argument_types($argc$);
#else
set_argument_count($argc$);
#endif
$ifret _set_returns(true); $
}
};
template<class T $ifret ,class R$ $ifargs ,$ $arg, class P@$>
MethodBind* create_method_bind($ifret R$ $ifnoret void$ (T::*p_method)($arg, P@$) $ifconst const$ ) {
MethodBind$argc$$ifret R$$ifconst C$<T $ifret ,R$ $ifargs ,$ $arg, P@$> * a = memnew( (MethodBind$argc$$ifret R$$ifconst C$<T $ifret ,R$ $ifargs ,$ $arg, P@$>) );
a->method=p_method;
return a;
}
#endif
"""
# Type-erased binder: used when TYPED_METHOD_BIND is NOT defined; stores
# the member pointer on a fake __UnexistingClass through a union cast.
template = """
#ifndef TYPED_METHOD_BIND
$iftempl template<$ $ifret class R$ $ifretargs ,$ $arg, class P@$ $iftempl >$
class MethodBind$argc$$ifret R$$ifconst C$ : public MethodBind {
public:
StringName type_name;
$ifret R$ $ifnoret void$ (__UnexistingClass::*method)($arg, P@$) $ifconst const$;
#ifdef DEBUG_METHODS_ENABLED
virtual Variant::Type _gen_argument_type(int p_arg) const { return _get_argument_type(p_arg); }
virtual GodotTypeInfo::Metadata get_argument_meta(int p_arg) const {
$ifret if (p_arg==-1) return GetTypeInfo<R>::METADATA;$
$arg if (p_arg==(@-1)) return GetTypeInfo<P@>::METADATA;
$
return GodotTypeInfo::METADATA_NONE;
}
Variant::Type _get_argument_type(int p_argument) const {
$ifret if (p_argument==-1) return (Variant::Type)GetTypeInfo<R>::VARIANT_TYPE;$
$arg if (p_argument==(@-1)) return (Variant::Type)GetTypeInfo<P@>::VARIANT_TYPE;
$
return Variant::NIL;
}
virtual PropertyInfo _gen_argument_type_info(int p_argument) const {
$ifret if (p_argument==-1) return GetTypeInfo<R>::get_class_info();$
$arg if (p_argument==(@-1)) return GetTypeInfo<P@>::get_class_info();
$
return PropertyInfo();
}
#endif
virtual String get_instance_class() const {
return type_name;
}
virtual Variant call(Object* p_object,const Variant** p_args,int p_arg_count, Callable::CallError& r_error) {
__UnexistingClass *instance = (__UnexistingClass*)p_object;
r_error.error=Callable::CallError::CALL_OK;
#ifdef DEBUG_METHODS_ENABLED
ERR_FAIL_COND_V(!instance,Variant());
if (p_arg_count>get_argument_count()) {
r_error.error=Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
r_error.argument=get_argument_count();
return Variant();
}
if (p_arg_count<(get_argument_count()-get_default_argument_count())) {
r_error.error=Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
r_error.argument=get_argument_count()-get_default_argument_count();
return Variant();
}
$arg CHECK_ARG(@);
$
#endif
$ifret Variant ret = $(instance->*method)($arg, _VC(@)$);
$ifret return Variant(ret);$
$ifnoret return Variant();$
}
#ifdef PTRCALL_ENABLED
virtual void ptrcall(Object*p_object,const void** p_args,void *r_ret) {
__UnexistingClass *instance = (__UnexistingClass*)p_object;
$ifret PtrToArg<R>::encode( $ (instance->*method)($arg, PtrToArg<P@>::convert(p_args[@-1])$) $ifret ,r_ret) $ ;
}
#endif
MethodBind$argc$$ifret R$$ifconst C$ () {
#ifdef DEBUG_METHODS_ENABLED
_set_const($ifconst true$$ifnoconst false$);
_generate_argument_types($argc$);
#else
set_argument_count($argc$);
#endif
$ifret _set_returns(true); $
}
};
template<class T $ifret ,class R$ $ifargs ,$ $arg, class P@$>
MethodBind* create_method_bind($ifret R$ $ifnoret void$ (T::*p_method)($arg, P@$) $ifconst const$ ) {
MethodBind$argc$$ifret R$$ifconst C$ $iftempl <$ $ifret R$ $ifretargs ,$ $arg, P@$ $iftempl >$ * a = memnew( (MethodBind$argc$$ifret R$$ifconst C$ $iftempl <$ $ifret R$ $ifretargs ,$ $arg, P@$ $iftempl >$) );
union {
$ifret R$ $ifnoret void$ (T::*sm)($arg, P@$) $ifconst const$;
$ifret R$ $ifnoret void$ (__UnexistingClass::*dm)($arg, P@$) $ifconst const$;
} u;
u.sm=p_method;
a->method=u.dm;
a->type_name=T::get_class_static();
return a;
}
#endif
"""
# Free-function binder: binds a free function whose first parameter is a
# pointer to the instance, instead of a member function pointer.
template_typed_free_func = """
#ifdef TYPED_METHOD_BIND
template<class T $ifret ,class R$ $ifargs ,$ $arg, class P@$>
class FunctionBind$argc$$ifret R$$ifconst C$ : public MethodBind {
public:
$ifret R$ $ifnoret void$ (*method) ($ifconst const$ T *$ifargs , $$arg, P@$);
#ifdef DEBUG_METHODS_ENABLED
virtual Variant::Type _gen_argument_type(int p_arg) const { return _get_argument_type(p_arg); }
virtual GodotTypeInfo::Metadata get_argument_meta(int p_arg) const {
$ifret if (p_arg==-1) return GetTypeInfo<R>::METADATA;$
$arg if (p_arg==(@-1)) return GetTypeInfo<P@>::METADATA;
$
return GodotTypeInfo::METADATA_NONE;
}
Variant::Type _get_argument_type(int p_argument) const {
$ifret if (p_argument==-1) return (Variant::Type)GetTypeInfo<R>::VARIANT_TYPE;$
$arg if (p_argument==(@-1)) return (Variant::Type)GetTypeInfo<P@>::VARIANT_TYPE;
$
return Variant::NIL;
}
virtual PropertyInfo _gen_argument_type_info(int p_argument) const {
$ifret if (p_argument==-1) return GetTypeInfo<R>::get_class_info();$
$arg if (p_argument==(@-1)) return GetTypeInfo<P@>::get_class_info();
$
return PropertyInfo();
}
#endif
virtual String get_instance_class() const {
return T::get_class_static();
}
virtual Variant call(Object* p_object,const Variant** p_args,int p_arg_count, Callable::CallError& r_error) {
T *instance=Object::cast_to<T>(p_object);
r_error.error=Callable::CallError::CALL_OK;
#ifdef DEBUG_METHODS_ENABLED
ERR_FAIL_COND_V(!instance,Variant());
if (p_arg_count>get_argument_count()) {
r_error.error=Callable::CallError::CALL_ERROR_TOO_MANY_ARGUMENTS;
r_error.argument=get_argument_count();
return Variant();
}
if (p_arg_count<(get_argument_count()-get_default_argument_count())) {
r_error.error=Callable::CallError::CALL_ERROR_TOO_FEW_ARGUMENTS;
r_error.argument=get_argument_count()-get_default_argument_count();
return Variant();
}
$arg CHECK_ARG(@);
$
#endif
$ifret Variant ret = $(method)(instance$ifargs , $$arg, _VC(@)$);
$ifret return Variant(ret);$
$ifnoret return Variant();$
}
#ifdef PTRCALL_ENABLED
virtual void ptrcall(Object*p_object,const void** p_args,void *r_ret) {
T *instance=Object::cast_to<T>(p_object);
$ifret PtrToArg<R>::encode( $ (method)(instance$ifargs , $$arg, PtrToArg<P@>::convert(p_args[@-1])$) $ifret ,r_ret)$ ;
}
#endif
FunctionBind$argc$$ifret R$$ifconst C$ () {
#ifdef DEBUG_METHODS_ENABLED
_set_const($ifconst true$$ifnoconst false$);
_generate_argument_types($argc$);
#else
set_argument_count($argc$);
#endif
$ifret _set_returns(true); $
}
};
template<class T $ifret ,class R$ $ifargs ,$ $arg, class P@$>
MethodBind* create_method_bind($ifret R$ $ifnoret void$ (*p_method)($ifconst const$ T *$ifargs , $$arg, P@$) ) {
FunctionBind$argc$$ifret R$$ifconst C$<T $ifret ,R$ $ifargs ,$ $arg, P@$> * a = memnew( (FunctionBind$argc$$ifret R$$ifconst C$<T $ifret ,R$ $ifargs ,$ $arg, P@$>) );
a->method=p_method;
return a;
}
#endif
"""
def make_version(template, nargs, argmax, const, ret):
    """Expand one binder template for a given arity/qualifier combination.

    The template language uses ``$...$`` spans.  Each span holds a
    command and an optional payload, separated by the first space:

    * ``argc``                    -> replaced by ``nargs``
    * ``ifret`` / ``ifnoret``     -> payload emitted iff ``ret`` / not ``ret``
    * ``ifconst`` / ``ifnoconst`` -> payload emitted iff ``const`` / not
    * ``ifargs``                  -> payload emitted iff ``nargs`` non-zero
    * ``ifretargs``               -> payload emitted iff ``ret`` and ``nargs``
    * ``iftempl``                 -> payload emitted iff ``nargs > 0 or ret``
    * ``arg,``  -> payload repeated for 1..nargs, ``@`` -> index, ", "-joined
    * ``arg``   -> payload repeated for 1..nargs, ``@`` -> index
    * ``noarg`` -> payload repeated for nargs+1..argmax, ``@`` -> index

    Text after an unmatched trailing ``$`` is dropped (same as before).

    Fixes over the original: the command dispatch mixed bare ``if`` and
    ``elif`` arbitrarily (correct only because commands are mutually
    exclusive) and built the output by quadratic ``+=`` concatenation;
    both are normalized here without changing the produced text.
    """
    out = []
    pos = 0
    while True:
        start = template.find("$", pos)
        if start == -1:
            out.append(template[pos:])
            break
        out.append(template[pos:start])
        end = template.find("$", start + 1)
        if end == -1:
            break  # unmatched "$": silently drop the remainder
        macro = template[start + 1 : end]
        space = macro.find(" ")
        if space != -1:
            cmd, data = macro[:space], macro[space + 1 :]
        else:
            cmd, data = macro, ""
        if cmd == "argc":
            out.append(str(nargs))
        elif cmd == "ifret" and ret:
            out.append(data)
        elif cmd == "ifargs" and nargs:
            out.append(data)
        elif cmd == "ifretargs" and nargs and ret:
            out.append(data)
        elif cmd == "ifconst" and const:
            out.append(data)
        elif cmd == "ifnoconst" and not const:
            out.append(data)
        elif cmd == "ifnoret" and not ret:
            out.append(data)
        elif cmd == "iftempl" and (nargs > 0 or ret):
            out.append(data)
        elif cmd == "arg,":
            out.append(", ".join(data.replace("@", str(i))
                                 for i in range(1, nargs + 1)))
        elif cmd == "arg":
            for i in range(1, nargs + 1):
                out.append(data.replace("@", str(i)))
        elif cmd == "noarg":
            for i in range(nargs + 1, argmax + 1):
                out.append(data.replace("@", str(i)))
        pos = end + 1
    return "".join(out)
def run(target, source, env):
    """SCons action: generate the method-binder C++ headers.

    Expands every (arity, const, ret) combination of the module templates
    and writes three files:
      target[0] - typed + untyped binders for arities 0..versions_ext-1
      target[1] - the remaining (extended) arities
      target[2] - free-function binders, for all arities
    """
    versions = 15
    versions_ext = 6
    text = ""
    text_ext = ""
    text_free_func = "#ifndef METHOD_BIND_FREE_FUNC_H\n#define METHOD_BIND_FREE_FUNC_H\n"
    text_free_func += "\n//including this header file allows method binding to use free functions\n"
    text_free_func += (
        "//note that the free function must have a pointer to an instance of the class as its first parameter\n"
    )
    for i in range(0, versions + 1):
        t = ""
        # Every (const, ret) combination for both the type-erased and
        # typed member-function templates at arity i.
        t += make_version(template, i, versions, False, False)
        t += make_version(template_typed, i, versions, False, False)
        t += make_version(template, i, versions, False, True)
        t += make_version(template_typed, i, versions, False, True)
        t += make_version(template, i, versions, True, False)
        t += make_version(template_typed, i, versions, True, False)
        t += make_version(template, i, versions, True, True)
        t += make_version(template_typed, i, versions, True, True)
        if i >= versions_ext:
            text_ext += t
        else:
            text += t
        text_free_func += make_version(template_typed_free_func, i, versions, False, False)
        text_free_func += make_version(template_typed_free_func, i, versions, False, True)
        text_free_func += make_version(template_typed_free_func, i, versions, True, False)
        text_free_func += make_version(template_typed_free_func, i, versions, True, True)
    text_free_func += "#endif"
    with open(target[0], "w") as f:
        f.write(text)
    with open(target[1], "w") as f:
        f.write(text_ext)
    with open(target[2], "w") as f:
        f.write(text_free_func)


if __name__ == "__main__":
    # Allows SCons to invoke this generator as a subprocess.
    from platform_methods import subprocess_main

    subprocess_main(globals())
|
Paulloz/godot
|
core/make_binders.py
|
Python
|
mit
| 12,675
|
# Copyright 2014: Intel Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally.benchmark.scenarios.neutron import network
from tests.unit import test
# Dotted path to the scenario class; used to build mock.patch targets below.
NEUTRON_NETWORKS = "rally.benchmark.scenarios.neutron.network.NeutronNetworks"
class NeutronNetworksTestCase(test.TestCase):
@mock.patch(NEUTRON_NETWORKS + "._list_networks")
@mock.patch(NEUTRON_NETWORKS + "._create_network")
def test_create_and_list_networks(self, mock_create, mock_list):
neutron_scenario = network.NeutronNetworks()
# Default options
network_create_args = {}
neutron_scenario.create_and_list_networks(
network_create_args=network_create_args)
mock_create.assert_called_once_with(network_create_args)
mock_list.assert_called_once_with()
mock_create.reset_mock()
mock_list.reset_mock()
# Explicit network name is specified
network_create_args = {"name": "given-name"}
neutron_scenario.create_and_list_networks(
network_create_args=network_create_args)
mock_create.assert_called_once_with(network_create_args)
mock_list.assert_called_once_with()
@mock.patch(NEUTRON_NETWORKS + "._update_network")
@mock.patch(NEUTRON_NETWORKS + "._create_network", return_value={
"network": {
"id": "network-id",
"name": "network-name",
"admin_state_up": False
}
})
def test_create_and_update_networks(self,
mock_create_network,
mock_update_network):
scenario = network.NeutronNetworks()
network_update_args = {"name": "_updated", "admin_state_up": True}
# Default options
scenario.create_and_update_networks(
network_update_args=network_update_args)
mock_create_network.assert_called_once_with({})
mock_update_network.assert_has_calls(
[mock.call(mock_create_network.return_value, network_update_args)])
mock_create_network.reset_mock()
mock_update_network.reset_mock()
# Explicit network name is specified
network_create_args = {"name": "network-name", "admin_state_up": False}
scenario.create_and_update_networks(
network_create_args=network_create_args,
network_update_args=network_update_args)
mock_create_network.assert_called_once_with(network_create_args)
mock_update_network.assert_has_calls(
[mock.call(mock_create_network.return_value, network_update_args)])
@mock.patch(NEUTRON_NETWORKS + "._delete_network")
@mock.patch(NEUTRON_NETWORKS + "._create_network")
def test_create_and_delete_networks(self, mock_create, mock_delete):
neutron_scenario = network.NeutronNetworks()
# Default options
network_create_args = {}
neutron_scenario.create_and_delete_networks()
mock_create.assert_called_once_with(network_create_args)
self.assertEqual(1, mock_delete.call_count)
mock_create.reset_mock()
mock_delete.reset_mock()
# Explict network name is specified
network_create_args = {"name": "given-name"}
neutron_scenario.create_and_delete_networks(
network_create_args=network_create_args)
mock_create.assert_called_once_with(network_create_args)
self.assertEqual(1, mock_delete.call_count)
@mock.patch(NEUTRON_NETWORKS + "._list_subnets")
@mock.patch(NEUTRON_NETWORKS + "._create_network_and_subnets")
def test_create_and_list_subnets(self,
mock_create_network_and_subnets,
mock_list):
scenario = network.NeutronNetworks()
subnets_per_network = 4
subnet_cidr_start = "default_cidr"
mock_create_network_and_subnets.reset_mock()
mock_list.reset_mock()
# Default options
scenario.create_and_list_subnets(
subnets_per_network=subnets_per_network,
subnet_cidr_start=subnet_cidr_start)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, {}, subnets_per_network,
subnet_cidr_start)])
mock_list.assert_called_once_with()
mock_create_network_and_subnets.reset_mock()
mock_list.reset_mock()
# Custom options
scenario.create_and_list_subnets(
subnet_create_args={"allocation_pools": []},
subnet_cidr_start="custom_cidr",
subnets_per_network=subnets_per_network)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, {"allocation_pools": []},
subnets_per_network, "custom_cidr")])
mock_list.assert_called_once_with()
@mock.patch(NEUTRON_NETWORKS + "._update_subnet")
@mock.patch(NEUTRON_NETWORKS + "._create_network_and_subnets")
def test_create_and_update_subnets(self,
mock_create_network_and_subnets,
mock_update_subnet):
scenario = network.NeutronNetworks()
subnets_per_network = 1
subnet_cidr_start = "default_cidr"
net = {
"network": {
"id": "network-id"
}
}
subnet = {
"subnet": {
"name": "subnet-name",
"id": "subnet-id",
"enable_dhcp": False
}
}
mock_create_network_and_subnets.return_value = (net, [subnet])
subnet_update_args = {"name": "_updated", "enable_dhcp": True}
mock_create_network_and_subnets.reset_mock()
mock_update_subnet.reset_mock()
# Default options
scenario.create_and_update_subnets(
subnet_update_args=subnet_update_args,
subnet_cidr_start=subnet_cidr_start,
subnets_per_network=subnets_per_network)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, {}, subnets_per_network, subnet_cidr_start)])
mock_update_subnet.assert_has_calls(
[mock.call(subnet, subnet_update_args)])
mock_create_network_and_subnets.reset_mock()
mock_update_subnet.reset_mock()
# Custom options
subnet_cidr_start = "custom_cidr"
scenario.create_and_update_subnets(
subnet_update_args=subnet_update_args,
subnet_create_args={"allocation_pools": []},
subnet_cidr_start=subnet_cidr_start,
subnets_per_network=subnets_per_network)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, {"allocation_pools": []}, subnets_per_network,
subnet_cidr_start)])
mock_update_subnet.assert_has_calls(
[mock.call(subnet, subnet_update_args)])
@mock.patch(NEUTRON_NETWORKS + "._delete_subnet")
@mock.patch(NEUTRON_NETWORKS + "._create_network_and_subnets")
def test_create_and_delete_subnets(self,
mock_create_network_and_subnets,
mock_delete):
scenario = network.NeutronNetworks()
net = {
"network": {
"id": "network-id"
}
}
subnet = {
"subnet": {
"name": "subnet-name",
"id": "subnet-id",
"enable_dhcp": False
}
}
mock_create_network_and_subnets.return_value = (net, [subnet])
subnets_per_network = 1
subnet_cidr_start = "default_cidr"
mock_create_network_and_subnets.reset_mock()
mock_delete.reset_mock()
# Default options
scenario.create_and_delete_subnets(
subnets_per_network=subnets_per_network,
subnet_cidr_start=subnet_cidr_start)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, {}, subnets_per_network,
subnet_cidr_start)])
mock_delete.assert_has_calls([mock.call(subnet)])
mock_create_network_and_subnets.reset_mock()
mock_delete.reset_mock()
# Custom options
subnet_cidr_start = "custom_cidr"
scenario.create_and_delete_subnets(
subnet_create_args={"allocation_pools": []},
subnet_cidr_start="custom_cidr",
subnets_per_network=subnets_per_network)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, {"allocation_pools": []}, subnets_per_network,
subnet_cidr_start)])
mock_delete.assert_has_calls([mock.call(subnet)])
@mock.patch(NEUTRON_NETWORKS + "._list_routers")
@mock.patch(NEUTRON_NETWORKS + "._create_router")
@mock.patch(NEUTRON_NETWORKS + "._create_network_and_subnets")
@mock.patch(NEUTRON_NETWORKS + ".clients")
def test_create_and_list_routers(self,
mock_clients,
mock_create_network_and_subnets,
mock_create_router,
mock_list):
scenario = network.NeutronNetworks()
subnets_per_network = 1
subnet_cidr_start = "default_cidr"
net = {
"network": {
"id": "network-id"
}
}
subnet = {
"subnet": {
"name": "subnet-name",
"id": "subnet-id",
"enable_dhcp": False
}
}
mock_create_network_and_subnets.return_value = (net, [subnet])
mock_clients("neutron").add_interface_router = mock.Mock()
router = {
"router": {
"name": "router-name",
"id": "router-id"
}
}
mock_create_router.return_value = router
# Default options
scenario.create_and_list_routers(
subnet_cidr_start=subnet_cidr_start,
subnets_per_network=subnets_per_network)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, {}, subnets_per_network, subnet_cidr_start)])
mock_create_router.assert_has_calls(
[mock.call({})] * subnets_per_network)
mock_clients("neutron").add_interface_router.assert_has_calls(
[mock.call(router["router"]["id"],
{"subnet_id": subnet["subnet"]["id"]})
] * subnets_per_network)
mock_create_network_and_subnets.reset_mock()
mock_create_router.reset_mock()
mock_clients("neutron").add_interface_router.reset_mock()
mock_list.reset_mock()
# Custom options
subnet_cidr_start = "custom_cidr"
subnet_create_args = {"allocation_pools": []}
router_create_args = {"admin_state_up": False}
scenario.create_and_list_routers(
subnet_create_args=subnet_create_args,
subnet_cidr_start="custom_cidr",
subnets_per_network=subnets_per_network,
router_create_args=router_create_args)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, subnet_create_args, subnets_per_network,
subnet_cidr_start)])
mock_create_router.assert_has_calls(
[mock.call(router_create_args)] * subnets_per_network)
mock_clients("neutron").add_interface_router.assert_has_calls(
[mock.call(router["router"]["id"],
{"subnet_id": subnet["subnet"]["id"]})
] * subnets_per_network)
mock_list.assert_called_once_with()
@mock.patch(NEUTRON_NETWORKS + "._update_router")
@mock.patch(NEUTRON_NETWORKS + "._create_router")
@mock.patch(NEUTRON_NETWORKS + "._create_network_and_subnets")
@mock.patch(NEUTRON_NETWORKS + ".clients")
def test_create_and_update_routers(self,
mock_clients,
mock_create_network_and_subnets,
mock_create_router,
mock_update_router):
scenario = network.NeutronNetworks()
subnets_per_network = 1
subnet_cidr_start = "default_cidr"
net = {
"network": {
"id": "network-id"
}
}
subnet = {
"subnet": {
"name": "subnet-name",
"id": "subnet-id",
"enable_dhcp": False
}
}
router = {
"router": {
"name": "router-name",
"id": "router-id"
}
}
router_update_args = {
"name": "_updated",
"admin_state_up": False
}
mock_create_router.return_value = router
mock_create_network_and_subnets.return_value = (net, [subnet])
mock_clients("neutron").add_interface_router = mock.Mock()
# Default options
scenario.create_and_update_routers(
router_update_args=router_update_args,
subnet_cidr_start=subnet_cidr_start,
subnets_per_network=subnets_per_network)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, {}, subnets_per_network, subnet_cidr_start)])
mock_create_router.assert_has_calls(
[mock.call({})] * subnets_per_network)
mock_clients("neutron").add_interface_router.assert_has_calls(
[mock.call(router["router"]["id"],
{"subnet_id": subnet["subnet"]["id"]})
] * subnets_per_network)
mock_update_router.assert_has_calls(
[mock.call(router, router_update_args)
] * subnets_per_network)
mock_create_network_and_subnets.reset_mock()
mock_create_router.reset_mock()
mock_clients("neutron").add_interface_router.reset_mock()
mock_update_router.reset_mock()
# Custom options
subnet_cidr_start = "custom_cidr"
subnet_create_args = {"allocation_pools": []}
router_create_args = {"admin_state_up": False}
scenario.create_and_update_routers(
router_update_args=router_update_args,
subnet_create_args=subnet_create_args,
subnet_cidr_start="custom_cidr",
subnets_per_network=subnets_per_network,
router_create_args=router_create_args)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, subnet_create_args, subnets_per_network,
subnet_cidr_start)])
mock_create_router.assert_has_calls(
[mock.call(router_create_args)] * subnets_per_network)
mock_clients("neutron").add_interface_router.assert_has_calls(
[mock.call(router["router"]["id"],
{"subnet_id": subnet["subnet"]["id"]})
] * subnets_per_network)
mock_update_router.assert_has_calls(
[mock.call(router, router_update_args)
] * subnets_per_network)
@mock.patch(NEUTRON_NETWORKS + "._delete_router")
@mock.patch(NEUTRON_NETWORKS + "._create_router")
@mock.patch(NEUTRON_NETWORKS + "._create_network_and_subnets")
@mock.patch(NEUTRON_NETWORKS + ".clients")
def test_create_and_delete_routers(self,
mock_clients,
mock_create_network_and_subnets,
mock_create_router,
mock_delete_router):
scenario = network.NeutronNetworks()
subnets_per_network = 1
subnet_cidr_start = "default_cidr"
net = {
"network": {
"id": "network-id"
}
}
subnet = {
"subnet": {
"name": "subnet-name",
"id": "subnet-id",
"enable_dhcp": False
}
}
router = {
"router": {
"name": "router-name",
"id": "router-id"
}
}
mock_create_router.return_value = router
mock_create_network_and_subnets.return_value = (net, [subnet])
mock_clients("neutron").add_interface_router = mock.Mock()
# Default options
scenario.create_and_delete_routers(
subnet_cidr_start=subnet_cidr_start,
subnets_per_network=subnets_per_network)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, {}, subnets_per_network, subnet_cidr_start)])
mock_create_router.assert_has_calls(
[mock.call({})] * subnets_per_network)
mock_clients("neutron").add_interface_router.assert_has_calls(
[mock.call(router["router"]["id"],
{"subnet_id": subnet["subnet"]["id"]})
] * subnets_per_network)
mock_delete_router.assert_has_calls(
[mock.call(router)] * subnets_per_network)
mock_create_network_and_subnets.reset_mock()
mock_create_router.reset_mock()
mock_clients("neutron").add_interface_router.reset_mock()
mock_delete_router.reset_mock()
# Custom options
subnet_cidr_start = "custom_cidr"
subnet_create_args = {"allocation_pools": []}
router_create_args = {"admin_state_up": False}
scenario.create_and_delete_routers(
subnet_create_args=subnet_create_args,
subnet_cidr_start="custom_cidr",
subnets_per_network=subnets_per_network,
router_create_args=router_create_args)
mock_create_network_and_subnets.assert_has_calls(
[mock.call({}, subnet_create_args, subnets_per_network,
subnet_cidr_start)])
mock_create_router.assert_has_calls(
[mock.call(router_create_args)] * subnets_per_network)
mock_clients("neutron").add_interface_router.assert_has_calls(
[mock.call(router["router"]["id"],
{"subnet_id": subnet["subnet"]["id"]})
] * subnets_per_network)
mock_delete_router.assert_has_calls(
[mock.call(router)] * subnets_per_network)
@mock.patch(NEUTRON_NETWORKS + "._generate_random_name")
@mock.patch(NEUTRON_NETWORKS + "._list_ports")
@mock.patch(NEUTRON_NETWORKS + "._create_port")
@mock.patch(NEUTRON_NETWORKS + "._create_network")
def test_create_and_list_ports(self,
mock_create_network,
mock_create_port,
mock_list,
mock_random_name):
scenario = network.NeutronNetworks()
mock_random_name.return_value = "random-name"
net = {"network": {"id": "fake-id"}}
mock_create_network.return_value = net
ports_per_network = 10
self.assertRaises(TypeError, scenario.create_and_list_ports)
mock_create_network.reset_mock()
# Defaults
scenario.create_and_list_ports(ports_per_network=ports_per_network)
mock_create_network.assert_called_once_with({})
self.assertEqual(mock_create_port.mock_calls,
[mock.call(net, {})] * ports_per_network)
mock_list.assert_called_once_with()
mock_create_network.reset_mock()
mock_create_port.reset_mock()
mock_list.reset_mock()
# Custom options
scenario.create_and_list_ports(
network_create_args={"name": "given-name"},
port_create_args={"allocation_pools": []},
ports_per_network=ports_per_network)
mock_create_network.assert_called_once_with({"name": "given-name"})
self.assertEqual(
mock_create_port.mock_calls,
[mock.call(net, {"allocation_pools": []})] * ports_per_network)
mock_list.assert_called_once_with()
@mock.patch(NEUTRON_NETWORKS + "._generate_random_name")
@mock.patch(NEUTRON_NETWORKS + "._update_port")
@mock.patch(NEUTRON_NETWORKS + "._create_port", return_value={
"port": {
"name": "port-name",
"id": "port-id",
"admin_state_up": True
}
})
@mock.patch(NEUTRON_NETWORKS + "._create_network", return_value={
"network": {
"id": "fake-id"
}
})
def test_create_and_update_ports(self,
mock_create_network,
mock_create_port,
mock_update_port,
mock_random_name):
scenario = network.NeutronNetworks()
mock_random_name.return_value = "random-name"
ports_per_network = 10
port_update_args = {
"name": "_updated",
"admin_state_up": False
}
# Defaults
scenario.create_and_update_ports(
port_update_args=port_update_args,
ports_per_network=ports_per_network)
mock_create_network.assert_called_once_with({})
mock_create_port.assert_has_calls(
[mock.call({"network": {"id": "fake-id"}},
{})] * ports_per_network)
mock_update_port.assert_has_calls(
[mock.call(mock_create_port.return_value, port_update_args)
] * ports_per_network)
mock_create_network.reset_mock()
mock_create_port.reset_mock()
mock_update_port.reset_mock()
# Custom options
scenario.create_and_update_ports(
port_update_args=port_update_args,
network_create_args={"name": "given-name"},
port_create_args={"allocation_pools": []},
ports_per_network=ports_per_network)
mock_create_network.assert_called_once_with({"name": "given-name"})
mock_create_port.assert_has_calls(
[mock.call({"network": {"id": "fake-id"}},
{"allocation_pools": []})] * ports_per_network)
mock_update_port.assert_has_calls(
[mock.call(mock_create_port.return_value, port_update_args)
] * ports_per_network)
    @mock.patch(NEUTRON_NETWORKS + "._generate_random_name")
    @mock.patch(NEUTRON_NETWORKS + "._delete_port")
    @mock.patch(NEUTRON_NETWORKS + "._create_port")
    @mock.patch(NEUTRON_NETWORKS + "._create_network")
    def test_create_and_delete_ports(self,
                                     mock_create_network,
                                     mock_create_port,
                                     mock_delete,
                                     mock_random_name):
        """create_and_delete_ports must create one network, then create and
        delete `ports_per_network` ports; `ports_per_network` is required."""
        scenario = network.NeutronNetworks()
        mock_random_name.return_value = "random-name"
        net = {"network": {"id": "fake-id"}}
        mock_create_network.return_value = net
        ports_per_network = 10
        # Calling without ports_per_network is a TypeError (required kwarg).
        self.assertRaises(TypeError, scenario.create_and_delete_ports)
        mock_create_network.reset_mock()
        # Default options
        scenario.create_and_delete_ports(ports_per_network=ports_per_network)
        mock_create_network.assert_called_once_with({})
        self.assertEqual(mock_create_port.mock_calls,
                         [mock.call(net, {})] * ports_per_network)
        # mock_create_port() returns the mock's return_value -- the object
        # each _delete_port call is expected to have received.
        self.assertEqual(mock_delete.mock_calls,
                         [mock.call(mock_create_port())] * ports_per_network)
        mock_create_network.reset_mock()
        mock_create_port.reset_mock()
        mock_delete.reset_mock()
        # Custom options
        scenario.create_and_delete_ports(
            network_create_args={"name": "given-name"},
            port_create_args={"allocation_pools": []},
            ports_per_network=ports_per_network)
        mock_create_network.assert_called_once_with({"name": "given-name"})
        self.assertEqual(
            mock_create_port.mock_calls,
            [mock.call(net, {"allocation_pools": []})] * ports_per_network)
        self.assertEqual(mock_delete.mock_calls,
                         [mock.call(mock_create_port())] * ports_per_network)
|
pandeyop/rally
|
tests/unit/benchmark/scenarios/neutron/test_network.py
|
Python
|
apache-2.0
| 24,839
|
# NOTE: Python 2 syntax (print statements).

# reverse the list in place -- list.reverse() mutates the list and returns None
mylist = [1,2,3]
mylist.reverse()
print mylist
# [3,2,1]

# iterate in reverse -- reversed() yields items lazily without copying the list
for e in reversed([1,2,3]):
    print e,
# 3 2 1
|
jabbalaci/PrimCom
|
data/python/my_reverse.py
|
Python
|
gpl-2.0
| 156
|
# some examples on views and copies from the NumPy tutorial
# NOTE: Python 2 syntax (print statements).
import numpy as np

a = np.arange(12)

# assignments: plain assignment binds another name to the SAME array object
print "assignments"
b = a           # no new object is created -- b and a point to the same object
print b is a
b.shape = 3,4   # changes the shape of a too
print a.shape
print " "
print a
print b
print " "
b[1,1] = -1     # changes a[1,1] too
print a
print " "

# views / shallow copy: new array object sharing the same underlying data
print "views"
c = a[:]        # or c = a.view()
print c is a          # False: c is a distinct array object...
print c.base is a     # ...but its data buffer belongs to a
c.shape = 12          # reshaping the view does NOT reshape a
print a
print c
print " "
c[2] = 100.0          # writing through the view DOES modify a's data
print a
print c
print " "

# deep copy: independent array with its own data buffer
print "deep copying"
d = a.copy()
print a
print d
print " "
d[:,:] = 0.0          # zeroing the copy leaves a untouched
print a
print d
|
bt3gl/Numerical-Methods-for-Physics
|
others/python/numpy-basics/numpy-copying.py
|
Python
|
apache-2.0
| 667
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from math import sin, cos, pi, trunc, modf, floor, sqrt, atan2
from json import load
# Grid geometry: cell size (degrees per cell) and origin of the grid in
# WGS84 decimal degrees.  Used by get_coord_grid()/get_grid_coord().
GRID_SIZE_LON=0.0001
GRID_SIZE_LAT=0.0001
GRID_ORIG_LON=0
GRID_ORIG_LAT=0
class DegreesToMeter(object):
    """Look up meters-per-degree conversion factors from a JSON table.

    The table maps keys of the form "<deg>d<min>m" (10-arcminute buckets)
    to dicts holding 'minutelatinmeters' and 'minuteloninmeters' values;
    both getters scale the per-minute value by 60 to a per-degree value.
    """

    def __init__(self, filename):
        # Context manager guarantees the file is closed even if the JSON
        # parse raises (the original leaked the handle in that case).
        with open(filename) as f:
            self.data = load(f)

    @staticmethod
    def _key(lat):
        # Build the "<deg>d<min>m" lookup key, truncating the fractional
        # degrees down to a multiple of 10 minutes (shared by both getters).
        fractional, integral = modf(lat)
        return "%dd%dm" % (integral, int(abs(fractional) * 6) * 10)

    def getmetersperlat(self, lat):
        """Return meters per degree of latitude at latitude *lat*."""
        return self.data[self._key(lat)]["minutelatinmeters"] * 60

    def getmetersperlon(self, lat):
        """Return meters per degree of longitude at latitude *lat*."""
        return self.data[self._key(lat)]["minuteloninmeters"] * 60
class BoundingBox(object):
    """Axis-aligned lon/lat bounding box grown incrementally via include()."""

    def __init__(self):
        # Start with an inverted (empty) box so the first include() wins.
        self.min_lon, self.max_lon = 180, -180
        self.min_lat, self.max_lat = 90, -90

    def include(self, lon, lat):
        """Expand the box just enough to contain the point (lon, lat)."""
        if lon < self.min_lon:
            self.min_lon = lon
        if lon > self.max_lon:
            self.max_lon = lon
        if lat < self.min_lat:
            self.min_lat = lat
        if lat > self.max_lat:
            self.max_lat = lat

    def __str__(self):
        return "min lon: %f, min lat: %f, max lon: %f, max lat: %f" % (
            self.min_lon, self.min_lat, self.max_lon, self.max_lat)
class Cell(object):
    """A grid cell: center coordinates plus angle/distance to the antenna."""

    def __init__(self, x, y, angle, dist):
        self.x, self.y = x, y
        self.angle, self.dist = angle, dist

    def __str__(self):
        return "{x: %s, y:%s, angle: %s, dist:%s}" % (
            self.x, self.y, self.angle, self.dist)
def get_coord_grid(x, y):
    """
    Returns center coordinate of grid
    @param x the x grid coordinate
    @param y the y grid coordinate
    @return tuple of lon lat wgs84 coords
    """
    # The +0.5 moves from the cell's corner to its center.
    center_x = x + 0.5
    center_y = y + 0.5
    return (GRID_ORIG_LON + center_x * GRID_SIZE_LON,
            GRID_ORIG_LAT + center_y * GRID_SIZE_LAT)
def get_grid_coord(lon, lat):
    """
    @param lon wgs84 decimal longitude
    @param lat wgs84 decimal latitude
    @return the grid coord of where this coord is in
    """
    # floor() (not int()) so coordinates below the origin map correctly.
    gx = floor((lon - GRID_ORIG_LON) / GRID_SIZE_LON)
    gy = floor((lat - GRID_ORIG_LAT) / GRID_SIZE_LAT)
    return (int(gx), int(gy))
def cell_for_grid_coord(gx, gy, lon, lat, az1, az2, powerdist, check = True):
    """Build a Cell for grid coord (gx, gy) relative to an antenna.

    (lon, lat) is the antenna position; az1/az2 are the sector edge angles
    in radians (as produced by gridcalc); powerdist is the max range in
    meters.  When *check* is true, returns None if the cell center is out
    of range or outside the [az1, az2] sector.
    """
    meterperdegreehorz = d2m.getmetersperlon(lat)
    meterperdegreevert = d2m.getmetersperlat(lat)
    cellcoords = get_coord_grid(gx, gy)
    # Offset of the cell center from the antenna, in degrees.
    dxant=cellcoords[0]-lon
    dyant=cellcoords[1]-lat
    distant=sqrt((dxant**2) + (dyant**2))
    # NOTE(review): distant == 0 if the antenna sits exactly on a cell
    # center -- the division below would raise ZeroDivisionError; confirm
    # callers cannot hit that case.
    normx=dxant/distant
    normy=dyant/distant
    # Bearing of the cell seen from the antenna, normalized to [0, 2*pi).
    angle = atan2(normy, normx) % (2 * pi)
    # Metric distance: scale each axis by its meters-per-degree factor first.
    dist=sqrt(((dxant * meterperdegreehorz)**2) + ((dyant * meterperdegreevert)**2))
    if check:
        if dist > powerdist:
            return
        # Sector membership; the second condition handles a sector that
        # wraps past angle 0 (az1 > az2).
        if (az1 <= az2) and ((angle < az1) or (angle > az2)):
            return
        if (az1 > az2) and (angle < az1) and (angle > az2):
            return
    return Cell(cellcoords[0], cellcoords[1], angle, dist)
def gridcalc(lon, lat, azimuth, halfpower, beamwidth):
    """Return the list of Cells covered by an antenna sector.

    @param lon wgs84 decimal longitude
    @param lat wgs84 decimal latitude
    @param azimuth angle to north vector in degrees
    @param halfpower distance of where power halves in meters
    @param beamwidth width of beam in degrees
    """
    pi2 = pi / 2
    meterperdegreehorz = d2m.getmetersperlon(lat)
    meterperdegreevert = d2m.getmetersperlat(lat)
    # Sector radius expressed in degrees along each axis.
    wlon = halfpower / meterperdegreehorz
    wlat = halfpower / meterperdegreevert
    # Convert azimuth to radians; the +pi/2 shifts into the math convention
    # used by atan2 in cell_for_grid_coord.
    az = ((azimuth * pi) / 180) + pi2
    bw = (beamwidth * pi) / 180
    bw2 = bw / 2
    # calc p1 (first sector edge endpoint)
    az1 = (az - bw2) % (2 * pi)
    p1lon = lon + cos(az1) * wlon
    p1lat = lat + sin(az1) * wlat
    # calc p2 (second sector edge endpoint)
    az2 = (az + bw2) % (2 * pi)
    p2lon = lon + cos(az2) * wlon
    p2lat = lat + sin(az2) * wlat
    # calc bounding box not taking arc into account
    bb = BoundingBox()
    bb.include(lon, lat)
    bb.include(p1lon, p1lat)
    bb.include(p2lon, p2lat)
    # extend for arc (arc goes from p1 to p2): if the arc sweeps past a
    # cardinal direction, the box must include the full radius that way.
    if az1 <= az2:
        # arc does not cross east
        # cross north?
        if az1 < pi2 and az2 > pi2:
            bb.include(lon, lat + wlat)
        # cross west?
        if az1 < pi and az2 > pi:
            bb.include(lon - wlon, lat)
        # cross south?
        if az1 < (pi + pi2) and az2 > (pi + pi2):
            bb.include(lon, lat - wlat)
    else:
        # arc crosses east
        bb.include(lon + wlon, lat)
        # cross north?
        if az1 < pi2 or az2 > pi2:
            bb.include(lon, lat + wlat)
        # cross west?
        if az1 < pi or az2 > pi:
            bb.include(lon - wlon, lat)
        # cross south?
        if az1 < (pi + pi2) or az2 > (pi + pi2):
            bb.include(lon, lat - wlat)
    # calc overlapping grid points
    gmin = get_grid_coord(bb.min_lon, bb.min_lat)
    gmax = get_grid_coord(bb.max_lon, bb.max_lat)
    antenna = get_grid_coord(lon, lat)
    cells = []
    # Force-include the antenna's own cell (unchecked) when it falls
    # outside the scanned box; otherwise the scan below covers it.
    if (antenna[0] < gmin[0]) or (antenna[0] > gmax[0]) or (antenna[1] < gmin[1]) or (antenna[1] > gmax[1]):
        cell = cell_for_grid_coord(antenna[0], antenna[1], lon, lat, az1, az2, halfpower, check=False)
        cells.append(cell)
    # Scan every grid cell in the bounding box and keep those in range
    # and inside the sector.
    for gx in range(gmin[0], gmax[0] + 1):
        for gy in range(gmin[1], gmax[1] + 1):
            cell = cell_for_grid_coord(gx, gy, lon, lat, az1, az2, halfpower)
            if cell is None:
                continue;
            cells.append(cell)
    return cells
# Module-level conversion table shared by gridcalc()/cell_for_grid_coord();
# loaded once at import time from the bundled JSON file.
d2m = DegreesToMeter("lonlat2meters.json")

def main():
    # Smoke test / micro-benchmark: run the grid calculation 100 times
    # and print the accumulated cell count.  (Python 2 print statement.)
    num = 0
    for i in range(100):
        num += len(gridcalc(3.7777777777, 50.4567891234, -45, 20, 90))
    print num

if __name__ == "__main__":
    main()
|
JeroenDeDauw/a4g
|
gridcalc/gridcalc.py
|
Python
|
gpl-3.0
| 5,278
|
# This file is part of browser, and contains classes for embeding windows,
# using the XEmbed protocol, in a gtk Socket.
#
# Copyright (C) 2009-2010 Josiah Gordon <josiahg@gmail.com>
#
# browser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import wnck
from classes import *
class EmbedSock(gtk.Socket):
    """ EmbedSock -> Socket object that starts an X app
    and embeds it based on its xid.

    Signals:
        window-embeded      -- emitted once the child's window is embedded.
        window-name-changed -- forwarded from the embedded wnck window.
        window-icon-changed -- forwarded from the embedded wnck window.
        closing             -- emitted after close() kills the child app.
    """

    __gsignals__ = {
            'window-embeded' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                (gobject.TYPE_PYOBJECT,)),
            'window-name-changed' : (gobject.SIGNAL_RUN_LAST,
                gobject.TYPE_NONE, (gobject.TYPE_PYOBJECT,)),
            'window-icon-changed' : (gobject.SIGNAL_RUN_LAST,
                gobject.TYPE_NONE, (gobject.TYPE_PYOBJECT,)),
            'closing' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ()),
            }

    def __init__(self):
        super(EmbedSock, self).__init__()

        # Watch the default screen so we notice when the child app opens
        # its window/application and can embed it.
        self._screen = wnck.screen_get_default()
        self._screen.connect('window-opened', self._window_opened)
        self._screen.connect('application-opened', self._application_opened)

        # Window signals that get forwarded through this widget.
        self._window_connect_dict = {
                'name-changed' : self._window_name_changed,
                'icon-changed' : self._window_icon_changed,
                }

        # Child process handle; None until run() is called.
        self._app = None

    def run(self, app):
        """ run (app) ->
        Run app and embed it when it opens """

        self._app = subprocess.Popen(app)

    def close(self):
        """ close() -> Kill the embedded app (if one was started) and emit
        'closing' so the parent can clean up.
        """
        # Guard against close() before run() -- previously this raised
        # AttributeError on None; EmbedApp.close() already had this guard.
        if self._app:
            self._app.send_signal(9)
            self._app.kill()
        self.emit('closing')
        return True

    def get_pid(self):
        """ get_pid() -> Pid of the embedded app, or None if not started. """
        if self._app:
            return self._app.pid

    def get_app(self):
        """ get_app() -> The embedded app's Popen object (or None). """
        return self._app

    def _application_opened(self, screen, application):
        """ Connect forwarding signals when our child's application opens. """
        application_pid = application.get_pid()
        if self._app:
            if application_pid == self._app.pid:
                for signal, callback in self._window_connect_dict.iteritems():
                    application.connect(signal, callback)
                #self.add_id(application.get_xid())

    def _window_opened(self, screen, window):
        """ Embed our child's window as soon as it opens and announce it. """
        window_pid = window.get_pid()
        if self._app:
            if window_pid == self._app.pid:
                for signal, callback in self._window_connect_dict.iteritems():
                    window.connect(signal, callback)
                self.add_id(window.get_xid())
                self.emit('window-embeded', window)
                self.emit('window-icon-changed', window)
                self.emit('window-name-changed', window)

    def _window_icon_changed(self, window):
        """ Forward the wnck window's icon-changed signal. """
        self.emit('window-icon-changed', window)

    def _window_name_changed(self, window):
        """ Forward the wnck window's name-changed signal. """
        self.emit('window-name-changed', window)
class EmbedApp(gtk.Socket):
    """ EmbedApp -> Socket object that starts an X app
    and embeds it based on its xid """
    __gsignals__ = {
            'window-embeded' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                (gobject.TYPE_PYOBJECT,)),
            'window-name-changed' : (gobject.SIGNAL_RUN_LAST,
                gobject.TYPE_NONE, (gobject.TYPE_PYOBJECT,)),
            'window-icon-changed' : (gobject.SIGNAL_RUN_LAST,
                gobject.TYPE_NONE, (gobject.TYPE_PYOBJECT,)),
            'title-changed' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                (gobject.TYPE_STRING,)),
            'closing' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, ()),
            }

    def __init__(self):
        """ EmbedApp() -> A wrapper object to make it easier to embed an
        external application window.  It also handles catching the title and
        icon of the embeded window so the parent object can use them as it
        wishes.
        """

        super(EmbedApp, self).__init__()

        # Get the default screen and connect some signals to catch when a
        # new window or application is opened.
        self._screen = wnck.screen_get_default()
        self._screen.connect('window-opened', self._window_opened)
        self._screen.connect('application-opened', self._application_opened)

        # Set the default title.
        self._title = 'Embedded Window'
        self._type = "EmbedApp"

        # Create an object to hold the window icon of the embedded window.
        self._icon = gtk.Image()
        self._icon.show_all()

        # Child process handle; None until run() is called.
        self._app = None

        # Setup a dictionary to link signals emitted by a window with their
        # appropriate signal handlers.
        self._window_connect_dict = {
                'name-changed' : self._window_name_changed,
                'icon-changed' : self._window_icon_changed,
                }

    def run(self, app):
        """ run (app) ->
        Run app and embed it when it opens.
        """

        self._app = subprocess.Popen(app)

    def close(self):
        """ close() -> Close the embedded app and emit a 'closing' signal so
        its parent can do any cleanup necessary.
        """

        if self._app:
            self._app.send_signal(9)
            self._app.kill()
        self.emit('closing')
        return True

    def get_title(self):
        """ get_title() -> Returns the saved title of the embedded window.
        """

        return self._title

    def get_icon(self):
        """ get_icon() -> Returns the saved icon of the embedded window.
        """

        return self._icon

    def get_pid(self):
        """ get_pid() -> Returns the pid of the embedded app.
        """

        if self._app:
            return self._app.pid

    def get_app(self):
        """ get_app() -> Returns the embedded app process.
        """

        return self._app

    def set_icon(self, icon_name):
        """ set_icon(icon_name) -> Set the icon from icon_name.
        """

        self._icon.set_from_icon_name(icon_name, gtk.ICON_SIZE_MENU)

    def _application_opened(self, screen, application):
        """ _application_opened(screen, application) -> If application is the
        app started earlier then this method will connect some signals for
        application to signal-handlers.
        """

        application_pid = application.get_pid()
        if self._app:
            if application_pid == self._app.pid:
                for signal, callback in self._window_connect_dict.iteritems():
                    application.connect(signal, callback)
                # One-shot: stop listening once our app has been handled.
                self._screen.disconnect_by_func(self._application_opened)

    def _window_opened(self, screen, window):
        """ _window_opened(screen, window) -> If window is the app started
        earlier then this method will connect some signals for window to
        signal-handlers.
        """

        window_pid = window.get_pid()
        if self._app:
            if window_pid == self._app.pid:
                for signal, callback in self._window_connect_dict.iteritems():
                    window.connect(signal, callback)
                # One-shot: embed the first matching window, then detach.
                self._screen.disconnect_by_func(self._window_opened)
                self.add_id(window.get_xid())
                self._window_icon_changed(window)
                self._window_name_changed(window)
                self.emit('window-embeded', window)
                self.emit('title-changed', window.get_name())

    def _window_icon_changed(self, window):
        """ Cache a 16x16 copy of the window icon and re-emit the signal. """
        pixbuf_icon = window.get_icon()
        if pixbuf_icon:
            pixbuf_icon = pixbuf_icon.scale_simple(16, 16,
                    gtk.gdk.INTERP_BILINEAR)
            self._icon.set_from_pixbuf(pixbuf_icon)
        self.emit('window-icon-changed', window)

    def _window_name_changed(self, window):
        """ Cache the window title and re-emit the signal. """
        self._title = window.get_name()
        self.emit('window-name-changed', window)

    @property
    def type(self):
        # Widget type tag ("EmbedApp") used by consumers of this class.
        return self._type
|
zepto/webbrowser
|
webbrowser/embed_sock.py
|
Python
|
gpl-3.0
| 8,423
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2011, 2012 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
#############################################################################################
#This has been temporarily deprecated, please use schedule_workes from general utils instead#
#############################################################################################
import re
import os
import sys
from itertools import dropwhile, chain
from invenio.bibauthorid_general_utils import print_tortoise_memory_log
from invenio import bibauthorid_config as bconfig
from invenio.bibauthorid_general_utils import is_eq, update_status, update_status_final
#python2.4 compatibility
from invenio.bibauthorid_general_utils import bai_all as all
def to_number(stry):
    """Strip every non-digit character from *stry* and parse the rest as int."""
    digits = re.sub(r"\D", "", stry)
    return int(digits)
def dict_by_file(fpath):
    """Parse a "key:value"-per-line file (e.g. /proc/meminfo) into a dict.

    The final split element (empty string after the trailing newline) is
    dropped.  Lines must contain exactly one ':'.
    """
    # Context manager guarantees the handle is closed even if read() raises.
    with open(fpath) as fp:
        content = fp.read()
    return dict(x.split(':') for x in content.split("\n")[:-1])
def get_free_memory():
    """Return reclaimable memory in kB: MemFree + Buffers + Cached."""
    mem = dict_by_file("/proc/meminfo")
    fields = (mem['MemFree'], mem['Buffers'], mem['Cached'])
    return sum(to_number(field) for field in fields)
def get_total_memory():
    """Return total system memory in kB (MemTotal from /proc/meminfo)."""
    return to_number(dict_by_file("/proc/meminfo")['MemTotal'])
def get_peak_mem():
    """Return [VmPeak, VmHWM] in kB for the current process.

    Values are read from /proc/<pid>/status (VmPeak = peak virtual size,
    VmHWM = resident-set high-water mark per proc(5)).  Note: Python 2
    map() returns a list here.
    """
    pid = os.getpid()
    mem = dict_by_file("/proc/%d/status" % pid)
    return map(to_number, (mem["VmPeak"], mem["VmHWM"]))
#matrix_coefs = [1133088., 0., 1.5]
#wedge_coefs = [800000., 0., 2.]
# Empirical memory-estimator coefficients for the matrix and wedge phases;
# the commented-out values above are earlier calibrations.
# NOTE(review): the meaning of the three slots (scale/offset/exponent?) is
# not evident from this file -- confirm against the estimator that uses them.
matrix_coefs = [1000., 500., 0.01]
wedge_coefs = [1000., 500., 0.02]
def get_biggest_job_below(lim, arr):
    """Return the index of the last job in *arr* strictly below *lim*.

    *arr* is an ascending iterable of job sizes (it may be a one-shot
    generator, so it is consumed exactly once and never len()-ed).
    Returns -1 when the first element is already >= lim (or arr is empty);
    returns the last index when every element fits below lim.

    Rewritten without the Python-2-only ``iterator.next()`` call the
    original used (``dropwhile(...).next()``), which raises AttributeError
    on Python 3; semantics are unchanged, including the implicit sentinel
    that made the all-below case return len(arr) - 1.
    """
    pos = -1
    for pos, size in enumerate(arr):
        if size >= lim:
            # First element that no longer fits; the previous one wins.
            return pos - 1
    # Every element was below lim (pos is the last index, or -1 if empty).
    return pos
def get_cores_count():
    """Return the number of CPU cores reported by multiprocessing."""
    from multiprocessing import cpu_count
    return cpu_count()
def schedule(jobs, sizs, estimator, memfile_path=None):
    """Run every callable in *jobs* in a forked child process, keeping the
    predicted memory footprint (estimator(sizs[i])) within total system
    memory and at most one child per CPU core.

    Returns the os.wait() status code for each job, in job order.  Child
    stdout is discarded; a child that raises writes the exception text to
    /tmp/exception-<pid> and exits with os.EX_SOFTWARE.  POSIX-only
    (os.fork / os.wait) and Python 2 syntax (``except Exception, e``).
    """
    if bconfig.DEBUG_PROCESS_PEAK_MEMORY and memfile_path:
        # Child-side hook: append this process's peak memory to memfile_path.
        # Note: 'idx' and 'bibs' are resolved from the enclosing scope at
        # call time, inside run_job.
        def register_memory_usage():
            pid = os.getpid()
            peak = get_peak_mem()
            fp = open(memfile_path, 'a')
            print_tortoise_memory_log(
                {'pid' : pid,
                 'peak1': peak[0],
                 'peak2': peak[1],
                 'est' : sizs[idx],
                 'bibs' : bibs[idx]
                },
                fp
                )
            fp.close()
    else:
        def register_memory_usage():
            pass

    def run_job(idx):
        # Executed only in a forked child; always terminates via os._exit
        # so the child never returns into the parent's control flow.
        try:
            sys.stdout = output_killer
            jobs[idx]()
            register_memory_usage()
            os._exit(os.EX_OK)
        except Exception, e:
            f = open('/tmp/exception-%s' % str(os.getpid()), "w")
            f.write(str(e) + '\n')
            f.close()
            os._exit(os.EX_SOFTWARE)

    max_workers = get_cores_count()
    pid_2_idx = {}
    #free = get_free_memory()
    initial = get_total_memory()
    free = initial
    output_killer = open(os.devnull, 'w')
    ret_status = [None] * len(jobs)
    # Keep the raw sizes as 'bibs'; 'sizs' becomes the estimated memory cost.
    bibs = sizs
    sizs = map(estimator, sizs)
    free_idxs = range(len(jobs))
    assert len(jobs) == len(sizs) == len(ret_status) == len(bibs) == len(free_idxs)
    done = 0.
    total = sum(sizs)
    biggest = max(sizs)

    update_status(0., "0 / %d" % len(jobs))

    # Jobs estimated to exceed total memory are run alone, one at a time.
    too_big = [idx for idx in free_idxs if sizs[idx] > free]

    for idx in too_big:
        pid = os.fork()
        if pid == 0: # child
            run_job(idx)
        else: # parent
            done += sizs[idx]
            # NOTE(review): this deletes by POSITION while 'idx' is a job
            # index; after the first deletion the two diverge, so the wrong
            # entry may be removed when several too-big jobs exist. Confirm.
            del free_idxs[idx]
            cpid, status = os.wait()
            update_status(done / total, "%d / %d" % (len(jobs) - len(free_idxs), len(jobs)))
            ret_status[idx] = status
            assert cpid == pid

    # Main loop: keep forking the largest job that fits in the remaining
    # memory budget until all jobs have been started and reaped.
    while free_idxs or pid_2_idx:
        while len(pid_2_idx) < max_workers:
            # 'idx' is a position within free_idxs, not a job index.
            idx = get_biggest_job_below(free, (sizs[idx] for idx in free_idxs))
            if idx != -1:
                job_idx = free_idxs[idx]
                pid = os.fork()
                if pid == 0: # child
                    # Nice bigger jobs harder so small ones finish first.
                    # NOTE(review): uses sizs[idx] (position) rather than
                    # sizs[job_idx]; looks like a job/position mix-up.
                    os.nice(int((float(sizs[idx]) * 20.0 / biggest)))
                    run_job(job_idx)
                else: # parent
                    pid_2_idx[pid] = job_idx
                    assert free > sizs[job_idx]
                    free -= sizs[job_idx]
                    del free_idxs[idx]
            else:
                break

        # Reap one finished child and return its memory to the budget.
        pid, status = os.wait()
        assert pid in pid_2_idx
        idx = pid_2_idx[pid]
        freed = sizs[idx]
        done += freed
        ret_status[idx] = status
        free += freed
        del pid_2_idx[pid]
        update_status(done / total, "%d / %d" % (len(jobs) - len(free_idxs) - len(pid_2_idx), len(jobs)))

    update_status_final("%d / %d" % (len(jobs), len(jobs)))

    # Post-conditions: all memory returned, all jobs started and reaped.
    assert is_eq(free, initial)
    assert not pid_2_idx
    assert not free_idxs
    assert len(jobs) == len(sizs) == len(ret_status) == len(bibs)
    assert all(stat != None for stat in ret_status)

    return ret_status
|
jmacmahon/invenio
|
modules/bibauthorid/lib/bibauthorid_scheduler.py
|
Python
|
gpl-2.0
| 5,558
|
import datetime
import json
import discord
import os
from discord.ext import commands
from discord.ext.commands.converter import *
class CassandraContext(commands.Context):
    """Command context that neutralizes mass mentions in outgoing messages."""

    def is_float(self, argument):
        """Return True if *argument* can be parsed as a float.

        Fixed: the original evaluated ``float(string)`` where ``string``
        was undefined, so every call raised NameError.
        """
        try:
            float(argument)
            return True
        except (TypeError, ValueError):  # Not a number (or not parseable)
            return False

    async def send(self, content=None, *args, **kwargs):
        """Override for send to add message filtering."""
        if content:
            if self.is_float(content) or content.isdigit():
                content = str(content)
            # str.replace returns a NEW string; the original discarded the
            # result, so @everyone/@here were never actually neutralized.
            content = content.replace(
                "@everyone", "@\u200beveryone").replace("@here", "@\u200bhere")
        sent_message = await super().send(content, *args, **kwargs)
        return sent_message

    @property
    def session(self):
        """Returns the aiohttp.ClientSession() instance in CassandraBase."""
        return self.bot.session
class CassandraBase(commands.Bot):
    """This is the class that initializes the bot."""

    def __init__(self):
        # Bot token comes from the environment; raises KeyError when unset.
        self.token = os.environ['TOKEN']
        self.presence = discord.Game(name='in a Digital Haunt...',
                                     url="https://www.twitch.tv/ghostofsparkles", type=1)
        self.archive_file = []

        def get_package_info():
            """Fetches `arg` in `package.json`."""
            with open("./package.json") as f:
                config = json.load(f)
            return config

        def get_prefix():
            """Fetches all known prefixes."""
            prefixes = ["-",
                        "Cassandra "]
            return commands.when_mentioned_or(*prefixes)

        def get_description():
            """Fetches description."""
            return f"{get_package_info()['name']}"

        def get_game():
            """Fetches game presence."""
            return self.presence

        super().__init__(command_prefix=get_prefix(), game=get_game(), description=get_description(), pm_help=None,
                         help_attrs=dict(hidden=True))

        # Auto-discover cog extensions: every .py file under ./cogs.
        startup_extensions = []
        for file in os.listdir("./cogs"):
            if file.endswith(".py"):
                startup_extensions.append(file.replace('.py', ''))
        # Load each cog; a failing cog is logged but does not stop startup.
        for extension in startup_extensions:
            try:
                self.load_extension(f'cogs.{extension}')
                print(f'Loaded {extension}')
            except Exception as e:
                error = f'{extension}\n {type(e).__name__}: {e}'
                print(f'Failed to load extension {error}')

        # aiohttp session; set to a real ClientSession elsewhere before use.
        self.session = None

    def run(self):
        """Runs the bot."""
        super().run(self.token)

    async def on_message(self, message):
        """An event triggered when a message is sent."""
        # Use our CassandraContext subclass so sends get mention filtering.
        ctx = await self.get_context(message, cls=CassandraContext)
        await self.invoke(ctx)

    async def fetch(self, url: str, headers: dict = None, timeout: float = None,
                    return_type: str = None, **kwargs):
        """Fetches data from a url via aiohttp.

        *return_type* names a response accessor (e.g. 'json', 'text');
        when given, returns (response, awaited accessor result), otherwise
        (response, None).
        """
        async with self.session.get(url, headers=headers, timeout=timeout, **kwargs) as resp:
            if return_type:
                cont = getattr(resp, return_type)
                return resp, await cont()
            else:
                return resp, None
class Cassandra(CassandraBase):
    """Concrete bot class; currently identical to CassandraBase."""
    pass
class ConvertError(Exception):
    """Raised by Union.convert when an argument cannot be converted."""
    pass
class Union(Converter):
    """Converter that tries several sub-converters in order.

    Fixed: the original raised ConvertError inside the loop on the FIRST
    failing converter, which made every converter after the first
    unreachable.  Now each converter is attempted in turn and ConvertError
    is raised only after all of them have failed.
    """

    def __init__(self, *converters):
        self.converters = converters

    async def convert(self, ctx: CassandraContext, argument: str):
        """Convert *argument* with the first converter that accepts it."""
        for converter in self.converters:
            try:
                return await ctx.command.do_conversion(ctx, converter, argument)
            except Exception:
                # This converter rejected the argument; try the next one.
                continue
        raise ConvertError('Conversion Failed.')
|
NCPlayz/CassBotPy
|
cassandra/bot.py
|
Python
|
mit
| 4,014
|
# -*- coding: utf-8 -*-
"""
Tests for tipfy.i18n
"""
from __future__ import with_statement
import datetime
import gettext as gettext_stdlib
import os
import unittest
from babel.numbers import NumberFormatError
from pytz.gae import pytz
from tipfy.app import App, Request, Response
from tipfy.handler import RequestHandler
from tipfy.routing import Rule
from tipfy.local import local, get_request
from tipfy.sessions import SessionMiddleware
import tipfy.i18n as i18n
import test_utils
class BaseTestCase(test_utils.BaseTestCase):
    def setUp(self):
        """Build a minimal app and bind a fake request before each test."""
        app = App(rules=[
            Rule('/', name='home', handler=RequestHandler)
        ], config={
            'tipfy.sessions': {
                'secret_key': 'secret',
            },
            'tipfy.i18n': {
                'timezone': 'UTC'
            },
        })
        local.request = request = Request.from_values('/')
        request.app = app
        test_utils.BaseTestCase.setUp(self)
#==========================================================================
# I18nMiddleware
#==========================================================================
    def test_middleware_multiple_changes(self):
        """A locale set via ?lang= must persist in the session across requests."""
        class MyHandler(RequestHandler):
            middleware = [SessionMiddleware(), i18n.I18nMiddleware()]

            def get(self, **kwargs):
                locale = self.i18n.locale
                return Response(locale)

        app = App(rules=[
            Rule('/', name='home', handler=MyHandler)
        ], config={
            'tipfy.sessions': {
                'secret_key': 'secret',
            },
            'tipfy.i18n': {
                'locale_request_lookup': [('args', 'lang'), ('session', '_locale')],
            }
        })
        client = app.get_test_client()

        response = client.get('/')
        self.assertEqual(response.data, 'en_US')
        # Switch via query arg, then verify the session remembers it.
        response = client.get('/?lang=pt_BR')
        self.assertEqual(response.data, 'pt_BR')
        response = client.get('/')
        self.assertEqual(response.data, 'pt_BR')
        response = client.get('/?lang=en_US')
        self.assertEqual(response.data, 'en_US')
        response = client.get('/')
        self.assertEqual(response.data, 'en_US')
#==========================================================================
# _(), gettext(), ngettext(), lazy_gettext(), lazy_ngettext()
#==========================================================================
    def test_translations_not_set(self):
        """gettext without a bound request must raise AttributeError."""
        # We release it here because it is set on setUp()
        local.__release_local__()
        self.assertRaises(AttributeError, i18n.gettext, 'foo')
    def test_gettext(self):
        """With no catalog loaded, gettext returns the msgid unchanged."""
        self.assertEqual(i18n.gettext('foo'), u'foo')
    def test_gettext_(self):
        """_() is an alias for gettext()."""
        self.assertEqual(i18n._('foo'), u'foo')
    def test_gettext_with_variables(self):
        """%(name)s placeholders survive and can be filled via % or kwargs."""
        self.assertEqual(i18n.gettext('foo %(foo)s'), u'foo %(foo)s')
        self.assertEqual(i18n.gettext('foo %(foo)s') % {'foo': 'bar'}, u'foo bar')
        self.assertEqual(i18n.gettext('foo %(foo)s', foo='bar'), u'foo bar')
    def test_ngettext(self):
        """ngettext picks singular for n == 1, plural otherwise."""
        self.assertEqual(i18n.ngettext('One foo', 'Many foos', 1), u'One foo')
        self.assertEqual(i18n.ngettext('One foo', 'Many foos', 2), u'Many foos')
    def test_ngettext_with_variables(self):
        """ngettext placeholders fill via kwargs or later % formatting."""
        self.assertEqual(i18n.ngettext('One foo %(foo)s', 'Many foos %(foo)s', 1), u'One foo %(foo)s')
        self.assertEqual(i18n.ngettext('One foo %(foo)s', 'Many foos %(foo)s', 2), u'Many foos %(foo)s')
        self.assertEqual(i18n.ngettext('One foo %(foo)s', 'Many foos %(foo)s', 1, foo='bar'), u'One foo bar')
        self.assertEqual(i18n.ngettext('One foo %(foo)s', 'Many foos %(foo)s', 2, foo='bar'), u'Many foos bar')
        self.assertEqual(i18n.ngettext('One foo %(foo)s', 'Many foos %(foo)s', 1) % {'foo': 'bar'}, u'One foo bar')
        self.assertEqual(i18n.ngettext('One foo %(foo)s', 'Many foos %(foo)s', 2) % {'foo': 'bar'}, u'Many foos bar')
    def test_lazy_gettext(self):
        """lazy_gettext compares equal to the plain translation."""
        self.assertEqual(i18n.lazy_gettext('foo'), u'foo')
    def test_lazy_ngettext(self):
        """lazy_ngettext matches ngettext's singular/plural selection."""
        self.assertEqual(i18n.lazy_ngettext('One foo', 'Many foos', 1), u'One foo')
        self.assertEqual(i18n.lazy_ngettext('One foo', 'Many foos', 2), u'Many foos')
#==========================================================================
# I18nStore.get_store_for_request()
#==========================================================================
    def get_app(self):
        """Return a fresh app for the get_store_for_request tests."""
        return App(rules=[
            Rule('/', name='home', handler=RequestHandler)
        ], config={
            'tipfy.sessions': {
                'secret_key': 'secret',
            },
            'tipfy.i18n': {
                'timezone': 'UTC'
            },
        })
    def test_get_store_for_request(self):
        """The configured default locale is used when nothing else matches."""
        app = self.get_app()
        app.config['tipfy.i18n']['locale'] = 'jp_JP'

        with app.get_test_handler('/') as handler:
            self.assertEqual(handler.i18n.locale, 'jp_JP')
    def test_get_store_for_request_args(self):
        """Locale lookup from the query string ('args')."""
        app = self.get_app()
        app.config['tipfy.i18n']['locale_request_lookup'] = [('args', 'language')]

        with app.get_test_handler('/', query_string={'language': 'es_ES'}) as handler:
            self.assertEqual(handler.i18n.locale, 'es_ES')
    def test_get_store_for_request_form(self):
        """Locale lookup from POSTed form data ('form')."""
        app = self.get_app()
        app.config['tipfy.i18n']['locale_request_lookup'] = [('form', 'language')]

        with app.get_test_handler('/', data={'language': 'es_ES'}, method='POST') as handler:
            self.assertEqual(handler.i18n.locale, 'es_ES')
    def test_get_store_for_request_cookies(self):
        """Locale lookup from a cookie ('cookies')."""
        app = self.get_app()
        app.config['tipfy.i18n']['locale_request_lookup'] = [('cookies', 'language')]

        with app.get_test_handler('/', headers=[('Cookie', 'language="es_ES"; Path=/')]) as handler:
            self.assertEqual(handler.i18n.locale, 'es_ES')
    def test_get_store_for_request_args_cookies(self):
        """Lookup falls through a missing source ('args') to the next one."""
        app = self.get_app()
        app.config['tipfy.i18n']['locale_request_lookup'] = [
            ('args', 'foo'),
            ('cookies', 'language')
        ]

        with app.get_test_handler('/', headers=[('Cookie', 'language="es_ES"; Path=/')]) as handler:
            self.assertEqual(handler.i18n.locale, 'es_ES')
    def test_get_store_for_request_rule_args(self):
        """Locale lookup from matched URL rule arguments ('rule_args')."""
        app = self.get_app()
        app.config['tipfy.i18n']['locale_request_lookup'] = [('rule_args', 'locale'),]

        with app.get_test_handler('/') as handler:
            handler.request.rule_args = {'locale': 'es_ES'}
            self.assertEqual(handler.i18n.locale, 'es_ES')
#==========================================================================
# Date formatting
#==========================================================================
    def test_format_date(self):
        """format_date honors explicit short/medium/long/full formats (en_US)."""
        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_date(value, format='short'), u'11/10/09')
        self.assertEqual(i18n.format_date(value, format='medium'), u'Nov 10, 2009')
        self.assertEqual(i18n.format_date(value, format='long'), u'November 10, 2009')
        self.assertEqual(i18n.format_date(value, format='full'), u'Tuesday, November 10, 2009')
    def test_format_date_no_format(self):
        """Without a format argument, format_date defaults to 'medium'."""
        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_date(value), u'Nov 10, 2009')
    def test_format_date_no_format_but_configured(self):
        """A configured date_formats mapping overrides the default format."""
        app = App(config={
            'tipfy.sessions': {
                'secret_key': 'secret',
            },
            'tipfy.i18n': {
                'timezone': 'UTC',
                'date_formats': {
                    'time': 'medium',
                    'date': 'medium',
                    'datetime': 'medium',
                    'time.short': None,
                    'time.medium': None,
                    'time.full': None,
                    'time.long': None,
                    'date.short': None,
                    # 'medium' is remapped to 'full', so the default date
                    # format below renders the full form.
                    'date.medium': 'full',
                    'date.full': None,
                    'date.long': None,
                    'datetime.short': None,
                    'datetime.medium': None,
                    'datetime.full': None,
                    'datetime.long': None,
                }
            }
        })
        local.request = request = Request.from_values('/')
        request.app = app

        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_date(value), u'Tuesday, November 10, 2009')
    def test_format_date_pt_BR(self):
        """Date formats follow the active locale (pt_BR)."""
        i18n.set_locale('pt_BR')
        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_date(value, format='short'), u'10/11/09')
        self.assertEqual(i18n.format_date(value, format='medium'), u'10/11/2009')
        self.assertEqual(i18n.format_date(value, format='long'), u'10 de novembro de 2009')
        self.assertEqual(i18n.format_date(value, format='full'), u'terça-feira, 10 de novembro de 2009')
    def test_format_datetime(self):
        """format_datetime honors formats and the active timezone (en_US)."""
        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_datetime(value, format='short'), u'11/10/09 4:36 PM')
        self.assertEqual(i18n.format_datetime(value, format='medium'), u'Nov 10, 2009 4:36:05 PM')
        self.assertEqual(i18n.format_datetime(value, format='long'), u'November 10, 2009 4:36:05 PM +0000')
        #self.assertEqual(i18n.format_datetime(value, format='full'), u'Tuesday, November 10, 2009 4:36:05 PM World (GMT) Time')
        self.assertEqual(i18n.format_datetime(value, format='full'), u'Tuesday, November 10, 2009 4:36:05 PM GMT+00:00')
        # Changing the timezone shifts the rendered wall-clock time.
        i18n.set_timezone('America/Chicago')
        self.assertEqual(i18n.format_datetime(value, format='short'), u'11/10/09 10:36 AM')
    def test_format_datetime_no_format(self):
        """Without a format argument, format_datetime defaults to 'medium'."""
        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_datetime(value), u'Nov 10, 2009 4:36:05 PM')
    def test_format_datetime_pt_BR(self):
        """Datetime formats follow the active locale (pt_BR)."""
        i18n.set_locale('pt_BR')
        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_datetime(value, format='short'), u'10/11/09 16:36')
        self.assertEqual(i18n.format_datetime(value, format='medium'), u'10/11/2009 16:36:05')
        #self.assertEqual(i18n.format_datetime(value, format='long'), u'10 de novembro de 2009 16:36:05 +0000')
        self.assertEqual(i18n.format_datetime(value, format='long'), u'10 de novembro de 2009 16h36min05s +0000')
        #self.assertEqual(i18n.format_datetime(value, format='full'), u'terça-feira, 10 de novembro de 2009 16h36min05s Horário Mundo (GMT)')
        self.assertEqual(i18n.format_datetime(value, format='full'), u'ter\xe7a-feira, 10 de novembro de 2009 16h36min05s GMT+00:00')
    def test_format_time(self):
        """format_time honors explicit short/medium/long/full formats (en_US)."""
        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_time(value, format='short'), u'4:36 PM')
        self.assertEqual(i18n.format_time(value, format='medium'), u'4:36:05 PM')
        self.assertEqual(i18n.format_time(value, format='long'), u'4:36:05 PM +0000')
        #self.assertEqual(i18n.format_time(value, format='full'), u'4:36:05 PM World (GMT) Time')
        self.assertEqual(i18n.format_time(value, format='full'), u'4:36:05 PM GMT+00:00')
    def test_format_time_no_format(self):
        """Without a format argument, format_time defaults to 'medium'."""
        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_time(value), u'4:36:05 PM')
    def test_format_time_pt_BR(self):
        """Time formats follow the active locale (pt_BR) and timezone."""
        i18n.set_locale('pt_BR')
        value = datetime.datetime(2009, 11, 10, 16, 36, 05)
        self.assertEqual(i18n.format_time(value, format='short'), u'16:36')
        self.assertEqual(i18n.format_time(value, format='medium'), u'16:36:05')
        #self.assertEqual(i18n.format_time(value, format='long'), u'16:36:05 +0000')
        self.assertEqual(i18n.format_time(value, format='long'), u'16h36min05s +0000')
        #self.assertEqual(i18n.format_time(value, format='full'), u'16h36min05s Horário Mundo (GMT)')
        self.assertEqual(i18n.format_time(value, format='full'), u'16h36min05s GMT+00:00')

        i18n.set_timezone('America/Chicago')
        self.assertEqual(i18n.format_time(value, format='short'), u'10:36')
def test_parse_date(self):
i18n.set_locale('en_US')
self.assertEqual(i18n.parse_date('4/1/04'), datetime.date(2004, 4, 1))
i18n.set_locale('de_DE')
self.assertEqual(i18n.parse_date('01.04.2004'), datetime.date(2004, 4, 1))
def test_parse_datetime(self):
i18n.set_locale('en_US')
self.assertRaises(NotImplementedError, i18n.parse_datetime, '4/1/04 16:08:09')
def test_parse_time(self):
i18n.set_locale('en_US')
self.assertEqual(i18n.parse_time('18:08:09'), datetime.time(18, 8, 9))
i18n.set_locale('de_DE')
self.assertEqual(i18n.parse_time('18:08:09'), datetime.time(18, 8, 9))
def test_format_timedelta(self):
# This is only present in Babel dev, so skip if not available.
if not getattr(i18n, 'format_timedelta', None):
return
i18n.set_locale('en_US')
# ???
# self.assertEqual(i18n.format_timedelta(datetime.timedelta(weeks=12)), u'3 months')
self.assertEqual(i18n.format_timedelta(datetime.timedelta(weeks=12)), u'3 mths')
i18n.set_locale('es')
# self.assertEqual(i18n.format_timedelta(datetime.timedelta(seconds=1)), u'1 segundo')
self.assertEqual(i18n.format_timedelta(datetime.timedelta(seconds=1)), u'1 s')
i18n.set_locale('en_US')
self.assertEqual(i18n.format_timedelta(datetime.timedelta(hours=3), granularity='day'), u'1 day')
self.assertEqual(i18n.format_timedelta(datetime.timedelta(hours=23), threshold=0.9), u'1 day')
# self.assertEqual(i18n.format_timedelta(datetime.timedelta(hours=23), threshold=1.1), u'23 hours')
self.assertEqual(i18n.format_timedelta(datetime.timedelta(hours=23), threshold=1.1), u'23 hrs')
self.assertEqual(i18n.format_timedelta(datetime.datetime.now() - datetime.timedelta(days=5)), u'5 days')
def test_format_iso(self):
value = datetime.datetime(2009, 11, 10, 16, 36, 05)
self.assertEqual(i18n.format_date(value, format='iso'), u'2009-11-10')
self.assertEqual(i18n.format_time(value, format='iso'), u'16:36:05')
self.assertEqual(i18n.format_datetime(value, format='iso'), u'2009-11-10T16:36:05+0000')
#==========================================================================
# Timezones
#==========================================================================
def test_set_timezone(self):
request = get_request()
request.i18n.set_timezone('UTC')
self.assertEqual(request.i18n.tzinfo.zone, 'UTC')
request.i18n.set_timezone('America/Chicago')
self.assertEqual(request.i18n.tzinfo.zone, 'America/Chicago')
request.i18n.set_timezone('America/Sao_Paulo')
self.assertEqual(request.i18n.tzinfo.zone, 'America/Sao_Paulo')
def test_to_local_timezone(self):
request = get_request()
request.i18n.set_timezone('US/Eastern')
format = '%Y-%m-%d %H:%M:%S %Z%z'
# Test datetime with timezone set
base = datetime.datetime(2002, 10, 27, 6, 0, 0, tzinfo=pytz.UTC)
localtime = i18n.to_local_timezone(base)
result = localtime.strftime(format)
self.assertEqual(result, '2002-10-27 01:00:00 EST-0500')
# Test naive datetime - no timezone set
base = datetime.datetime(2002, 10, 27, 6, 0, 0)
localtime = i18n.to_local_timezone(base)
result = localtime.strftime(format)
self.assertEqual(result, '2002-10-27 01:00:00 EST-0500')
def test_to_utc(self):
request = get_request()
request.i18n.set_timezone('US/Eastern')
format = '%Y-%m-%d %H:%M:%S'
# Test datetime with timezone set
base = datetime.datetime(2002, 10, 27, 6, 0, 0, tzinfo=pytz.UTC)
localtime = i18n.to_utc(base)
result = localtime.strftime(format)
self.assertEqual(result, '2002-10-27 06:00:00')
# Test naive datetime - no timezone set
base = datetime.datetime(2002, 10, 27, 6, 0, 0)
localtime = i18n.to_utc(base)
result = localtime.strftime(format)
self.assertEqual(result, '2002-10-27 11:00:00')
def test_get_timezone_location(self):
i18n.set_locale('de_DE')
self.assertEqual(i18n.get_timezone_location(pytz.timezone('America/St_Johns')), u'Kanada (St. John\'s)')
i18n.set_locale('de_DE')
self.assertEqual(i18n.get_timezone_location(pytz.timezone('America/Mexico_City')), u'Mexiko (Mexiko-Stadt)')
i18n.set_locale('de_DE')
self.assertEqual(i18n.get_timezone_location(pytz.timezone('Europe/Berlin')), u'Deutschland')
#==========================================================================
# Number formatting
#==========================================================================
def test_format_number(self):
i18n.set_locale('en_US')
self.assertEqual(i18n.format_number(1099), u'1,099')
def test_format_decimal(self):
i18n.set_locale('en_US')
self.assertEqual(i18n.format_decimal(1.2345), u'1.234')
self.assertEqual(i18n.format_decimal(1.2346), u'1.235')
self.assertEqual(i18n.format_decimal(-1.2346), u'-1.235')
self.assertEqual(i18n.format_decimal(12345.5), u'12,345.5')
i18n.set_locale('sv_SE')
self.assertEqual(i18n.format_decimal(1.2345), u'1,234')
i18n.set_locale('de')
self.assertEqual(i18n.format_decimal(12345), u'12.345')
def test_format_currency(self):
i18n.set_locale('en_US')
self.assertEqual(i18n.format_currency(1099.98, 'USD'), u'$1,099.98')
self.assertEqual(i18n.format_currency(1099.98, 'EUR', u'\xa4\xa4 #,##0.00'), u'EUR 1,099.98')
i18n.set_locale('es_CO')
self.assertEqual(i18n.format_currency(1099.98, 'USD'), u'US$\xa01.099,98')
i18n.set_locale('de_DE')
self.assertEqual(i18n.format_currency(1099.98, 'EUR'), u'1.099,98\xa0\u20ac')
def test_format_percent(self):
i18n.set_locale('en_US')
self.assertEqual(i18n.format_percent(0.34), u'34%')
self.assertEqual(i18n.format_percent(25.1234), u'2,512%')
self.assertEqual(i18n.format_percent(25.1234, u'#,##0\u2030'), u'25,123\u2030')
i18n.set_locale('sv_SE')
self.assertEqual(i18n.format_percent(25.1234), u'2\xa0512\xa0%')
def test_format_scientific(self):
i18n.set_locale('en_US')
self.assertEqual(i18n.format_scientific(10000), u'1E4')
self.assertEqual(i18n.format_scientific(1234567, u'##0E00'), u'1.23E06')
def test_parse_number(self):
i18n.set_locale('en_US')
self.assertEqual(i18n.parse_number('1,099'), 1099L)
i18n.set_locale('de_DE')
self.assertEqual(i18n.parse_number('1.099'), 1099L)
def test_parse_number2(self):
i18n.set_locale('de')
self.assertRaises(NumberFormatError, i18n.parse_number, '1.099,98')
def test_parse_decimal(self):
i18n.set_locale('en_US')
self.assertEqual(i18n.parse_decimal('1,099.98'), 1099.98)
i18n.set_locale('de')
self.assertEqual(i18n.parse_decimal('1.099,98'), 1099.98)
def test_parse_decimal_error(self):
i18n.set_locale('de')
self.assertRaises(NumberFormatError, i18n.parse_decimal, '2,109,998')
#==========================================================================
# Miscelaneous
#==========================================================================
def test_list_translations(self):
cwd = os.getcwd()
os.chdir(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'resources'))
translations = i18n.list_translations()
self.assertEqual(len(translations), 2)
self.assertEqual(translations[0].language, 'en')
self.assertEqual(translations[0].territory, 'US')
self.assertEqual(translations[1].language, 'pt')
self.assertEqual(translations[1].territory, 'BR')
os.chdir(cwd)
def test_list_translations_no_locale_dir(self):
cwd = os.getcwd()
os.chdir(os.path.join(os.path.abspath(os.path.dirname(__file__)), 'resources', 'locale'))
self.assertEqual(i18n.list_translations(), [])
os.chdir(cwd)
if __name__ == '__main__':
test_utils.main()
|
pombreda/tipfy
|
tests/i18n_test.py
|
Python
|
bsd-3-clause
| 20,797
|
#!/usr/bin/python
from Adafruit_CharLCD import Adafruit_CharLCD
lcd = Adafruit_CharLCD()
lcd.begin(16, 1)
lcd.clear()
lcd.message('Hello World\n')
lcd.message('From Me')
|
CurtisIreland/electronics
|
RPi-CharDisp/HelloWorldLCD.py
|
Python
|
cc0-1.0
| 175
|
default_app_config = "test_custom_user_subclass.apps.CustomUserSubclassConfig"
|
jcugat/django-custom-user
|
test_custom_user_subclass/__init__.py
|
Python
|
bsd-3-clause
| 79
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaebusiness.business import CommandExecutionException
from tekton import router
from gaecookie.decorator import no_csrf
from cristianismo_app import facade
from routes.cristianismos import admin
@no_csrf
def index():
return TemplateResponse({'save_path': router.to_path(save)},'cristianismos/admin/form.html')
def save(_handler, cristianismo_id=None, **cristianismo_properties):
cmd = facade.save_cristianismo_cmd(**cristianismo_properties)
try:
cmd()
except CommandExecutionException:
context = {'errors': cmd.errors,
'cristianismo': cmd.form}
return TemplateResponse(context, 'cristianismos/admin/form.html')
_handler.redirect(router.to_path(admin))
|
RoAbreu/AulaJavaScripts
|
PROJETO/backend/appengine/routes/cristianismos/admin/new.py
|
Python
|
mit
| 863
|
import json
from django.contrib.gis.geos import Point
from django.core.urlresolvers import resolve
from django.test import RequestFactory, TestCase
from mixer.backend.django import mixer
from .. import views
class IssueListViewTestCase(TestCase):
def setUp(self):
self.view = views.issue_list
self.factory = RequestFactory()
self.project = mixer.blend('projects.Project')
self.category = mixer.blend('projects.Category')
fields = {
'project': self.project,
'category': self.category,
'point': Point((-80.6201499999999953, -5.2004500000000000))
}
self.issues = mixer.cycle(2).blend('projects.Issue', **fields)
def test_match_expected_view(self):
url = resolve('/api/v1/projects/{}/issues/'.format(self.project.slug))
self.assertEqual(url.func.__name__, self.view.__name__)
def test_load_sucessful(self):
request = self.factory.get('/')
response = self.view(request, slug=self.project.slug)
body = json.loads(response.content.decode('utf8'))
self.assertEqual(response.status_code, 200)
self.assertIn('data', body)
self.assertEqual(len(body['data']), 2)
|
erickgnavar/saywiti
|
saywiti/api_v1/tests/test_views.py
|
Python
|
mit
| 1,226
|
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello Franziska!"
if __name__ == "__main__":
app.run(port=8080)
|
LK-data-analytics/tutorial
|
main/app.py
|
Python
|
apache-2.0
| 158
|
from a10sdk.common.A10BaseClass import A10BaseClass
class Tftp(A10BaseClass):
"""Class Description::
NAT64 TFTP ALG (default: disabled).
Class tftp supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param tftp_enable: {"optional": true, "enum": ["enable"], "type": "string", "description": "'enable': Enable NAT64 TFTP ALG; ", "format": "enum"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/cgnv6/nat64/alg/tftp`.
"""
def __init__(self, **kwargs):
self.ERROR_MSG = ""
self.required=[]
self.b_key = "tftp"
self.a10_url="/axapi/v3/cgnv6/nat64/alg/tftp"
self.DeviceProxy = ""
self.tftp_enable = ""
self.uuid = ""
for keys, value in kwargs.items():
setattr(self,keys, value)
|
amwelch/a10sdk-python
|
a10sdk/core/cgnv6/cgnv6_nat64_alg_tftp.py
|
Python
|
apache-2.0
| 1,186
|
# -*- coding: utf-8 -*-
"""
werkzeug.contrib.wrappers
~~~~~~~~~~~~~~~~~~~~~~~~~
Extra wrappers or mixins contributed by the community. These wrappers can
be mixed in into request objects to add extra functionality.
Example::
from werkzeug.wrappers import Request as RequestBase
from werkzeug.contrib.wrappers import JSONRequestMixin
class Request(RequestBase, JSONRequestMixin):
pass
Afterwards this request object provides the extra functionality of the
:class:`JSONRequestMixin`.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import codecs
try:
from simplejson import loads
except ImportError:
from json import loads
from werkzeug.exceptions import BadRequest
from werkzeug.utils import cached_property
from werkzeug.http import dump_options_header, parse_options_header
from werkzeug._compat import wsgi_decoding_dance
def is_known_charset(charset):
"""Checks if the given charset is known to Python."""
try:
codecs.lookup(charset)
except LookupError:
return False
return True
class JSONRequestMixin(object):
"""Add json method to a request object. This will parse the input data
through simplejson if possible.
:exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type
is not json or if the data itself cannot be parsed as json.
"""
@cached_property
def json(self):
"""Get the result of simplejson.loads if possible."""
if 'json' not in self.environ.get('CONTENT_TYPE', ''):
raise BadRequest('Not a JSON request')
try:
return loads(self.data)
except Exception:
raise BadRequest('Unable to read JSON request')
class ProtobufRequestMixin(object):
"""Add protobuf parsing method to a request object. This will parse the
input data through `protobuf`_ if possible.
:exc:`~werkzeug.exceptions.BadRequest` will be raised if the content-type
is not protobuf or if the data itself cannot be parsed property.
.. _protobuf: http://code.google.com/p/protobuf/
"""
#: by default the :class:`ProtobufRequestMixin` will raise a
#: :exc:`~werkzeug.exceptions.BadRequest` if the object is not
#: initialized. You can bypass that check by setting this
#: attribute to `False`.
protobuf_check_initialization = True
def parse_protobuf(self, proto_type):
"""Parse the data into an instance of proto_type."""
if 'protobuf' not in self.environ.get('CONTENT_TYPE', ''):
raise BadRequest('Not a Protobuf request')
obj = proto_type()
try:
obj.ParseFromString(self.data)
except Exception:
raise BadRequest("Unable to parse Protobuf request")
# Fail if not all required fields are set
if self.protobuf_check_initialization and not obj.IsInitialized():
raise BadRequest("Partial Protobuf request")
return obj
class RoutingArgsRequestMixin(object):
"""This request mixin adds support for the wsgiorg routing args
`specification`_.
.. _specification: http://www.wsgi.org/wsgi/Specifications/routing_args
"""
def _get_routing_args(self):
return self.environ.get('wsgiorg.routing_args', (()))[0]
def _set_routing_args(self, value):
if self.shallow:
raise RuntimeError('A shallow request tried to modify the WSGI '
'environment. If you really want to do that, '
'set `shallow` to False.')
self.environ['wsgiorg.routing_args'] = (value, self.routing_vars)
routing_args = property(_get_routing_args, _set_routing_args, doc='''
The positional URL arguments as `tuple`.''')
del _get_routing_args, _set_routing_args
def _get_routing_vars(self):
rv = self.environ.get('wsgiorg.routing_args')
if rv is not None:
return rv[1]
rv = {}
if not self.shallow:
self.routing_vars = rv
return rv
def _set_routing_vars(self, value):
if self.shallow:
raise RuntimeError('A shallow request tried to modify the WSGI '
'environment. If you really want to do that, '
'set `shallow` to False.')
self.environ['wsgiorg.routing_args'] = (self.routing_args, value)
routing_vars = property(_get_routing_vars, _set_routing_vars, doc='''
The keyword URL arguments as `dict`.''')
del _get_routing_vars, _set_routing_vars
class ReverseSlashBehaviorRequestMixin(object):
"""This mixin reverses the trailing slash behavior of :attr:`script_root`
and :attr:`path`. This makes it possible to use :func:`~urlparse.urljoin`
directly on the paths.
Because it changes the behavior or :class:`Request` this class has to be
mixed in *before* the actual request class::
class MyRequest(ReverseSlashBehaviorRequestMixin, Request):
pass
This example shows the differences (for an application mounted on
`/application` and the request going to `/application/foo/bar`):
+---------------+-------------------+---------------------+
| | normal behavior | reverse behavior |
+===============+===================+=====================+
| `script_root` | ``/application`` | ``/application/`` |
+---------------+-------------------+---------------------+
| `path` | ``/foo/bar`` | ``foo/bar`` |
+---------------+-------------------+---------------------+
"""
@cached_property
def path(self):
"""Requested path as unicode. This works a bit like the regular path
info in the WSGI environment but will not include a leading slash.
"""
path = wsgi_decoding_dance(self.environ.get('PATH_INFO') or '',
self.charset, self.encoding_errors)
return path.lstrip('/')
@cached_property
def script_root(self):
"""The root path of the script includling a trailing slash."""
path = wsgi_decoding_dance(self.environ.get('SCRIPT_NAME') or '',
self.charset, self.encoding_errors)
return path.rstrip('/') + '/'
class DynamicCharsetRequestMixin(object):
""""If this mixin is mixed into a request class it will provide
a dynamic `charset` attribute. This means that if the charset is
transmitted in the content type headers it's used from there.
Because it changes the behavior or :class:`Request` this class has
to be mixed in *before* the actual request class::
class MyRequest(DynamicCharsetRequestMixin, Request):
pass
By default the request object assumes that the URL charset is the
same as the data charset. If the charset varies on each request
based on the transmitted data it's not a good idea to let the URLs
change based on that. Most browsers assume either utf-8 or latin1
for the URLs if they have troubles figuring out. It's strongly
recommended to set the URL charset to utf-8::
class MyRequest(DynamicCharsetRequestMixin, Request):
url_charset = 'utf-8'
.. versionadded:: 0.6
"""
#: the default charset that is assumed if the content type header
#: is missing or does not contain a charset parameter. The default
#: is latin1 which is what HTTP specifies as default charset.
#: You may however want to set this to utf-8 to better support
#: browsers that do not transmit a charset for incoming data.
default_charset = 'latin1'
def unknown_charset(self, charset):
"""Called if a charset was provided but is not supported by
the Python codecs module. By default latin1 is assumed then
to not lose any information, you may override this method to
change the behavior.
:param charset: the charset that was not found.
:return: the replacement charset.
"""
return 'latin1'
@cached_property
def charset(self):
"""The charset from the content type."""
header = self.environ.get('CONTENT_TYPE')
if header:
ct, options = parse_options_header(header)
charset = options.get('charset')
if charset:
if is_known_charset(charset):
return charset
return self.unknown_charset(charset)
return self.default_charset
class DynamicCharsetResponseMixin(object):
"""If this mixin is mixed into a response class it will provide
a dynamic `charset` attribute. This means that if the charset is
looked up and stored in the `Content-Type` header and updates
itself automatically. This also means a small performance hit but
can be useful if you're working with different charsets on
responses.
Because the charset attribute is no a property at class-level, the
default value is stored in `default_charset`.
Because it changes the behavior or :class:`Response` this class has
to be mixed in *before* the actual response class::
class MyResponse(DynamicCharsetResponseMixin, Response):
pass
.. versionadded:: 0.6
"""
#: the default charset.
default_charset = 'utf-8'
def _get_charset(self):
header = self.headers.get('content-type')
if header:
charset = parse_options_header(header)[1].get('charset')
if charset:
return charset
return self.default_charset
def _set_charset(self, charset):
header = self.headers.get('content-type')
ct, options = parse_options_header(header)
if not ct:
raise TypeError('Cannot set charset if Content-Type '
'header is missing.')
options['charset'] = charset
self.headers['Content-Type'] = dump_options_header(ct, options)
charset = property(_get_charset, _set_charset, doc="""
The charset for the response. It's stored inside the
Content-Type header as a parameter.""")
del _get_charset, _set_charset
|
jeremydane/Info3180-Project4
|
server/lib/werkzeug/contrib/wrappers.py
|
Python
|
apache-2.0
| 10,609
|
#!/usr/bin/env python
"""
GeneOntology class specific tests
The database must be up and running for these tests to pass
See /htsint/database/HOWTO
"""
import sys,os,unittest,time,re,time
import matplotlib as mpl
if mpl.get_backend() != 'agg':
mpl.use('agg')
from htsint.database import ask_upass
from htsint import GeneOntology
## global variables
UPASS = ask_upass()
## test class for the main window function
class GeneOntologyTest(unittest.TestCase):
"""
Run a number of tests using taxa id 7227
"""
def setUp(self):
"""
simple setup
"""
self.taxId = '5476'
def testCheckTaxon(self):
"""
ensure taxon check works
"""
go = GeneOntology([self.taxId],upass=UPASS)
go.check_taxon(self.taxId)
def test01GetDicts(self):
"""
ensure we can create, save and retrieve gene2go and go2gene dictionaries
"""
termsPath = 'terms.pickle'
if os.path.exists(termsPath) == True:
os.remove(termsPath)
go = GeneOntology([self.taxId],upass=UPASS,idType='ncbi',useIea=True,\
aspect='biological_process')
go.create_dicts(termsPath)
gene2go, go2gene = go.load_dicts(termsPath)
print("there are %s genes"%(len(gene2go.keys())))
print("there are %s terms"%(len(go2gene.keys())))
#def test02CreateGoGraph(self):
# """
# ensure we can create, save and retrieve the gograph
# """
#
# termsPickle = 'terms.pickle'
# graphPickle = 'graph.pickle'
#
# go = GeneOntology(self.taxId,upass=UPASS,idType='ncbi',useIea=True)
# G = go.create_gograph(termsPath=termsPickle,graphPath=graphPickle)
#
# for picklePath in [termsPickle,graphPickle]:
# if os.path.exists(picklePath):
# os.remove(picklePath)
### Run the tests
if __name__ == '__main__':
unittest.main()
|
ajrichards/htsint
|
unittests/GeneOntologyTest.py
|
Python
|
bsd-3-clause
| 1,983
|
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
import re
class JSChecker(object):
def __init__(self, input_api, file_filter=None):
self.input_api = input_api
if file_filter:
self.file_filter = file_filter
else:
self.file_filter = lambda x: True
def RegexCheck(self, line_number, line, regex, message):
"""Searches for |regex| in |line| to check for a particular style
violation, returning a message like the one below if the regex matches.
The |regex| must have exactly one capturing group so that the relevant
part of |line| can be highlighted. If more groups are needed, use
"(?:...)" to make a non-capturing group. Sample message:
line 6: Use var instead of const.
const foo = bar();
^^^^^
"""
match = re.search(regex, line)
if match:
assert len(match.groups()) == 1
start = match.start(1)
length = match.end(1) - start
return ' line %d: %s\n%s\n%s' % (
line_number,
message,
line,
self.error_highlight(start, length))
return ''
def ConstCheck(self, i, line):
"""Check for use of the 'const' keyword."""
if re.search(r'\*\s+@const', line):
# Probably a JsDoc line
return ''
return self.RegexCheck(i, line, r'(?:^|\s|\()(const)\s',
'Use var instead of const.')
def error_highlight(self, start, length):
"""Takes a start position and a length, and produces a row of '^'s to
highlight the corresponding part of a string.
"""
return start * ' ' + length * '^'
def _makeErrorOrWarning(self, error_text, filename):
return error_text
def RunChecks(self):
"""Check for violations of the Chromium JavaScript style guide. See
http://chromium.org/developers/web-development-style-guide#TOC-JavaScript
"""
import sys
import warnings
old_path = sys.path
old_filters = warnings.filters
try:
base_path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '..'))
closure_linter_path = os.path.join(
base_path, 'third_party', 'closure_linter')
gflags_path = os.path.join(
base_path, 'third_party', 'python_gflags')
sys.path.insert(0, closure_linter_path)
sys.path.insert(0, gflags_path)
warnings.filterwarnings('ignore', category=DeprecationWarning)
from closure_linter import checker, errors
from closure_linter.common import errorhandler
finally:
sys.path = old_path
warnings.filters = old_filters
class ErrorHandlerImpl(errorhandler.ErrorHandler):
"""Filters out errors that don't apply to Chromium JavaScript code."""
def __init__(self):
self._errors = []
def HandleFile(self, filename, first_token):
self._filename = filename
def HandleError(self, error):
if (self._valid(error)):
error.filename = self._filename
self._errors.append(error)
def GetErrors(self):
return self._errors
def HasErrors(self):
return bool(self._errors)
def _valid(self, error):
"""Check whether an error is valid. Most errors are valid, with a few
exceptions which are listed here.
"""
is_grit_statement = bool(
re.search("</?(include|if)", error.token.line))
return not is_grit_statement and error.code not in [
errors.JSDOC_ILLEGAL_QUESTION_WITH_PIPE,
errors.JSDOC_TAG_DESCRIPTION_ENDS_WITH_INVALID_CHARACTER,
errors.MISSING_JSDOC_TAG_THIS,
]
results = []
try:
affected_files = self.input_api.AffectedFiles(
file_filter=self.file_filter,
include_deletes=False)
except:
affected_files = []
def ShouldCheck(f):
if f.filename.endswith('.js'):
return True
if f.filename.endswith('.html'):
return True
return False
affected_js_files = filter(ShouldCheck, affected_files)
for f in affected_js_files:
error_lines = []
for i, line in enumerate(f.contents_as_lines, start=1):
error_lines += filter(None, [
self.ConstCheck(i, line),
])
# Use closure_linter to check for several different errors
import gflags as flags
flags.FLAGS.strict = True
error_handler = ErrorHandlerImpl()
js_checker = checker.JavaScriptStyleChecker(error_handler)
js_checker.Check(os.path.join(
self.input_api.repository_root,
f.filename))
for error in error_handler.GetErrors():
highlight = self.error_highlight(
error.token.start_index, error.token.length)
error_msg = ' line %d: E%04d: %s\n%s\n%s' % (
error.token.line_number,
error.code,
error.message,
error.token.line.rstrip(),
highlight)
error_lines.append(error_msg)
if error_lines:
error_lines = [
'Found JavaScript style violations in %s:' %
f.filename] + error_lines
results.append(self._makeErrorOrWarning(
'\n'.join(error_lines), f.filename))
return results
def RunChecks(input_api):
return JSChecker(input_api).RunChecks()
|
guorendong/iridium-browser-ubuntu
|
third_party/trace-viewer/hooks/js_checks.py
|
Python
|
bsd-3-clause
| 5,418
|
# Copyright 2015-2018 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
from azure_common import BaseTest, arm_template
class CognitiveServiceTest(BaseTest):
def setUp(self):
super(CognitiveServiceTest, self).setUp()
def test_cognitive_service_schema_validate(self):
with self.sign_out_patch():
p = self.load_policy({
'name': 'test-azure-cognitive-service',
'resource': 'azure.cognitiveservice'
}, validate=True)
self.assertTrue(p)
@arm_template('cognitive-service.json')
def test_find_by_name(self):
p = self.load_policy({
'name': 'test-azure-cog-serv',
'resource': 'azure.cognitiveservice',
'filters': [
{'type': 'value',
'key': 'name',
'op': 'eq',
'value': 'cctest-cog-serv'}],
})
resources = p.run()
self.assertEqual(len(resources), 1)
|
FireballDWF/cloud-custodian
|
tools/c7n_azure/tests/test_cognitive_service.py
|
Python
|
apache-2.0
| 1,581
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""HTTP utility code shared by clients and servers.
This module also defines the `HTTPServerRequest` class which is exposed
via `tornado.web.RequestHandler.request`.
"""
from __future__ import absolute_import, division, print_function, with_statement
import calendar
import collections
import copy
import datetime
import email.utils
import numbers
import re
import time
from tornado.escape import native_str, parse_qs_bytes, utf8
from tornado.log import gen_log
from tornado.util import ObjectDict
try:
import Cookie # py2
except ImportError:
import http.cookies as Cookie # py3
try:
from httplib import responses # py2
except ImportError:
from http.client import responses # py3
# responses is unused in this file, but we re-export it to other files.
# Reference it so pyflakes doesn't complain.
responses
try:
from urllib import urlencode # py2
except ImportError:
from urllib.parse import urlencode # py3
try:
from ssl import SSLError
except ImportError:
# ssl is unavailable on app engine.
class SSLError(Exception):
pass
class _NormalizedHeaderCache(dict):
"""Dynamic cached mapping of header names to Http-Header-Case.
Implemented as a dict subclass so that cache hits are as fast as a
normal dict lookup, without the overhead of a python function
call.
>>> normalized_headers = _NormalizedHeaderCache(10)
>>> normalized_headers["coNtent-TYPE"]
'Content-Type'
"""
def __init__(self, size):
super(_NormalizedHeaderCache, self).__init__()
self.size = size
self.queue = collections.deque()
def __missing__(self, key):
normalized = "-".join([w.capitalize() for w in key.split("-")])
self[key] = normalized
self.queue.append(key)
if len(self.queue) > self.size:
# Limit the size of the cache. LRU would be better, but this
# simpler approach should be fine. In Python 2.7+ we could
# use OrderedDict (or in 3.2+, @functools.lru_cache).
old_key = self.queue.popleft()
del self[old_key]
return normalized
_normalized_headers = _NormalizedHeaderCache(1000)
class HTTPHeaders(dict):
    """A dictionary that maintains ``Http-Header-Case`` for all keys.

    Supports multiple values per key via a pair of new methods,
    `add()` and `get_list()`.  The regular dictionary interface
    returns a single value per key, with multiple values joined by a
    comma.

    >>> h = HTTPHeaders({"content-type": "text/html"})
    >>> list(h.keys())
    ['Content-Type']
    >>> h["Content-Type"]
    'text/html'
    >>> h.add("Set-Cookie", "A=B")
    >>> h.add("Set-Cookie", "C=D")
    >>> h["set-cookie"]
    'A=B,C=D'
    >>> h.get_list("set-cookie")
    ['A=B', 'C=D']
    >>> for (k,v) in sorted(h.get_all()):
    ...    print('%s: %s' % (k,v))
    ...
    Content-Type: text/html
    Set-Cookie: A=B
    Set-Cookie: C=D
    """
    def __init__(self, *args, **kwargs):
        # Don't pass args or kwargs to dict.__init__, as it will bypass
        # our __setitem__
        dict.__init__(self)
        self._as_list = {}    # normalized name -> list of individual values
        self._last_key = None  # most recently added name, for continuation lines
        if (len(args) == 1 and len(kwargs) == 0 and
                isinstance(args[0], HTTPHeaders)):
            # Copy constructor
            for k, v in args[0].get_all():
                self.add(k, v)
        else:
            # Dict-style initialization
            self.update(*args, **kwargs)

    # new public methods

    def add(self, name, value):
        """Adds a new value for the given key."""
        norm_name = _normalized_headers[name]
        self._last_key = norm_name
        if norm_name in self:
            # bypass our override of __setitem__ since it modifies _as_list
            dict.__setitem__(self, norm_name,
                             native_str(self[norm_name]) + ',' +
                             native_str(value))
            self._as_list[norm_name].append(value)
        else:
            self[norm_name] = value

    def get_list(self, name):
        """Returns all values for the given header as a list."""
        norm_name = _normalized_headers[name]
        return self._as_list.get(norm_name, [])

    def get_all(self):
        """Returns an iterable of all (name, value) pairs.

        If a header has multiple values, multiple pairs will be
        returned with the same name.
        """
        for name, values in self._as_list.items():
            for value in values:
                yield (name, value)

    def parse_line(self, line):
        """Updates the dictionary with a single header line.

        Raises `HTTPInputError` on malformed input.

        >>> h = HTTPHeaders()
        >>> h.parse_line("Content-Type: text/html")
        >>> h.get('content-type')
        'text/html'
        """
        if line[0].isspace():
            # continuation of a multi-line header
            if self._last_key is None:
                # BUGFIX: previously raised TypeError/KeyError when the very
                # first header line started with whitespace; treat it as
                # malformed input instead.
                raise HTTPInputError(
                    "first header line cannot start with whitespace")
            new_part = ' ' + line.lstrip()
            self._as_list[self._last_key][-1] += new_part
            dict.__setitem__(self, self._last_key,
                             self[self._last_key] + new_part)
        else:
            try:
                name, value = line.split(":", 1)
            except ValueError:
                # BUGFIX: previously leaked a bare ValueError to callers.
                raise HTTPInputError("no colon in header line")
            self.add(name, value.strip())

    @classmethod
    def parse(cls, headers):
        """Returns a dictionary from HTTP header text.

        >>> h = HTTPHeaders.parse("Content-Type: text/html\\r\\nContent-Length: 42\\r\\n")
        >>> sorted(h.items())
        [('Content-Length', '42'), ('Content-Type', 'text/html')]
        """
        h = cls()
        for line in headers.splitlines():
            if line:
                h.parse_line(line)
        return h

    # dict implementation overrides

    def __setitem__(self, name, value):
        norm_name = _normalized_headers[name]
        dict.__setitem__(self, norm_name, value)
        self._as_list[norm_name] = [value]

    def __getitem__(self, name):
        return dict.__getitem__(self, _normalized_headers[name])

    def __delitem__(self, name):
        norm_name = _normalized_headers[name]
        dict.__delitem__(self, norm_name)
        del self._as_list[norm_name]

    def __contains__(self, name):
        norm_name = _normalized_headers[name]
        return dict.__contains__(self, norm_name)

    def get(self, name, default=None):
        return dict.get(self, _normalized_headers[name], default)

    def update(self, *args, **kwargs):
        # dict.update bypasses our __setitem__
        for k, v in dict(*args, **kwargs).items():
            self[k] = v

    def copy(self):
        # default implementation returns dict(self), not the subclass
        return HTTPHeaders(self)
class HTTPServerRequest(object):
    """A single HTTP request.

    All attributes are type `str` unless otherwise noted.

    .. attribute:: method
       HTTP request method, e.g. "GET" or "POST"

    .. attribute:: uri
       The requested uri.

    .. attribute:: path
       The path portion of `uri`

    .. attribute:: query
       The query portion of `uri`

    .. attribute:: version
       HTTP version specified in request, e.g. "HTTP/1.1"

    .. attribute:: headers
       `.HTTPHeaders` dictionary-like object for request headers.  Acts like
       a case-insensitive dictionary with additional methods for repeated
       headers.

    .. attribute:: body
       Request body, if present, as a byte string.

    .. attribute:: remote_ip
       Client's IP address as a string.  If ``HTTPServer.xheaders`` is set,
       will pass along the real IP address provided by a load balancer
       in the ``X-Real-Ip`` or ``X-Forwarded-For`` header.

    .. versionchanged:: 3.1
       The list format of ``X-Forwarded-For`` is now supported.

    .. attribute:: protocol
       The protocol used, either "http" or "https".  If ``HTTPServer.xheaders``
       is set, will pass along the protocol used by a load balancer if
       reported via an ``X-Scheme`` header.

    .. attribute:: host
       The requested hostname, usually taken from the ``Host`` header.

    .. attribute:: arguments
       GET/POST arguments are available in the arguments property, which
       maps arguments names to lists of values (to support multiple values
       for individual names). Names are of type `str`, while arguments
       are byte strings.  Note that this is different from
       `.RequestHandler.get_argument`, which returns argument values as
       unicode strings.

    .. attribute:: query_arguments
       Same format as ``arguments``, but contains only arguments extracted
       from the query string.

       .. versionadded:: 3.2

    .. attribute:: body_arguments
       Same format as ``arguments``, but contains only arguments extracted
       from the request body.

       .. versionadded:: 3.2

    .. attribute:: files
       File uploads are available in the files property, which maps file
       names to lists of `.HTTPFile`.

    .. attribute:: connection
       An HTTP request is attached to a single HTTP connection, which can
       be accessed through the "connection" attribute. Since connections
       are typically kept open in HTTP/1.1, multiple requests can be handled
       sequentially on a single connection.

    .. versionchanged:: 4.0
       Moved from ``tornado.httpserver.HTTPRequest``.
    """
    def __init__(self, method=None, uri=None, version="HTTP/1.0", headers=None,
                 body=None, host=None, files=None, connection=None,
                 start_line=None):
        if start_line is not None:
            # A parsed start line overrides the individual
            # method/uri/version arguments.
            method, uri, version = start_line
        self.method = method
        self.uri = uri
        self.version = version
        self.headers = headers or HTTPHeaders()
        # BUGFIX: default to b"" (not the text string "") -- the body is
        # documented and treated everywhere else (write() asserts bytes,
        # _parse_body() parses bytes) as a byte string.
        self.body = body or b""

        # set remote IP and protocol
        context = getattr(connection, 'context', None)
        self.remote_ip = getattr(context, 'remote_ip', None)
        self.protocol = getattr(context, 'protocol', "http")

        self.host = host or self.headers.get("Host") or "127.0.0.1"
        self.files = files or {}
        self.connection = connection
        self._start_time = time.time()
        self._finish_time = None

        self.path, sep, self.query = uri.partition('?')
        self.arguments = parse_qs_bytes(self.query, keep_blank_values=True)
        self.query_arguments = copy.deepcopy(self.arguments)
        self.body_arguments = {}

    def supports_http_1_1(self):
        """Returns True if this request supports HTTP/1.1 semantics.

        .. deprecated:: 4.0
           Applications are less likely to need this information with the
           introduction of `.HTTPConnection`.  If you still need it, access
           the ``version`` attribute directly.
        """
        return self.version == "HTTP/1.1"

    @property
    def cookies(self):
        """A dictionary of Cookie.Morsel objects."""
        if not hasattr(self, "_cookies"):
            self._cookies = Cookie.SimpleCookie()
            if "Cookie" in self.headers:
                try:
                    self._cookies.load(
                        native_str(self.headers["Cookie"]))
                except Exception:
                    # A malformed Cookie header yields an empty (plain dict)
                    # cookie collection rather than an error.
                    self._cookies = {}
        return self._cookies

    def write(self, chunk, callback=None):
        """Writes the given chunk to the response stream.

        .. deprecated:: 4.0
           Use ``request.connection`` and the `.HTTPConnection` methods
           to write the response.
        """
        assert isinstance(chunk, bytes)
        self.connection.write(chunk, callback=callback)

    def finish(self):
        """Finishes this HTTP request on the open connection.

        .. deprecated:: 4.0
           Use ``request.connection`` and the `.HTTPConnection` methods
           to write the response.
        """
        self.connection.finish()
        self._finish_time = time.time()

    def full_url(self):
        """Reconstructs the full URL for this request."""
        return self.protocol + "://" + self.host + self.uri

    def request_time(self):
        """Returns the amount of time it took for this request to execute."""
        if self._finish_time is None:
            return time.time() - self._start_time
        else:
            return self._finish_time - self._start_time

    def get_ssl_certificate(self, binary_form=False):
        """Returns the client's SSL certificate, if any.

        To use client certificates, the HTTPServer must have been constructed
        with cert_reqs set in ssl_options, e.g.::

            server = HTTPServer(app,
                ssl_options=dict(
                    certfile="foo.crt",
                    keyfile="foo.key",
                    cert_reqs=ssl.CERT_REQUIRED,
                    ca_certs="cacert.crt"))

        By default, the return value is a dictionary (or None, if no
        client certificate is present).  If ``binary_form`` is true, a
        DER-encoded form of the certificate is returned instead.  See
        SSLSocket.getpeercert() in the standard library for more
        details.
        http://docs.python.org/library/ssl.html#sslsocket-objects
        """
        try:
            return self.connection.stream.socket.getpeercert(
                binary_form=binary_form)
        except SSLError:
            return None

    def _parse_body(self):
        # Merge body arguments/files into self.arguments; query arguments
        # keep precedence (they were inserted first).
        parse_body_arguments(
            self.headers.get("Content-Type", ""), self.body,
            self.body_arguments, self.files,
            self.headers)

        for k, v in self.body_arguments.items():
            self.arguments.setdefault(k, []).extend(v)

    def __repr__(self):
        attrs = ("protocol", "host", "method", "uri", "version", "remote_ip")
        args = ", ".join(["%s=%r" % (n, getattr(self, n)) for n in attrs])
        return "%s(%s, headers=%s)" % (
            self.__class__.__name__, args, dict(self.headers))
class HTTPInputError(Exception):
    """Raised when a malformed HTTP request or response is received
    from a remote source.

    .. versionadded:: 4.0
    """
class HTTPOutputError(Exception):
    """Raised for errors while producing HTTP output.

    .. versionadded:: 4.0
    """
class HTTPServerConnectionDelegate(object):
    """Interface implemented by applications that accept requests
    from an `.HTTPServer`.

    .. versionadded:: 4.0
    """

    def start_request(self, server_conn, request_conn):
        """Notification that a new request has begun.

        :arg server_conn: an opaque object representing the long-lived
            (e.g. tcp-level) connection.
        :arg request_conn: a `.HTTPConnection` object for a single
            request/response exchange.

        Implementations must return a `.HTTPMessageDelegate`.
        """
        raise NotImplementedError()

    def on_close(self, server_conn):
        """Notification that a connection has been closed.

        :arg server_conn: a server connection previously passed to
            ``start_request``.
        """
        pass
class HTTPMessageDelegate(object):
    """Interface for handling a single HTTP request or response.

    .. versionadded:: 4.0
    """

    def headers_received(self, start_line, headers):
        """Invoked once the HTTP headers have been received and parsed.

        :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`
            depending on whether this is a client or server message.
        :arg headers: a `.HTTPHeaders` instance.

        Some `.HTTPConnection` methods can only be called during
        ``headers_received``.  May return a `.Future`; if it does, the
        body will not be read until the future resolves.
        """
        pass

    def data_received(self, chunk):
        """Invoked for each chunk of body data.

        May return a `.Future` for flow control.
        """
        pass

    def finish(self):
        """Invoked after the final chunk of data has been received."""
        pass

    def on_connection_close(self):
        """Invoked if the connection dies before the request completes.

        If ``headers_received`` is called, either ``finish`` or
        ``on_connection_close`` will be called, but not both.
        """
        pass
class HTTPConnection(object):
    """Interface through which applications write their responses.

    .. versionadded:: 4.0
    """

    def write_headers(self, start_line, headers, chunk=None, callback=None):
        """Write an HTTP header block.

        :arg start_line: a `.RequestStartLine` or `.ResponseStartLine`.
        :arg headers: a `.HTTPHeaders` instance.
        :arg chunk: the first (optional) chunk of data.  This is an
            optimization so that small responses can be written in the
            same call as their headers.
        :arg callback: a callback to be run when the write is complete.

        Returns a `.Future` if no callback is given.
        """
        raise NotImplementedError()

    def write(self, chunk, callback=None):
        """Write a chunk of body data.

        The callback will be run when the write is complete.  If no
        callback is given, returns a Future.
        """
        raise NotImplementedError()

    def finish(self):
        """Signal that the last body data has been written."""
        raise NotImplementedError()
def url_concat(url, args):
    """Concatenate url and argument dictionary regardless of whether
    url has existing query parameters.

    >>> url_concat("http://example.com/foo?a=b", dict(c="d"))
    'http://example.com/foo?a=b&c=d'
    """
    if not args:
        return url
    # BUGFIX: use a slice instead of url[-1] so an empty url does not raise
    # IndexError; ''[-1:] is '' and correctly falls into the '?' branch.
    if url[-1:] not in ('?', '&'):
        url += '&' if ('?' in url) else '?'
    return url + urlencode(args)
class HTTPFile(ObjectDict):
    """A single file uploaded via a form.

    For backwards compatibility, its instance attributes are also
    accessible as dictionary keys.

    * ``filename``
    * ``body``
    * ``content_type``
    """
def _parse_request_range(range_header):
"""Parses a Range header.
Returns either ``None`` or tuple ``(start, end)``.
Note that while the HTTP headers use inclusive byte positions,
this method returns indexes suitable for use in slices.
>>> start, end = _parse_request_range("bytes=1-2")
>>> start, end
(1, 3)
>>> [0, 1, 2, 3, 4][start:end]
[1, 2]
>>> _parse_request_range("bytes=6-")
(6, None)
>>> _parse_request_range("bytes=-6")
(-6, None)
>>> _parse_request_range("bytes=-0")
(None, 0)
>>> _parse_request_range("bytes=")
(None, None)
>>> _parse_request_range("foo=42")
>>> _parse_request_range("bytes=1-2,6-10")
Note: only supports one range (ex, ``bytes=1-2,6-10`` is not allowed).
See [0] for the details of the range header.
[0]: http://greenbytes.de/tech/webdav/draft-ietf-httpbis-p5-range-latest.html#byte.ranges
"""
unit, _, value = range_header.partition("=")
unit, value = unit.strip(), value.strip()
if unit != "bytes":
return None
start_b, _, end_b = value.partition("-")
try:
start = _int_or_none(start_b)
end = _int_or_none(end_b)
except ValueError:
return None
if end is not None:
if start is None:
if end != 0:
start = -end
end = None
else:
end += 1
return (start, end)
def _get_content_range(start, end, total):
"""Returns a suitable Content-Range header:
>>> print(_get_content_range(None, 1, 4))
bytes 0-0/4
>>> print(_get_content_range(1, 3, 4))
bytes 1-2/4
>>> print(_get_content_range(None, None, 4))
bytes 0-3/4
"""
start = start or 0
end = (end or total) - 1
return "bytes %s-%s/%s" % (start, end, total)
def _int_or_none(val):
val = val.strip()
if val == "":
return None
return int(val)
def parse_body_arguments(content_type, body, arguments, files, headers=None):
    """Parses a form request body.
    Supports ``application/x-www-form-urlencoded`` and
    ``multipart/form-data``.  The ``content_type`` parameter should be
    a string and ``body`` should be a byte string.  The ``arguments``
    and ``files`` parameters are dictionaries that will be updated
    with the parsed contents.
    """
    if headers and 'Content-Encoding' in headers:
        # Compressed/encoded bodies are not decoded here; warn and skip
        # parsing entirely rather than producing garbage arguments.
        gen_log.warning("Unsupported Content-Encoding: %s",
                        headers['Content-Encoding'])
        return
    if content_type.startswith("application/x-www-form-urlencoded"):
        try:
            uri_arguments = parse_qs_bytes(native_str(body), keep_blank_values=True)
        except Exception as e:
            # A malformed body is logged and treated as empty, not fatal.
            gen_log.warning('Invalid x-www-form-urlencoded body: %s', e)
            uri_arguments = {}
        for name, values in uri_arguments.items():
            if values:
                arguments.setdefault(name, []).extend(values)
    elif content_type.startswith("multipart/form-data"):
        # Extract the boundary parameter from the Content-Type header;
        # the for/else warns only if no boundary was found at all.
        fields = content_type.split(";")
        for field in fields:
            k, sep, v = field.strip().partition("=")
            if k == "boundary" and v:
                parse_multipart_form_data(utf8(v), body, arguments, files)
                break
        else:
            gen_log.warning("Invalid multipart/form-data")
def parse_multipart_form_data(boundary, data, arguments, files):
    """Parses a ``multipart/form-data`` body.
    The ``boundary`` and ``data`` parameters are both byte strings.
    The dictionaries given in the arguments and files parameters
    will be updated with the contents of the body.
    """
    # The standard allows for the boundary to be quoted in the header,
    # although it's rare (it happens at least for google app engine
    # xmpp).  I think we're also supposed to handle backslash-escapes
    # here but I'll save that until we see a client that uses them
    # in the wild.
    if boundary.startswith(b'"') and boundary.endswith(b'"'):
        boundary = boundary[1:-1]
    # The body must terminate with a final "--boundary--" marker; anything
    # after it is ignored.
    final_boundary_index = data.rfind(b"--" + boundary + b"--")
    if final_boundary_index == -1:
        gen_log.warning("Invalid multipart/form-data: no final boundary")
        return
    parts = data[:final_boundary_index].split(b"--" + boundary + b"\r\n")
    for part in parts:
        if not part:
            continue
        # Headers are separated from the part body by a blank line (CRLFCRLF).
        eoh = part.find(b"\r\n\r\n")
        if eoh == -1:
            gen_log.warning("multipart/form-data missing headers")
            continue
        headers = HTTPHeaders.parse(part[:eoh].decode("utf-8"))
        disp_header = headers.get("Content-Disposition", "")
        disposition, disp_params = _parse_header(disp_header)
        if disposition != "form-data" or not part.endswith(b"\r\n"):
            gen_log.warning("Invalid multipart/form-data")
            continue
        # Skip the 4-byte CRLFCRLF separator and drop the trailing CRLF
        # that precedes the next boundary.
        value = part[eoh + 4:-2]
        if not disp_params.get("name"):
            gen_log.warning("multipart/form-data value missing name")
            continue
        name = disp_params["name"]
        if disp_params.get("filename"):
            # A filename parameter marks this part as a file upload.
            ctype = headers.get("Content-Type", "application/unknown")
            files.setdefault(name, []).append(HTTPFile(
                filename=disp_params["filename"], body=value,
                content_type=ctype))
        else:
            arguments.setdefault(name, []).append(value)
def format_timestamp(ts):
    """Formats a timestamp in the format used by HTTP.

    The argument may be a numeric timestamp as returned by `time.time`,
    a time tuple as returned by `time.gmtime`, or a `datetime.datetime`
    object.

    >>> format_timestamp(1359312200)
    'Sun, 27 Jan 2013 18:43:20 GMT'
    """
    # Normalize every accepted input type to a numeric epoch timestamp,
    # then let email.utils render the RFC 1123 date string.
    if isinstance(ts, datetime.datetime):
        timestamp = calendar.timegm(ts.utctimetuple())
    elif isinstance(ts, (tuple, time.struct_time)):
        timestamp = calendar.timegm(ts)
    elif isinstance(ts, numbers.Real):
        timestamp = ts
    else:
        raise TypeError("unknown timestamp type: %r" % ts)
    return email.utils.formatdate(timestamp, usegmt=True)
# Parsed first line of an HTTP request, e.g. ("GET", "/foo", "HTTP/1.1").
RequestStartLine = collections.namedtuple(
    'RequestStartLine', ['method', 'path', 'version'])
def parse_request_start_line(line):
    """Returns a (method, path, version) tuple for an HTTP 1.x request line.

    The response is a `collections.namedtuple`.

    >>> parse_request_start_line("GET /foo HTTP/1.1")
    RequestStartLine(method='GET', path='/foo', version='HTTP/1.1')
    """
    pieces = line.split(" ")
    if len(pieces) != 3:
        # Exactly three space-separated tokens are required.
        raise HTTPInputError("Malformed HTTP request line")
    method, path, version = pieces
    if not version.startswith("HTTP/"):
        raise HTTPInputError(
            "Malformed HTTP version in HTTP Request-Line: %r" % version)
    return RequestStartLine(method, path, version)
# Parsed first line of an HTTP response, e.g. ("HTTP/1.1", 200, "OK").
ResponseStartLine = collections.namedtuple(
    'ResponseStartLine', ['version', 'code', 'reason'])
def parse_response_start_line(line):
    """Returns a (version, code, reason) tuple for an HTTP 1.x response line.

    The response is a `collections.namedtuple`.

    >>> parse_response_start_line("HTTP/1.1 200 OK")
    ResponseStartLine(version='HTTP/1.1', code=200, reason='OK')
    """
    line = native_str(line)
    # BUGFIX: use a raw string and escape the dot -- the old pattern
    # "(HTTP/1.[01]) ..." let '.' match any character, accepting malformed
    # versions such as "HTTP/1x1".
    match = re.match(r"(HTTP/1\.[01]) ([0-9]+) ([^\r]*)", line)
    if not match:
        raise HTTPInputError("Error parsing response start line")
    return ResponseStartLine(match.group(1), int(match.group(2)),
                             match.group(3))
# _parseparam and _parse_header are copied and modified from python2.7's cgi.py
# The original 2.7 version of this code did not correctly support some
# combinations of semicolons and double quotes.
# It has also been modified to support valueless parameters as seen in
# websocket extension negotiations.
def _parseparam(s):
while s[:1] == ';':
s = s[1:]
end = s.find(';')
while end > 0 and (s.count('"', 0, end) - s.count('\\"', 0, end)) % 2:
end = s.find(';', end + 1)
if end < 0:
end = len(s)
f = s[:end]
yield f.strip()
s = s[end:]
def _parse_header(line):
    """Parse a Content-type like header.
    Return the main content-type and a dictionary of options.
    """
    # _parseparam yields ';'-separated pieces (quote-aware); the first piece,
    # before any parameters, is the main value.
    parts = _parseparam(';' + line)
    key = next(parts)
    pdict = {}
    for p in parts:
        i = p.find('=')
        if i >= 0:
            name = p[:i].strip().lower()
            value = p[i + 1:].strip()
            # Strip surrounding quotes, then unescape: backslash pairs must be
            # collapsed before escaped quotes, or '\\"' would be mis-decoded.
            if len(value) >= 2 and value[0] == value[-1] == '"':
                value = value[1:-1]
                value = value.replace('\\\\', '\\').replace('\\"', '"')
            pdict[name] = value
        else:
            # Valueless parameter, as seen in websocket extension negotiations.
            pdict[p] = None
    return key, pdict
def _encode_header(key, pdict):
"""Inverse of _parse_header.
>>> _encode_header('permessage-deflate',
... {'client_max_window_bits': 15, 'client_no_context_takeover': None})
'permessage-deflate; client_max_window_bits=15; client_no_context_takeover'
"""
if not pdict:
return key
out = [key]
# Sort the parameters just to make it easy to test.
for k, v in sorted(pdict.items()):
if v is None:
out.append(k)
else:
# TODO: quote if necessary.
out.append('%s=%s' % (k, v))
return '; '.join(out)
def doctests():
    # Collect the doctest examples embedded in this module's docstrings
    # into a unittest-compatible suite.
    import doctest
    return doctest.DocTestSuite()
|
0xkag/tornado
|
tornado/httputil.py
|
Python
|
apache-2.0
| 27,943
|
#!/usr/bin/env python
"""This module contains tests for user API renderers."""
from grr.gui import api_test_lib
from grr.gui.api_plugins import user as user_plugin
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import flags
from grr.lib import flow
from grr.lib import hunts
from grr.lib import test_lib
from grr.lib import utils
from grr.lib.aff4_objects import users as aff4_users
class ApiUserApprovalsListRendererTest(test_lib.GRRBaseTest):
  """Test for ApiUserApprovalsListRenderer."""
  def setUp(self):
    super(ApiUserApprovalsListRendererTest, self).setUp()
    # One fake client plus the renderer under test.
    self.client_id = self.SetupClients(1)[0]
    self.renderer = user_plugin.ApiUserApprovalsListRenderer()
  def testRendersRequestedClientApproval(self):
    # Request a client approval, then verify the renderer lists exactly it.
    flow.GRRFlow.StartFlow(client_id=self.client_id,
                           flow_name="RequestClientApprovalFlow",
                           reason=self.token.reason,
                           subject_urn=self.client_id,
                           approver="approver",
                           token=self.token)
    args = user_plugin.ApiUserApprovalsListRendererArgs(approval_type="client")
    result = self.renderer.Render(args, token=self.token)
    self.assertEqual(result["offset"], 0)
    self.assertEqual(result["count"], 1)
    self.assertEqual(len(result["items"]), 1)
  def testRendersRequestedHuntAppoval(self):
    # NOTE(review): method name has a typo ("Appoval"); kept unchanged to
    # avoid altering the test id.
    # Create a placeholder hunt object, request approval for it, and verify
    # the renderer lists the single pending hunt approval.
    hunt_urn = aff4.ROOT_URN.Add("hunts").Add("H:ABCD1234")
    with aff4.FACTORY.Create(hunt_urn, aff4_type="AFF4Volume",
                             token=self.token) as _:
      pass
    flow.GRRFlow.StartFlow(flow_name="RequestHuntApprovalFlow",
                           reason=self.token.reason,
                           subject_urn=hunt_urn,
                           approver="approver",
                           token=self.token)
    args = user_plugin.ApiUserApprovalsListRendererArgs(approval_type="hunt")
    result = self.renderer.Render(args, token=self.token)
    self.assertEqual(result["offset"], 0)
    self.assertEqual(result["count"], 1)
    self.assertEqual(len(result["items"]), 1)
  def testRendersRequestedCronJobApproval(self):
    # Same shape as the hunt case, but for a cron job approval.
    cron_urn = aff4.ROOT_URN.Add("cron").Add("CronJobFoo")
    with aff4.FACTORY.Create(cron_urn, aff4_type="AFF4Volume",
                             token=self.token) as _:
      pass
    flow.GRRFlow.StartFlow(flow_name="RequestCronJobApprovalFlow",
                           reason=self.token.reason,
                           subject_urn=cron_urn,
                           approver="approver",
                           token=self.token)
    args = user_plugin.ApiUserApprovalsListRendererArgs(approval_type="cron")
    result = self.renderer.Render(args, token=self.token)
    self.assertEqual(result["offset"], 0)
    self.assertEqual(result["count"], 1)
    self.assertEqual(len(result["items"]), 1)
class ApiUserApprovalsListRendererRegressionTest(
    api_test_lib.ApiCallRendererRegressionTest):
  """Regression test for ApiUserApprovalsListRendererTest."""
  renderer = "ApiUserApprovalsListRenderer"
  def Run(self):
    # FakeTime pins timestamps so the recorded golden output is
    # deterministic across runs.
    with test_lib.FakeTime(42):
      self.CreateAdminUser("approver")
      clients = self.SetupClients(2)
      for client_id in clients:
        # Delete the certificate as it's being regenerated every time the
        # client is created.
        with aff4.FACTORY.Open(client_id, mode="rw",
                               token=self.token) as grr_client:
          grr_client.DeleteAttribute(grr_client.Schema.CERT)
      hunt = hunts.GRRHunt.StartHunt(
          hunt_name="GenericHunt", token=self.token)
    # One hunt approval and two client approvals, each at a distinct time.
    with test_lib.FakeTime(43):
      flow.GRRFlow.StartFlow(flow_name="RequestHuntApprovalFlow",
                             reason=self.token.reason,
                             subject_urn=hunt.urn,
                             approver="approver",
                             token=self.token)
    with test_lib.FakeTime(44):
      flow.GRRFlow.StartFlow(client_id=clients[0],
                             flow_name="RequestClientApprovalFlow",
                             reason=self.token.reason,
                             subject_urn=clients[0],
                             approver="approver",
                             token=self.token)
    with test_lib.FakeTime(45):
      flow.GRRFlow.StartFlow(client_id=clients[1],
                             flow_name="RequestClientApprovalFlow",
                             reason=self.token.reason,
                             subject_urn=clients[1],
                             approver="approver",
                             token=self.token)
    # Grant the second client approval so the output mixes granted and
    # pending approvals.
    with test_lib.FakeTime(84):
      approver_token = access_control.ACLToken(username="approver")
      flow.GRRFlow.StartFlow(client_id=clients[1],
                             flow_name="GrantClientApprovalFlow",
                             reason=self.token.reason,
                             delegate=self.token.username,
                             subject_urn=clients[1],
                             token=approver_token)
    with test_lib.FakeTime(126):
      self.Check("GET", "/api/users/me/approvals/client")
      # The hunt id is random; replace it so golden output stays stable.
      self.Check("GET", "/api/users/me/approvals/hunt",
                 replace={utils.SmartStr(hunt.urn.Basename()): "H:123456"})
class ApiUserSettingsRendererTest(test_lib.GRRBaseTest):
  """Test for ApiUserSettingsRenderer."""
  def setUp(self):
    super(ApiUserSettingsRendererTest, self).setUp()
    self.renderer = user_plugin.ApiUserSettingsRenderer()
  def testRendersSettingsForUserCorrespondingToToken(self):
    # Store GUI settings for user "foo", then render with a token for
    # "foo" and verify the stored values come back.
    with aff4.FACTORY.Create(
        aff4.ROOT_URN.Add("users").Add("foo"),
        aff4_type="GRRUser", mode="w", token=self.token) as user_fd:
      user_fd.Set(user_fd.Schema.GUI_SETTINGS,
                  aff4_users.GUISettings(mode="ADVANCED",
                                         canary_mode=True,
                                         docs_location="REMOTE"))
    result = self.renderer.Render(None,
                                  token=access_control.ACLToken(username="foo"))
    self.assertEqual(result["value"]["mode"]["value"], "ADVANCED")
    self.assertEqual(result["value"]["canary_mode"]["value"], True)
    self.assertEqual(result["value"]["docs_location"]["value"], "REMOTE")
# NOTE(review): class name has a typo ("Regresstion"); kept unchanged to
# avoid altering the test id.
class ApiUserSettingsRendererRegresstionTest(
    api_test_lib.ApiCallRendererRegressionTest):
  """Regression test for ApiUserSettingsRenderer."""
  renderer = "ApiUserSettingsRenderer"
  def Run(self):
    # FakeTime pins timestamps so the recorded golden output is stable.
    with test_lib.FakeTime(42):
      with aff4.FACTORY.Create(
          aff4.ROOT_URN.Add("users").Add(self.token.username),
          aff4_type="GRRUser", mode="w", token=self.token) as user_fd:
        user_fd.Set(user_fd.Schema.GUI_SETTINGS,
                    aff4_users.GUISettings(canary_mode=True))
    self.Check("GET", "/api/users/me/settings")
class ApiSetUserSettingsRendererTest(test_lib.GRRBaseTest):
  """Tests for ApiSetUserSettingsRenderer."""
  def setUp(self):
    super(ApiSetUserSettingsRendererTest, self).setUp()
    self.renderer = user_plugin.ApiSetUserSettingsRenderer()
  def testSetsSettingsForUserCorrespondingToToken(self):
    # Apply settings through the renderer for user "foo", then read the
    # user's AFF4 object back and verify they were persisted.
    settings = aff4_users.GUISettings(mode="ADVANCED",
                                      canary_mode=True,
                                      docs_location="REMOTE")
    # Render the request - effectively applying the settings for user "foo".
    result = self.renderer.Render(settings,
                                  token=access_control.ACLToken(username="foo"))
    self.assertEqual(result["status"], "OK")
    # Check that settings for user "foo" were applied.
    fd = aff4.FACTORY.Open("aff4:/users/foo", token=self.token)
    self.assertEqual(fd.Get(fd.Schema.GUI_SETTINGS), settings)
def main(argv):
  """Entry point: delegate to GRR's test runner."""
  test_lib.main(argv)
if __name__ == "__main__":
  flags.StartMain(main)
|
pombredanne/grr
|
gui/api_plugins/user_test.py
|
Python
|
apache-2.0
| 7,857
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Gregory Shulov (gregory.shulov@gmail.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: infini_host
version_added: 2.3
short_description: Create, Delete and Modify Hosts on Infinibox
description:
- This module creates, deletes or modifies hosts on Infinibox.
author: Gregory Shulov (@GR360RY)
options:
name:
description:
- Host Name
required: true
state:
description:
- Creates/Modifies Host when present or removes when absent
required: false
default: present
choices: [ "present", "absent" ]
wwns:
description:
- List of wwns of the host
required: false
volume:
description:
- Volume name to map to the host
required: false
extends_documentation_fragment:
- infinibox
'''
EXAMPLES = '''
- name: Create new new host
infini_host:
name: foo.example.com
user: admin
password: secret
system: ibox001
- name: Make sure host bar is available with wwn ports
infini_host:
name: bar.example.com
wwns:
- "00:00:00:00:00:00:00"
- "11:11:11:11:11:11:11"
system: ibox01
user: admin
password: secret
- name: Map host foo.example.com to volume bar
infini_host:
name: foo.example.com
volume: bar
system: ibox01
user: admin
password: secret
'''
RETURN = '''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.infinibox import HAS_INFINISDK, api_wrapper, get_system, infinibox_argument_spec
@api_wrapper
def get_host(module, system):
    """Return the Infinibox host matching the module's 'name' parameter,
    or None if no such host exists."""
    target_name = module.params['name']
    for candidate in system.hosts.to_list():
        if candidate.get_name() == target_name:
            return candidate
    return None
@api_wrapper
def create_host(module, system):
    """Create the host, attach any requested wwn ports and map the
    requested volume.  Honors Ansible check mode (reports changed
    without touching the system)."""
    if not module.check_mode:
        host = system.hosts.create(name=module.params['name'])
        for port in (module.params['wwns'] or []):
            host.add_fc_port(port)
        if module.params['volume']:
            host.map_volume(system.volumes.get(name=module.params['volume']))
    module.exit_json(changed=True)
@api_wrapper
def update_host(module, host):
    """Placeholder: host modification is not implemented yet, so an
    existing host is always reported as unchanged."""
    module.exit_json(changed=False)
@api_wrapper
def delete_host(module, host):
    """Remove the host from the Infinibox.  Honors Ansible check mode."""
    if not module.check_mode:
        host.delete()
    module.exit_json(changed=True)
def main():
    """Module entry point: ensure the host is present or absent as requested."""
    argument_spec = infinibox_argument_spec()
    argument_spec.update(
        dict(
            name=dict(required=True),
            state=dict(default='present', choices=['present', 'absent']),
            wwns=dict(type='list'),
            volume=dict()
        )
    )

    module = AnsibleModule(argument_spec, supports_check_mode=True)

    if not HAS_INFINISDK:
        module.fail_json(msg='infinisdk is required for this module')

    state = module.params['state']
    system = get_system(module)
    host = get_host(module, system)

    if module.params['volume']:
        # Validate the volume exists before doing anything destructive.
        try:
            system.volumes.get(name=module.params['volume'])
        # BUGFIX: narrowed from a bare `except:` which also swallowed
        # SystemExit/KeyboardInterrupt.
        except Exception:
            module.fail_json(msg='Volume {} not found'.format(module.params['volume']))

    # Dispatch on current host existence vs. desired state.
    if host and state == 'present':
        update_host(module, host)
    elif host and state == 'absent':
        delete_host(module, host)
    elif host is None and state == 'absent':
        module.exit_json(changed=False)
    else:
        create_host(module, system)


if __name__ == '__main__':
    main()
|
photoninger/ansible
|
lib/ansible/modules/storage/infinidat/infini_host.py
|
Python
|
gpl-3.0
| 3,806
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from datetime import timedelta
from odoo import fields
from odoo.addons.stock.tests.common2 import TestStockCommon
class TestSaleStockLeadTime(TestStockCommon):
    @classmethod
    def setUpClass(cls):
        """Shared fixture: make product_1 stockable with a 5-day customer lead time."""
        super(TestSaleStockLeadTime, cls).setUpClass()
        # Update the product_1 with type and Customer Lead Time
        cls.product_1.write({'type': 'product', 'sale_delay': 5.0})
    def test_00_product_company_level_delays(self):
        """ In order to check schedule date, set product's Customer Lead Time
        and company's Sales Safety Days."""
        company = self.env.ref('base.main_company')
        # Update company with Sales Safety Days
        company.write({'security_lead': 3.00})
        # Create sale order of product_1
        order = self.env['sale.order'].create({
            'partner_id': self.partner_1.id,
            'partner_invoice_id': self.partner_1.id,
            'partner_shipping_id': self.partner_1.id,
            'pricelist_id': self.env.ref('product.list0').id,
            'picking_policy': 'direct',
            'warehouse_id': self.warehouse_1.id,
            'order_line': [(0, 0, {'name': self.product_1.name,
                                   'product_id': self.product_1.id,
                                   'product_uom_qty': 10,
                                   'product_uom': self.uom_unit.id,
                                   'customer_lead': self.product_1.sale_delay})]})
        # Confirm our standard sale order
        order.action_confirm()
        # Check the picking created or not
        self.assertTrue(order.picking_ids, "Picking should be created.")
        # Check schedule date of picking
        # Expected: order date + Customer Lead Time - company Sales Safety Days.
        out_date = fields.Datetime.from_string(order.date_order) + timedelta(days=self.product_1.sale_delay) - timedelta(days=company.security_lead)
        min_date = fields.Datetime.from_string(order.picking_ids[0].scheduled_date)
        # 1-second tolerance absorbs timestamp rounding during confirmation.
        self.assertTrue(abs(min_date - out_date) <= timedelta(seconds=1), 'Schedule date of picking should be equal to: order date + Customer Lead Time - Sales Safety Days.')
    def test_01_product_route_level_delays(self):
        """ In order to check schedule dates, set product's Customer Lead Time
        and warehouse route's delay."""
        # Update warehouse_1 with Outgoing Shippings pick + pack + ship
        self.warehouse_1.write({'delivery_steps': 'pick_pack_ship'})
        # Set delay on pull rule
        for pull_rule in self.warehouse_1.delivery_route_id.rule_ids:
            pull_rule.write({'delay': 2})
        # Create sale order of product_1
        order = self.env['sale.order'].create({
            'partner_id': self.partner_1.id,
            'partner_invoice_id': self.partner_1.id,
            'partner_shipping_id': self.partner_1.id,
            'pricelist_id': self.env.ref('product.list0').id,
            'picking_policy': 'direct',
            'warehouse_id': self.warehouse_1.id,
            'order_line': [(0, 0, {'name': self.product_1.name,
                                   'product_id': self.product_1.id,
                                   'product_uom_qty': 5,
                                   'product_uom': self.uom_unit.id,
                                   'customer_lead': self.product_1.sale_delay})]})
        # Confirm our standard sale order
        order.action_confirm()
        # Check the picking created or not
        self.assertTrue(order.picking_ids, "Pickings should be created.")
        # Check schedule date of ship type picking
        # Each step is offset backwards by its pull rule's delay:
        # ship = order date + lead time - delay; pack = ship - delay; pick = pack - delay.
        out = order.picking_ids.filtered(lambda r: r.picking_type_id == self.warehouse_1.out_type_id)
        out_min_date = fields.Datetime.from_string(out.scheduled_date)
        out_date = fields.Datetime.from_string(order.date_order) + timedelta(days=self.product_1.sale_delay) - timedelta(days=out.move_lines[0].rule_id.delay)
        self.assertTrue(abs(out_min_date - out_date) <= timedelta(seconds=1), 'Schedule date of ship type picking should be equal to: order date + Customer Lead Time - pull rule delay.')
        # Check schedule date of pack type picking
        pack = order.picking_ids.filtered(lambda r: r.picking_type_id == self.warehouse_1.pack_type_id)
        pack_min_date = fields.Datetime.from_string(pack.scheduled_date)
        pack_date = out_date - timedelta(days=pack.move_lines[0].rule_id.delay)
        self.assertTrue(abs(pack_min_date - pack_date) <= timedelta(seconds=1), 'Schedule date of pack type picking should be equal to: Schedule date of ship type picking - pull rule delay.')
        # Check schedule date of pick type picking
        pick = order.picking_ids.filtered(lambda r: r.picking_type_id == self.warehouse_1.pick_type_id)
        pick_min_date = fields.Datetime.from_string(pick.scheduled_date)
        pick_date = pack_date - timedelta(days=pick.move_lines[0].rule_id.delay)
        self.assertTrue(abs(pick_min_date - pick_date) <= timedelta(seconds=1), 'Schedule date of pick type picking should be equal to: Schedule date of pack type picking - pull rule delay.')
def test_02_if_propagate_date(self):
""" In order to check schedule dates, set product's Customer Lead Time
and warehouse route's delay with propagate True in stock rules"""
#Example :
#-> set 'propagate_date' = True in stock rules
#-> set propagate_date_minimum_delta = 5 days
#-> Set Warehouse with Outgoing Shipments : pick + pack + ship
#-> Set delay and propagate_date_minimum_delta on stock rules : 5 days and propagate_date = True
#-> Set Customer Lead Time on product : 30 days
#-> Create an SO and confirm it with confirmation Date : 12/18/2018
#-> Pickings : OUT -> Scheduled Date : 01/12/2019
# PACK -> Scheduled Date : 01/07/2019
# PICK -> Scheduled Date : 01/02/2019
#-> Now, change date of pick = +5 days
#-> Scheduled Date should be changed:
# OUT -> Scheduled Date : 01/17/2019
# PACK -> Scheduled Date : 01/12/2019
# PICK -> Scheduled Date : 01/07/2019
# set the propagate_date and
# set propagate_date_minimum_delta = 5 in the stock rule
# Update warehouse_1 with Outgoing Shippings pick + pack + ship
self.warehouse_1.write({'delivery_steps': 'pick_pack_ship'})
# Set delay on pull rule
self.warehouse_1.delivery_route_id.rule_ids.write({'delay': 5, 'propagate_date': True, 'propagate_date_minimum_delta': 5})
# Update the product_1 with type and Customer Lead Time
self.product_1.write({'type': 'product',
'sale_delay': 30.0})
# Now, create sale order of product_1 with customer_lead set on product
order = self.env['sale.order'].create({
'partner_id': self.partner_1.id,
'partner_invoice_id': self.partner_1.id,
'partner_shipping_id': self.partner_1.id,
'pricelist_id': self.env.ref('product.list0').id,
'picking_policy': 'direct',
'warehouse_id': self.warehouse_1.id,
'order_line': [(0, 0, {'name': self.product_1.name,
'product_id': self.product_1.id,
'product_uom_qty': 5,
'product_uom': self.uom_unit.id,
'customer_lead': self.product_1.sale_delay})]})
# Confirm our standard sale order
order.action_confirm()
# Check the picking crated or not
self.assertTrue(order.picking_ids, "Pickings should be created.")
# Check schedule date of ship type picking
out = order.picking_ids.filtered(lambda r: r.picking_type_id == self.warehouse_1.out_type_id)
out_min_date = out.scheduled_date
out_date = order.date_order + timedelta(days=self.product_1.sale_delay) - timedelta(days=out.move_lines[0].rule_id.delay)
self.assertTrue(abs(out_min_date - out_date) <= timedelta(seconds=1), 'Schedule date of ship type picking should be equal to: order date + Customer Lead Time - pull rule delay.')
# Check schedule date of pack type picking
pack = order.picking_ids.filtered(lambda r: r.picking_type_id == self.warehouse_1.pack_type_id)
pack_min_date = pack.scheduled_date
pack_date = out_date - timedelta(days=pack.move_lines[0].rule_id.delay)
self.assertTrue(abs(pack_min_date - pack_date) <= timedelta(seconds=1), 'Schedule date of pack type picking should be equal to: Schedule date of ship type picking - pull rule delay.')
# Check schedule date of pick type picking
pick = order.picking_ids.filtered(lambda r: r.picking_type_id == self.warehouse_1.pick_type_id)
pick_min_date = pick.scheduled_date
pick_date = pack_date - timedelta(days=pick.move_lines[0].rule_id.delay)
self.assertTrue(abs(pick_min_date - pick_date) <= timedelta(seconds=1), 'Schedule date of pick type picking should be equal to: Schedule date of pack type picking - pull rule delay.')
# Now change the schedule date of pick
# Note : pack and out has change scheduled_date automatically based on delay set on pick
pick.write({'scheduled_date': pick_min_date + timedelta(days=5)})
# Now check scheduled_date of pack and out are changed or not based on propagate is true on rules?
self.assertEqual(pack.scheduled_date, (pack_min_date + timedelta(days=5)),
'Schedule date of pack should be changed based on delay.')
self.assertEqual(out.scheduled_date, (out_min_date + timedelta(days=5)),
'Schedule date of out should be changed based on delay.')
def test_03_no_propagate_date(self):
""" In order to check schedule dates, set product's Customer Lead Time
and warehouse route's delay with propagate False in stock rule"""
#Example :
#-> Set Warehouse with Outgoing Shipments : pick + pack + ship
#-> Set delay on stock rules : 5 days and propagate = False
#-> Set Customer Lead Time on product : 30 days
#-> Create an SO and confirm it with confirmation Date : 12/18/2018
#-> Pickings : OUT -> Scheduled Date : 01/12/2019
# PACK -> Scheduled Date : 01/07/2019
# PICK -> Scheduled Date : 01/02/2019
#-> Now, change date of pick = +5 days
#-> Scheduled Date should be not changed:
# OUT -> Scheduled Date : 01/12/2019
# PACK -> Scheduled Date : 01/07/2019
# PICK -> Scheduled Date : 01/07/2019
# Update warehouse_1 with Outgoing Shippings pick + pack + ship
self.warehouse_1.write({'delivery_steps': 'pick_pack_ship'})
# Set delay on pull rule
for pull_rule in self.warehouse_1.delivery_route_id.rule_ids:
pull_rule.write({'delay': 5, 'propagate_date': False})
# Update the product_1 with type and Customer Lead Time
self.product_1.write({'type': 'product',
'sale_delay': 30.0})
#Create sale order of product_1
order = self.env['sale.order'].create({
'partner_id': self.partner_1.id,
'partner_invoice_id': self.partner_1.id,
'partner_shipping_id': self.partner_1.id,
'pricelist_id': self.env.ref('product.list0').id,
'picking_policy': 'direct',
'warehouse_id': self.warehouse_1.id,
'order_line': [(0, 0, {'name': self.product_1.name,
'product_id': self.product_1.id,
'product_uom_qty': 5,
'product_uom': self.uom_unit.id,
'customer_lead': self.product_1.sale_delay})]})
# Confirm our standard sale order
order.action_confirm()
# Check the picking crated or not
self.assertTrue(order.picking_ids, "Pickings should be created.")
# Check schedule date of ship type picking
out = order.picking_ids.filtered(lambda r: r.picking_type_id == self.warehouse_1.out_type_id)
out_min_date = out.scheduled_date
out_date = order.date_order + timedelta(days=self.product_1.sale_delay) - timedelta(days=out.move_lines[0].rule_id.delay)
self.assertTrue(abs(out_min_date - out_date) <= timedelta(seconds=1), 'Schedule date of ship type picking should be equal to: order date + Customer Lead Time - pull rule delay.')
# Check schedule date of pack type picking
pack = order.picking_ids.filtered(lambda r: r.picking_type_id == self.warehouse_1.pack_type_id)
pack_min_date = pack.scheduled_date
pack_date = out_date - timedelta(days=pack.move_lines[0].rule_id.delay)
self.assertTrue(abs(pack_min_date - pack_date) <= timedelta(seconds=1), 'Schedule date of pack type picking should be equal to: Schedule date of ship type picking - pull rule delay.')
# Check schedule date of pick type picking
pick = order.picking_ids.filtered(lambda r: r.picking_type_id == self.warehouse_1.pick_type_id)
pick_min_date = pick.scheduled_date
pick_date = pack_date - timedelta(days=pick.move_lines[0].rule_id.delay)
self.assertTrue(abs(pick_min_date - pick_date) <= timedelta(seconds=1), 'Schedule date of pick type picking should be equal to: Schedule date of pack type picking - pull rule delay.')
# Now change the schedule date of pick
pick.write({'scheduled_date': pick_min_date + timedelta(days=5)})
# Now check scheduled_date of pack and out are changed or not based on propagate is false on rules?
self.assertEqual(pack.scheduled_date, pack_min_date, 'Schedule date of pack should not be changed.')
self.assertEqual(out.scheduled_date, out_min_date, 'Schedule date of out should not be changed.')
|
ddico/odoo
|
addons/sale_stock/tests/test_sale_stock_lead_time.py
|
Python
|
agpl-3.0
| 14,131
|
from google.appengine.ext import ndb
from google.appengine.ext import blobstore
class Likes(ndb.Model):
    # Datastore model tracking likes on a blog post.
    # post: key of the liked Post entity.
    # user_id: ids of users who liked the post (repeated property despite
    #   the singular name).
    # like_count: total number of likes; presumably kept in sync with
    #   user_id by the callers -- confirm before relying on it.
    post = ndb.KeyProperty(kind='Post')
    user_id = ndb.IntegerProperty(repeated=True)
    like_count = ndb.IntegerProperty(default=0)
|
ghoshabhi/Multi-User-Blog
|
models/likes_model.py
|
Python
|
mit
| 242
|
from django.conf import settings
from django.contrib.sites.models import RequestSite
from django.contrib.sites.models import Site
from user_registration import signals
from user_registration.forms import RegistrationForm
from user_registration.models import RegistrationProfile
class DefaultBackend(object):
    """
    A registration backend implementing a simple three step workflow:

    1. The user signs up; an inactive account is created.
    2. An email containing an activation link is sent to the user.
    3. Following the activation link turns the account active.

    Using this backend requires that

    * ``registration`` is listed in ``INSTALLED_APPS`` (this backend
      uses models defined in that application).
    * ``ACCOUNT_ACTIVATION_DAYS`` is set to the integer number of days,
      counted from registration, during which activation is allowed;
      after that period activation is refused.
    * The templates ``registration/activation_email_subject.txt`` and
      ``registration/activation_email.txt`` exist; they render the
      activation email (see the notes on ``register`` for details).

    Registration can be temporarily closed by setting
    ``REGISTRATION_OPEN`` to ``False``; leaving the setting out, or
    setting it to ``True``, keeps registration open.

    Internally the activation key is stored on an instance of
    ``registration.models.RegistrationProfile``; see that model and its
    custom manager for the full documentation of its fields and
    supported operations.
    """
    def register(self, request, **kwargs):
        """
        Create a new, initially inactive, user account from a username,
        email address and password.

        A ``registration.models.RegistrationProfile`` holding the
        activation key is created alongside the new ``User``, and the
        activation email is sent to the supplied address (see
        ``RegistrationProfile.send_activation_email()`` for the
        templates and contexts involved).

        Once the ``User`` and ``RegistrationProfile`` exist and the
        email has been sent, the
        ``registration.signals.user_registered`` signal is emitted with
        the new ``User`` as the keyword argument ``user`` and this
        backend class as the sender.
        """
        username = kwargs['username']
        email = kwargs['email']
        password = kwargs['password1']
        # Prefer the configured Site; fall back to a request-derived site
        # when the sites framework is not installed.
        if Site._meta.installed:
            site = Site.objects.get_current()
        else:
            site = RequestSite(request)
        new_user = RegistrationProfile.objects.create_inactive_user(
            username, email, password, site)
        signals.user_registered.send(
            sender=self.__class__, user=new_user, request=request)
        return new_user
    def activate(self, request, activation_key):
        """
        Look up and activate the user account matching
        ``activation_key``, if possible.

        On success the ``registration.signals.user_activated`` signal is
        emitted with the newly activated ``User`` as the keyword
        argument ``user`` and this backend class as the sender.
        """
        activated = RegistrationProfile.objects.activate_user(activation_key)
        if activated:
            signals.user_activated.send(
                sender=self.__class__, user=activated, request=request)
        return activated
    def registration_allowed(self, request):
        """
        Report whether account registration is currently permitted,
        driven by the ``REGISTRATION_OPEN`` setting: unset or ``True``
        means open, ``False`` means closed.
        """
        return getattr(settings, 'REGISTRATION_OPEN', True)
    def get_form_class(self, request):
        """
        Return the default form class used for user registration.
        """
        return RegistrationForm
    def post_registration_redirect(self, request, user):
        """
        Return the name of the URL to redirect to after successful
        registration.
        """
        return ('registration_complete', (), {})
    def post_activation_redirect(self, request, user):
        """
        Return the name of the URL to redirect to after successful
        account activation.
        """
        return ('registration_activation_complete', (), {})
|
commtrack/commtrack-core
|
apps/user_registration/backends/default/__init__.py
|
Python
|
bsd-3-clause
| 5,410
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import glob
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# Long description shown on PyPI.
readme = open('README.rst').read()

requirements = [
    'Click>=6.7',
    'bioblend==0.16.0',
    'wrapt',
    'pyyaml',
    'justbackoff',
    'xunit-wrapper>=0.12',
    'future',
]

test_requirements = [
    # TODO: put package test requirements here
]

# The canonical version lives in parsec/__init__.py; read it from the
# first line mentioning 'version' instead of importing the package.
version = None
with open('parsec/__init__.py', 'r') as handle:
    version_line = [line for line in handle if 'version' in line][0].strip()
    _, quoted = version_line.split(' = ')
    version = quoted.strip("'")

# Every non-module entry below parsec/commands/ is a sub-package.
subpackages = [
    path.replace('/', '.')
    for path in glob.glob('parsec/commands/*')
    if not path.endswith('.py')
]

setup(
    name='galaxy-parsec',
    version=version,
    description='Command-line utilities to assist in interacting with Galaxy servers (http://galaxyproject.org/).',
    long_description=readme,
    author='Galaxy Project and Community',
    author_email='parsec@hx42.org',
    url='https://github.com/galaxy-iuc/parsec',
    packages=[
        'parsec',
        'parsec.commands',
    ] + subpackages,
    entry_points='''
        [console_scripts]
        parsec=parsec.cli:parsec
    ''',
    package_dir={'parsec': 'parsec'},
    install_requires=requirements,
    license="AFL",
    keywords='parsec',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'Environment :: Console',
        'License :: OSI Approved :: MIT License',
        'Operating System :: POSIX',
        'Natural Language :: English',
        'Programming Language :: Python :: 3.6',
    ],
    test_suite='tests',
    tests_require=test_requirements
)
|
galaxy-iuc/parsec
|
setup.py
|
Python
|
apache-2.0
| 1,747
|
from django.contrib import admin
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from waldur_core.core import admin as core_admin
from waldur_core.structure import admin as structure_admin
from . import executors, models
class JiraPropertyAdmin(
    core_admin.UpdateOnlyModelAdmin, structure_admin.BackendModelAdmin, admin.ModelAdmin
):
    # Shared admin for backend-synced JIRA properties: records can only be
    # updated (not created) through the admin, and are searchable by name
    # and description.
    list_display = ('name', 'description', 'settings')
    search_fields = ('name', 'description')
class ProjectTemplateAdmin(JiraPropertyAdmin):
    # Same behaviour as JiraPropertyAdmin, but the list omits the
    # 'settings' column.
    list_display = ('name', 'description')
    search_fields = ('name', 'description')
class ProjectAdmin(structure_admin.ResourceAdmin):
    actions = ['pull']
    class Pull(core_admin.ExecutorAdminAction):
        # Admin action: re-synchronize the selected projects from the
        # JIRA backend via ProjectPullExecutor.
        executor = executors.ProjectPullExecutor
        short_description = _('Pull')
        def validate(self, service_settings):
            # NOTE(review): despite its name, the argument is validated
            # against Project.States, so it appears to be a Project
            # instance, not service settings -- confirm against
            # ExecutorAdminAction before renaming.
            States = models.Project.States
            if service_settings.state not in (States.OK, States.ERRED):
                raise ValidationError(_('Project has to be OK or erred.'))
    pull = Pull()
class IssueAdmin(structure_admin.BackendModelAdmin):
    # Admin for JIRA issues: filterable by project, with an action to
    # refresh the selected issues from the backend.
    list_filter = ('project',)
    list_display = (
        'backend_id',
        'type',
        'project',
        'status',
        'reporter_name',
        'assignee_name',
    )
    actions = ['pull']
    class Pull(core_admin.ExecutorAdminAction):
        # Admin action: re-fetch the selected issues from JIRA.
        executor = executors.IssueUpdateFromBackendExecutor
        short_description = _('Pull issue')
    pull = Pull()
# Register the JIRA models with the Django admin site.  Priority and
# IssueType share the generic property admin; Comment uses the stock
# ModelAdmin.
admin.site.register(models.Priority, JiraPropertyAdmin)
admin.site.register(models.IssueType, JiraPropertyAdmin)
admin.site.register(models.ProjectTemplate, ProjectTemplateAdmin)
admin.site.register(models.Issue, IssueAdmin)
admin.site.register(models.Comment, admin.ModelAdmin)
admin.site.register(models.Project, ProjectAdmin)
|
opennode/nodeconductor-assembly-waldur
|
src/waldur_jira/admin.py
|
Python
|
mit
| 1,868
|
import os
import os.path
import sys
# Make this file's own directory importable so the sibling modules
# (mapillary, dao) resolve regardless of the current working directory.
EXTRA_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__)))
if EXTRA_DIR not in sys.path:
    sys.path.append(EXTRA_DIR)
import mapillary
import dao
class ListService:
    # Service for user-owned image lists; persistence goes through the
    # shared MySQL DAO.
    def __init__(self, mysql_dao):
        self.mysql_dao = mysql_dao
    def ensure_list(self, user, list_name):
        # TODO: not implemented yet -- presumably intended to create the
        # named list for `user` when it does not already exist.
        pass
class ImageNotFoundException(Exception):
    """Raised when Mapillary reports that an image key does not exist."""
    # Fix: this exception was raised in ImageService.ensure_image but was
    # never defined or imported anywhere in the file, so the error path
    # crashed with NameError instead of raising the intended exception.


class ImageService:
    """Mirrors Mapillary image metadata into the local database."""
    def __init__(self, mysql_dao):
        self.mysql_dao = mysql_dao
        self.image_dao = dao.ImageDAO(mysql_dao)
    def ensure_image(self, mapillary_key):
        """Fetch metadata for `mapillary_key` and upsert it locally.

        Raises ImageNotFoundException when the key is unknown to
        Mapillary.
        """
        # Get Mapillary details
        m = mapillary.MapillaryRequest()
        image = m.get_image(mapillary_key)
        # Mapillary signals a missing image with {'code': 'not_found'}.
        if 'code' in image and image['code'] == 'not_found':
            raise ImageNotFoundException("Image not found: "+str(mapillary_key))
        self.image_dao.ensure_image(mapillary_key, image['ca'], image['lat'], image['lon'], image['user'], image['captured_at'])
|
simonmikkelsen/mapillary-browser
|
api/services.py
|
Python
|
mit
| 951
|
#!/usr/bin/python3
# Copyright 2020 Timothy Trippel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import sys
from hwfutils.coverage import Coverage
class KCov(Coverage):
  """kcov/LLVM coverage extractor.

  Loads every kcov JSON trace found via the base class' glob and
  accumulates per-test line-coverage totals for the DUT model files.
  """
  def __init__(self, toplevel, data_dir, glob_str, file_ext):
    super().__init__(toplevel, data_dir, glob_str, file_ext)
    self.extract_all_cov_traces()

  def _extract_single_cov_trace(self, cov_trace_file):
    """Extract line coverage for a single test from one kcov JSON trace."""
    # Derive the test ID from the trace filename, e.g. "kcov_000123.json"
    # -> 123.  Fix: the previous str.rstrip(".json") removed *characters*
    # from the set {., j, s, o, n}, not the suffix, and would corrupt any
    # stem ending in one of those characters; strip the suffix explicitly.
    if cov_trace_file.endswith(".json"):
      stem = cov_trace_file[:-len(".json")]
    else:
      stem = cov_trace_file
    test_id = int(stem.split("_")[-1])
    test_id_str = "id:{:06d}".format(test_id)
    self.coverage_dict[Coverage.TEST_ID].append(test_id_str)
    self.coverage_dict[Coverage.LINES_COVERED].append(0)
    self.coverage_dict[Coverage.LINES_TOTAL].append(0)
    # open json file and extract coverage data for the DUT only
    with open(cov_trace_file, "r") as jf:
      cov_data = json.load(jf)
      for cov_dict in cov_data['files']:
        filename = cov_dict['file']
        # Only count files belonging to the DUT software model.
        if ("hw/%s/model" % self.toplevel) in filename:
          lines_covered = int(cov_dict['covered_lines'])
          lines_total = int(cov_dict['total_lines'])
          self.coverage_dict[Coverage.LINES_COVERED][-1] += lines_covered
          self.coverage_dict[Coverage.LINES_TOTAL][-1] += lines_total
    # compute line coverage percentage
    self.coverage_dict[Coverage.LINE_COVERAGE].append(
        float(self.coverage_dict[Coverage.LINES_COVERED][-1]) /
        float(self.coverage_dict[Coverage.LINES_TOTAL][-1]))
def main(argv):
  """CLI entry point: extract kcov line coverage and dump it to CSV."""
  parser = argparse.ArgumentParser(
      description="HW Fuzzing LLVM Coverage Extraction")
  parser.add_argument("--output-dir", default="logs")
  parser.add_argument("toplevel")
  parser.add_argument("kcov_dir")
  opts = parser.parse_args(argv)
  # Per-test and cumulative traces sit side by side in the kcov directory.
  per_test_cov = KCov(opts.toplevel, opts.kcov_dir, "kcov_[0-9]*", ".json")
  cumulative_cov = KCov(opts.toplevel, opts.kcov_dir, "kcov_cum*", ".json")
  # Export coverage data to plotting-friendly CSV files.
  per_test_cov.dump_to_csv("%s/kcov.csv" % opts.output_dir)
  cumulative_cov.dump_to_csv("%s/kcov_cum.csv" % opts.output_dir)


if __name__ == "__main__":
  main(sys.argv[1:])
|
googleinterns/hw-fuzzing
|
infra/base-sim/hwfutils/hwfutils/extract_kcov.py
|
Python
|
apache-2.0
| 2,676
|
# Copyright 2013-2016 Tom Eulenfeld, MIT license
import unittest
import warnings
import h5py
import numpy as np
from obspy import read
from obspy.core import UTCDateTime as UTC
from obspy.core.util import NamedTemporaryFile
from obspyh5 import readh5, writeh5, trace2group, iterh5, set_index
import obspyh5
class HDF5TestCase(unittest.TestCase):
    """Round-trip tests for the obspyh5 HDF5 reader/writer plugin."""
    def setUp(self):
        """Build a processed example stream with custom header entries."""
        self.stream = read().sort()
        # add processing info
        self.stream.decimate(2)
        self.stream.differentiate()
        # Custom headers exercising scalar, string, unicode and nested
        # dict serialization.
        self.stream[0].stats.onset = UTC()
        self.stream[0].stats.header = 42
        self.stream[0].stats.header2 = 'Test entry'
        self.stream[0].stats.header3 = u'Test entry unicode'
        stack = dict(group='all', count=5, type=['pw', 2])
        self.stream[0].stats.stack = stack
        for tr in self.stream:
            if 'response' in tr.stats:
                del tr.stats.response
    def test_is_utc(self):
        """_is_utc recognizes UTCDateTime objects and rejects others."""
        self.assertTrue(obspyh5._is_utc(UTC()))
        self.assertFalse(obspyh5._is_utc(110))
    def test_hdf5_plugin(self):
        """Write/read round trip through the ObsPy 'H5' plugin."""
        stream = self.stream
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5')
            stream2 = read(fname).sort()
        for tr in stream2:
            del tr.stats._format
        self.assertEqual(stream, stream2)
    def test_hdf5_plugin_and_xcorr_index(self):
        """Round trip with the cross-correlation ('xcorr') index layout."""
        set_index('xcorr')
        stream = self.stream.copy()
        for i, tr in enumerate(stream):  # manipulate stats object
            station1, station2 = 'ST1', 'ST%d' % i
            channel1, channel2 = 'HHZ', u'HHN'
            s = tr.stats
            # we manipulate seed id so that important information gets
            # printed by obspy
            s.network, s.station = s.station1, s.channel1 = station1, channel1
            s.location, s.channel = s.station2, s.channel2 = station2, channel2
            s.network1 = s.network2 = 'BW'
            s.location1 = s.location2 = ''
        stream.sort()
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5')
            stream2 = read(fname).sort()
        for tr in stream2:
            del tr.stats._format
        set_index()
        self.assertEqual(stream, stream2)
    def test_hdf5_basic(self):
        """writeh5/readh5: append mode, headonly, numpy headers, warnings."""
        stream = self.stream
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            # write stream and read again, append data
            writeh5(stream[:1], fname)
            self.assertTrue(obspyh5.is_obspyh5(fname))
            stream2 = readh5(fname)
            writeh5(stream[1:], fname, mode='a')
            stream3 = readh5(fname)
            self.assertEqual(stream[:1], stream2)
            self.assertEqual(stream, stream3)
            # read only header
            stream3 = readh5(fname, headonly=True)
            self.assertEqual(stream2[0].stats, stream3[0].stats)
            self.assertEqual(len(stream3[0].data), 0)
            # test if group was really created
            with h5py.File(fname, mode='r') as f:
                self.assertTrue('waveforms' in f)
            # # test numpy headers
            stream[0].stats.num = np.array([[5, 4, 3], [1, 2, 3.]])
            writeh5(stream, fname)
            stream2 = readh5(fname)
            # stream/stats comparison not working for arrays
            # therefore checking directly
            np.testing.assert_array_equal(stream[0].stats.num,
                                          stream2[0].stats.num)
            del stream[0].stats.num
            # check for warning for unsupported types
            stream[0].stats.toomuch = object()
            with warnings.catch_warnings(record=True) as w:
                writeh5(stream, fname)
                warnings.simplefilter("always")
                self.assertEqual(len(w), 1)
            del stream[0].stats.toomuch
    def test_hdf5_interface(self):
        """trace2group low-level API: override handling and is_obspyh5."""
        stream = self.stream
        with NamedTemporaryFile(suffix='.h5') as ft:
            with h5py.File(ft.name, mode='w') as f:
                trace2group(stream[0], f)
                # test override
                with warnings.catch_warnings(record=True) as w:
                    warnings.simplefilter("always")
                    trace2group(stream[0], f)
                    self.assertEqual(len(w), 1)
                with self.assertRaises(KeyError):
                    trace2group(stream[0], f, key=None, override='raise')
                # is_obspyh5 is only working with file names
                self.assertFalse(obspyh5.is_obspyh5(f))
    def test_hdf5_iter(self):
        """iterh5 yields the stored traces one by one."""
        stream = self.stream
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5')
            traces = []
            for tr in iterh5(fname):
                traces.append(tr)
            self.assertEqual(stream.traces, traces)
    def test_hdf5_readonly(self):
        """The readonly filter restricts reading to matching traces."""
        stream = self.stream
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5')
            ro = {'network': 'BW', 'station': u'RJOB', 'location': '',
                  'channel': 'EHE'}
            stream2 = read(fname, 'H5', readonly=ro)
            self.assertEqual(stream[0].id, stream2[0].id)
            ro = {'network': 'BW', 'station': 'RJOB'}
            stream2 = read(fname, 'H5', readonly=ro)
            self.assertEqual(len(stream2), 3)
    def test_hdf5_headonly(self):
        """headonly read/write updates headers without touching data."""
        stream = self.stream
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5')
            stream2 = read(fname, 'H5', headonly=True)
            stream2[0].stats.header = -42
            self.assertEqual(len(stream2[0]), 0)
            stream2.write(fname, 'H5', mode='a', headonly=True)
            stream2 = read(fname, 'H5')
            self.assertEqual(stream2[0].stats.header, -42)
            stream2[0].stats.header = 42
            for tr in stream2:
                del tr.stats._format
            self.assertEqual(stream, stream2)
    def test_stored_index(self):
        """Appending with a different index on an existing file works."""
        stream = self.stream
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5')
            set_index('nonesens')
            stream.write(fname, 'H5', mode='a', override='ignore')
            set_index()
    def test_read_files_saved_prior_version_0_3(self):
        """Files written with the pre-0.3 index layout stay readable."""
        stream = self.stream
        index_v_0_2 = ('{network}.{station}/{location}.{channel}/'
                       '{starttime.datetime:%Y-%m-%dT%H:%M:%S}_'
                       '{endtime.datetime:%Y-%m-%dT%H:%M:%S}')
        set_index(index_v_0_2)
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5', group='waveforms')
            stream2 = read(fname, 'H5', group='waveforms')
            stream3 = read(fname, 'H5')
        set_index()
        for tr in stream2:
            del tr.stats._format
        for tr in stream3:
            del tr.stats._format
        self.assertEqual(stream, stream2)
        self.assertEqual(stream, stream3)
    def test_trc_num(self):
        """The trc_num index plus offset_trc_num interleaves two writes."""
        stream = self.stream.copy()
        set_index('waveforms/{trc_num:03d}')
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5')
            stream.write(fname, 'H5', mode='a', offset_trc_num=3)
            stream2 = read(fname, 'H5')
        for tr in stream2:
            del tr.stats._format
        set_index()
        self.assertEqual(len(stream2), 6)
        self.assertEqual(stream2[::2], stream)
        self.assertEqual(stream2[1::2], stream)
    def test_attrib_dict(self):
        """Nested AttribDict headers survive a round trip."""
        stream = self.stream.copy()
        stream[0].stats.ad = {'bla': 0, 'bla2': 'test', 'bla3': [4, 5]}
        stream[0].stats.ad.nested = {'x': 'next', 'y': 'level'}
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5')
            stream2 = read(fname, 'H5')
        for tr in stream2:
            del tr.stats._format
        self.assertEqual(stream2, stream)
    def test_without_json(self):
        """Streams without the 'processing' header round-trip cleanly."""
        stream = self.stream.copy()
        for tr in stream:
            del tr.stats.processing
        with NamedTemporaryFile(suffix='.h5') as ft:
            fname = ft.name
            stream.write(fname, 'H5')
            stream2 = read(fname, 'H5')
        for tr in stream2:
            del tr.stats._format
        self.assertEqual(stream2, stream)
def suite():
    """Return a TestSuite holding all test* methods of HDF5TestCase."""
    # Fix: unittest.makeSuite is deprecated since Python 3.11 and removed
    # in 3.13.  TestLoader.loadTestsFromTestCase is the supported
    # equivalent and uses the same default 'test' method prefix that the
    # old makeSuite(HDF5TestCase, 'test') call requested.
    return unittest.TestLoader().loadTestsFromTestCase(HDF5TestCase)


if __name__ == '__main__':
    unittest.main(defaultTest='suite')
|
trichter/obspyh5
|
test_obspyh5.py
|
Python
|
mit
| 8,782
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v8.common.types import criteria
from google.ads.googleads.v8.enums.types import frequency_cap_time_unit
from google.ads.googleads.v8.enums.types import reach_plan_ad_length
from google.ads.googleads.v8.enums.types import reach_plan_age_range
from google.ads.googleads.v8.enums.types import reach_plan_network
# proto-plus module descriptor: registers every message declared in this
# file under the google.ads.googleads.v8.services package so they can be
# resolved by name (see the string message references below).
__protobuf__ = proto.module(
    package="google.ads.googleads.v8.services",
    marshal="google.ads.googleads.v8",
    manifest={
        "ListPlannableLocationsRequest",
        "ListPlannableLocationsResponse",
        "PlannableLocation",
        "ListPlannableProductsRequest",
        "ListPlannableProductsResponse",
        "ProductMetadata",
        "PlannableTargeting",
        "GenerateProductMixIdeasRequest",
        "Preferences",
        "GenerateProductMixIdeasResponse",
        "ProductAllocation",
        "GenerateReachForecastRequest",
        "EffectiveFrequencyLimit",
        "FrequencyCap",
        "Targeting",
        "CampaignDuration",
        "PlannedProduct",
        "GenerateReachForecastResponse",
        "ReachCurve",
        "ReachForecast",
        "Forecast",
        "PlannedProductReachForecast",
        "PlannedProductForecast",
        "OnTargetAudienceMetrics",
        "EffectiveFrequencyBreakdown",
    },
)
class ListPlannableLocationsRequest(proto.Message):
    # Empty message: the RPC takes no request parameters.
    r"""Request message for
    [ReachPlanService.ListPlannableLocations][google.ads.googleads.v8.services.ReachPlanService.ListPlannableLocations].
    """
class ListPlannableLocationsResponse(proto.Message):
    r"""The list of plannable locations.
    Attributes:
        plannable_locations (Sequence[google.ads.googleads.v8.services.types.PlannableLocation]):
            The list of locations available for planning
            (Countries, DMAs, sub-countries).
            For locations like Countries and DMAs see
            https://developers.google.com/google-
            ads/api/reference/data/geotargets for more
            information.
    """
    # Wire field 1: repeated PlannableLocation messages.
    plannable_locations = proto.RepeatedField(
        proto.MESSAGE, number=1, message="PlannableLocation",
    )
class PlannableLocation(proto.Message):
    r"""A plannable location: a country, a DMA, a metro region, a tv
    region, a province.
    Attributes:
        id (str):
            The location identifier.
        name (str):
            The unique location name in english.
        parent_country_id (int):
            The parent country code, not present if
            location is a country. If present will always be
            a criterion id: additional information, such as
            country name are returned both via
            ListPlannableLocations or directly by accessing
            GeoTargetConstantService with the criterion id.
    """
    # Wire fields 4-6; all optional scalars.
    id = proto.Field(proto.STRING, number=4, optional=True,)
    name = proto.Field(proto.STRING, number=5, optional=True,)
    parent_country_id = proto.Field(proto.INT64, number=6, optional=True,)
class ListPlannableProductsRequest(proto.Message):
    r"""Request to list available products in a given location.
    Attributes:
        plannable_location_id (str):
            Required. The ID of the selected location for
            planning. To list the available plannable
            location ids use ListPlannableLocations.
    """
    # Wire field 2: required plannable location id.
    plannable_location_id = proto.Field(proto.STRING, number=2,)
class ListPlannableProductsResponse(proto.Message):
    r"""A response with all available products.
    Attributes:
        product_metadata (Sequence[google.ads.googleads.v8.services.types.ProductMetadata]):
            The list of products available for planning
            and related targeting metadata.
    """
    # Wire field 1: repeated ProductMetadata messages.
    product_metadata = proto.RepeatedField(
        proto.MESSAGE, number=1, message="ProductMetadata",
    )
class ProductMetadata(proto.Message):
    r"""The metadata associated with an available plannable product.
    Attributes:
        plannable_product_code (str):
            The code associated with the ad product. E.g. BUMPER,
            TRUEVIEW_IN_STREAM To list the available plannable product
            codes use ListPlannableProducts.
        plannable_product_name (str):
            The name associated with the ad product.
        plannable_targeting (google.ads.googleads.v8.services.types.PlannableTargeting):
            The allowed plannable targeting for this
            product.
    """
    # Wire fields 4 (optional), 3 and 2.
    plannable_product_code = proto.Field(proto.STRING, number=4, optional=True,)
    plannable_product_name = proto.Field(proto.STRING, number=3,)
    plannable_targeting = proto.Field(
        proto.MESSAGE, number=2, message="PlannableTargeting",
    )
class PlannableTargeting(proto.Message):
    r"""The targeting for which traffic metrics will be reported.
    Attributes:
        age_ranges (Sequence[google.ads.googleads.v8.enums.types.ReachPlanAgeRangeEnum.ReachPlanAgeRange]):
            Allowed plannable age ranges for the product
            for which metrics will be reported. Actual
            targeting is computed by mapping this age range
            onto standard Google common.AgeRangeInfo values.
        genders (Sequence[google.ads.googleads.v8.common.types.GenderInfo]):
            Targetable genders for the ad product.
        devices (Sequence[google.ads.googleads.v8.common.types.DeviceInfo]):
            Targetable devices for the ad product.
        networks (Sequence[google.ads.googleads.v8.enums.types.ReachPlanNetworkEnum.ReachPlanNetwork]):
            Targetable networks for the ad product.
    """
    # Wire fields 1-4: enum and message repeated fields.
    age_ranges = proto.RepeatedField(
        proto.ENUM,
        number=1,
        enum=reach_plan_age_range.ReachPlanAgeRangeEnum.ReachPlanAgeRange,
    )
    genders = proto.RepeatedField(
        proto.MESSAGE, number=2, message=criteria.GenderInfo,
    )
    devices = proto.RepeatedField(
        proto.MESSAGE, number=3, message=criteria.DeviceInfo,
    )
    networks = proto.RepeatedField(
        proto.ENUM,
        number=4,
        enum=reach_plan_network.ReachPlanNetworkEnum.ReachPlanNetwork,
    )
class GenerateProductMixIdeasRequest(proto.Message):
    r"""Request message for
    [ReachPlanService.GenerateProductMixIdeas][google.ads.googleads.v8.services.ReachPlanService.GenerateProductMixIdeas].
    Attributes:
        customer_id (str):
            Required. The ID of the customer.
        plannable_location_id (str):
            Required. The ID of the location, this is one
            of the ids returned by ListPlannableLocations.
        currency_code (str):
            Required. Currency code.
            Three-character ISO 4217 currency code.
        budget_micros (int):
            Required. Total budget.
            Amount in micros. One million is equivalent to
            one unit.
        preferences (google.ads.googleads.v8.services.types.Preferences):
            The preferences of the suggested product mix.
            An unset preference is interpreted as all
            possible values are allowed, unless explicitly
            specified.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    customer_id = proto.Field(proto.STRING, number=1,)
    plannable_location_id = proto.Field(proto.STRING, number=6,)
    currency_code = proto.Field(proto.STRING, number=7,)
    budget_micros = proto.Field(proto.INT64, number=8,)
    preferences = proto.Field(proto.MESSAGE, number=5, message="Preferences",)
class Preferences(proto.Message):
    r"""Set of preferences about the planned mix.
    Attributes:
        is_skippable (bool):
            True if ad skippable.
            If not set, default is any value.
        starts_with_sound (bool):
            True if ad start with sound.
            If not set, default is any value.
        ad_length (google.ads.googleads.v8.enums.types.ReachPlanAdLengthEnum.ReachPlanAdLength):
            The length of the ad.
            If not set, default is any value.
        top_content_only (bool):
            True if ad will only show on the top content.
            If not set, default is false.
        has_guaranteed_price (bool):
            True if the price guaranteed. The cost of
            serving the ad is agreed upfront and not subject
            to an auction. If not set, default is any value.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    is_skippable = proto.Field(proto.BOOL, number=6, optional=True,)
    starts_with_sound = proto.Field(proto.BOOL, number=7, optional=True,)
    ad_length = proto.Field(
        proto.ENUM,
        number=3,
        enum=reach_plan_ad_length.ReachPlanAdLengthEnum.ReachPlanAdLength,
    )
    top_content_only = proto.Field(proto.BOOL, number=8, optional=True,)
    has_guaranteed_price = proto.Field(proto.BOOL, number=9, optional=True,)
class GenerateProductMixIdeasResponse(proto.Message):
    r"""The suggested product mix.
    Attributes:
        product_allocation (Sequence[google.ads.googleads.v8.services.types.ProductAllocation]):
            A list of products (ad formats) and the
            associated budget allocation idea.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    product_allocation = proto.RepeatedField(
        proto.MESSAGE, number=1, message="ProductAllocation",
    )
class ProductAllocation(proto.Message):
    r"""An allocation of a part of the budget on a given product.
    Attributes:
        plannable_product_code (str):
            Selected product for planning. The product
            codes returned are within the set of the ones
            returned by ListPlannableProducts when using the
            same location id.
        budget_micros (int):
            The value to be allocated for the suggested
            product in requested currency. Amount in micros.
            One million is equivalent to one unit.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    plannable_product_code = proto.Field(proto.STRING, number=3, optional=True,)
    budget_micros = proto.Field(proto.INT64, number=4, optional=True,)
class GenerateReachForecastRequest(proto.Message):
    r"""Request message for
    [ReachPlanService.GenerateReachForecast][google.ads.googleads.v8.services.ReachPlanService.GenerateReachForecast].
    Attributes:
        customer_id (str):
            Required. The ID of the customer.
        currency_code (str):
            The currency code.
            Three-character ISO 4217 currency code.
        campaign_duration (google.ads.googleads.v8.services.types.CampaignDuration):
            Required. Campaign duration.
        cookie_frequency_cap (int):
            Desired cookie frequency cap that will be applied to each
            planned product. This is equivalent to the frequency cap
            exposed in Google Ads when creating a campaign, it
            represents the maximum number of times an ad can be shown to
            the same user. If not specified no cap is applied.
            This field is deprecated in v4 and will eventually be
            removed. Please use cookie_frequency_cap_setting instead.
        cookie_frequency_cap_setting (google.ads.googleads.v8.services.types.FrequencyCap):
            Desired cookie frequency cap that will be applied to each
            planned product. This is equivalent to the frequency cap
            exposed in Google Ads when creating a campaign, it
            represents the maximum number of times an ad can be shown to
            the same user during a specified time interval. If not
            specified, no cap is applied.
            This field replaces the deprecated cookie_frequency_cap
            field.
        min_effective_frequency (int):
            Desired minimum effective frequency (the number of times a
            person was exposed to the ad) for the reported reach metrics
            [1-10]. This won't affect the targeting, but just the
            reporting. If not specified, a default of 1 is applied.
            This field cannot be combined with the
            effective_frequency_limit field.
        effective_frequency_limit (google.ads.googleads.v8.services.types.EffectiveFrequencyLimit):
            The highest minimum effective frequency (the number of times
            a person was exposed to the ad) value [1-10] to include in
            Forecast.effective_frequency_breakdowns. If not specified,
            Forecast.effective_frequency_breakdowns will not be
            provided.
            The effective frequency value provided here will also be
            used as the minimum effective frequency for the reported
            reach metrics.
            This field cannot be combined with the
            min_effective_frequency field.
        targeting (google.ads.googleads.v8.services.types.Targeting):
            The targeting to be applied to all products
            selected in the product mix.
            This is planned targeting: execution details
            might vary based on the advertising product,
            please consult an implementation specialist.
            See specific metrics for details on how
            targeting affects them.
        planned_products (Sequence[google.ads.googleads.v8.services.types.PlannedProduct]):
            Required. The products to be forecast.
            The max number of allowed planned products is
            15.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    customer_id = proto.Field(proto.STRING, number=1,)
    currency_code = proto.Field(proto.STRING, number=9, optional=True,)
    campaign_duration = proto.Field(
        proto.MESSAGE, number=3, message="CampaignDuration",
    )
    cookie_frequency_cap = proto.Field(proto.INT32, number=10, optional=True,)
    cookie_frequency_cap_setting = proto.Field(
        proto.MESSAGE, number=8, message="FrequencyCap",
    )
    min_effective_frequency = proto.Field(
        proto.INT32, number=11, optional=True,
    )
    effective_frequency_limit = proto.Field(
        proto.MESSAGE,
        number=12,
        optional=True,
        message="EffectiveFrequencyLimit",
    )
    targeting = proto.Field(proto.MESSAGE, number=6, message="Targeting",)
    planned_products = proto.RepeatedField(
        proto.MESSAGE, number=7, message="PlannedProduct",
    )
class EffectiveFrequencyLimit(proto.Message):
    r"""Effective frequency limit.
    Attributes:
        effective_frequency_breakdown_limit (int):
            The highest effective frequency value to include in
            Forecast.effective_frequency_breakdowns. This field supports
            frequencies 1-10, inclusive.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    effective_frequency_breakdown_limit = proto.Field(proto.INT32, number=1,)
class FrequencyCap(proto.Message):
    r"""A rule specifying the maximum number of times an ad can be
    shown to a user over a particular time period.
    Attributes:
        impressions (int):
            Required. The number of impressions,
            inclusive.
        time_unit (google.ads.googleads.v8.enums.types.FrequencyCapTimeUnitEnum.FrequencyCapTimeUnit):
            Required. The type of time unit.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    impressions = proto.Field(proto.INT32, number=3,)
    time_unit = proto.Field(
        proto.ENUM,
        number=2,
        enum=frequency_cap_time_unit.FrequencyCapTimeUnitEnum.FrequencyCapTimeUnit,
    )
class Targeting(proto.Message):
    r"""The targeting for which traffic metrics will be reported.
    Attributes:
        plannable_location_id (str):
            Required. The ID of the selected location.
            Plannable locations ID can be obtained from
            ListPlannableLocations.
        age_range (google.ads.googleads.v8.enums.types.ReachPlanAgeRangeEnum.ReachPlanAgeRange):
            Targeted age range.
            If not specified, targets all age ranges.
        genders (Sequence[google.ads.googleads.v8.common.types.GenderInfo]):
            Targeted genders.
            If not specified, targets all genders.
        devices (Sequence[google.ads.googleads.v8.common.types.DeviceInfo]):
            Targeted devices.
            If not specified, targets all applicable
            devices. Applicable devices vary by product and
            region and can be obtained from
            ListPlannableProducts.
        network (google.ads.googleads.v8.enums.types.ReachPlanNetworkEnum.ReachPlanNetwork):
            Targetable network for the ad product.
            If not specified, targets all applicable
            networks. Applicable networks vary by product
            and region and can be obtained from
            ListPlannableProducts.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    plannable_location_id = proto.Field(proto.STRING, number=6, optional=True,)
    age_range = proto.Field(
        proto.ENUM,
        number=2,
        enum=reach_plan_age_range.ReachPlanAgeRangeEnum.ReachPlanAgeRange,
    )
    genders = proto.RepeatedField(
        proto.MESSAGE, number=3, message=criteria.GenderInfo,
    )
    devices = proto.RepeatedField(
        proto.MESSAGE, number=4, message=criteria.DeviceInfo,
    )
    network = proto.Field(
        proto.ENUM,
        number=5,
        enum=reach_plan_network.ReachPlanNetworkEnum.ReachPlanNetwork,
    )
class CampaignDuration(proto.Message):
    r"""The duration of a planned campaign.
    Attributes:
        duration_in_days (int):
            The duration value in days.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    duration_in_days = proto.Field(proto.INT32, number=2, optional=True,)
class PlannedProduct(proto.Message):
    r"""A product being planned for reach.
    Attributes:
        plannable_product_code (str):
            Required. Selected product for planning.
            The code associated with the ad product. E.g.
            Trueview, Bumper To list the available plannable
            product codes use ListPlannableProducts.
        budget_micros (int):
            Required. Maximum budget allocation in micros for the
            selected product. The value is specified in the selected
            planning currency_code. E.g. 1 000 000$ = 1 000 000 000 000
            micros.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    plannable_product_code = proto.Field(proto.STRING, number=3, optional=True,)
    budget_micros = proto.Field(proto.INT64, number=4, optional=True,)
class GenerateReachForecastResponse(proto.Message):
    r"""Response message containing the generated reach curve.
    Attributes:
        on_target_audience_metrics (google.ads.googleads.v8.services.types.OnTargetAudienceMetrics):
            Reference on target audiences for this curve.
        reach_curve (google.ads.googleads.v8.services.types.ReachCurve):
            The generated reach curve for the planned
            product mix.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    on_target_audience_metrics = proto.Field(
        proto.MESSAGE, number=1, message="OnTargetAudienceMetrics",
    )
    reach_curve = proto.Field(proto.MESSAGE, number=2, message="ReachCurve",)
class ReachCurve(proto.Message):
    r"""The reach curve for the planned products.
    Attributes:
        reach_forecasts (Sequence[google.ads.googleads.v8.services.types.ReachForecast]):
            All points on the reach curve.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    reach_forecasts = proto.RepeatedField(
        proto.MESSAGE, number=1, message="ReachForecast",
    )
class ReachForecast(proto.Message):
    r"""A point on reach curve.
    Attributes:
        cost_micros (int):
            The cost in micros.
        forecast (google.ads.googleads.v8.services.types.Forecast):
            Forecasted traffic metrics for this point.
        planned_product_reach_forecasts (Sequence[google.ads.googleads.v8.services.types.PlannedProductReachForecast]):
            The forecasted allocation and traffic metrics
            for each planned product at this point on the
            reach curve.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    cost_micros = proto.Field(proto.INT64, number=5,)
    forecast = proto.Field(proto.MESSAGE, number=2, message="Forecast",)
    planned_product_reach_forecasts = proto.RepeatedField(
        proto.MESSAGE, number=4, message="PlannedProductReachForecast",
    )
class Forecast(proto.Message):
    r"""Forecasted traffic metrics for the planned products and
    targeting.
    Attributes:
        on_target_reach (int):
            Number of unique people reached at least
            GenerateReachForecastRequest.min_effective_frequency or
            GenerateReachForecastRequest.effective_frequency_limit times
            that exactly matches the Targeting.
            Note that a minimum number of unique people must be reached
            in order for data to be reported. If the minimum number is
            not met, the on_target_reach value will be rounded to 0.
        total_reach (int):
            Total number of unique people reached at least
            GenerateReachForecastRequest.min_effective_frequency or
            GenerateReachForecastRequest.effective_frequency_limit
            times. This includes people that may fall outside the
            specified Targeting.
            Note that a minimum number of unique people must be reached
            in order for data to be reported. If the minimum number is
            not met, the total_reach value will be rounded to 0.
        on_target_impressions (int):
            Number of ad impressions that exactly matches
            the Targeting.
        total_impressions (int):
            Total number of ad impressions. This includes
            impressions that may fall outside the specified
            Targeting, due to insufficient information on
            signed-in users.
        viewable_impressions (int):
            Number of times the ad's impressions were
            considered viewable. See
            https://support.google.com/google-
            ads/answer/7029393 for more information about
            what makes an ad viewable and how viewability is
            measured.
        effective_frequency_breakdowns (Sequence[google.ads.googleads.v8.services.types.EffectiveFrequencyBreakdown]):
            A list of effective frequency forecasts. The list is ordered
            starting with 1+ and ending with the value set in
            GenerateReachForecastRequest.effective_frequency_limit. If
            no effective_frequency_limit was set, this list will be
            empty.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    on_target_reach = proto.Field(proto.INT64, number=5, optional=True,)
    total_reach = proto.Field(proto.INT64, number=6, optional=True,)
    on_target_impressions = proto.Field(proto.INT64, number=7, optional=True,)
    total_impressions = proto.Field(proto.INT64, number=8, optional=True,)
    viewable_impressions = proto.Field(proto.INT64, number=9, optional=True,)
    effective_frequency_breakdowns = proto.RepeatedField(
        proto.MESSAGE, number=10, message="EffectiveFrequencyBreakdown",
    )
class PlannedProductReachForecast(proto.Message):
    r"""The forecasted allocation and traffic metrics for a specific
    product at a point on the reach curve.
    Attributes:
        plannable_product_code (str):
            Selected product for planning. The product
            codes returned are within the set of the ones
            returned by ListPlannableProducts when using the
            same location id.
        cost_micros (int):
            The cost in micros. This may differ from the
            product's input allocation if one or more
            planned products cannot fulfill the budget
            because of limited inventory.
        planned_product_forecast (google.ads.googleads.v8.services.types.PlannedProductForecast):
            Forecasted traffic metrics for this product.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    plannable_product_code = proto.Field(proto.STRING, number=1,)
    cost_micros = proto.Field(proto.INT64, number=2,)
    planned_product_forecast = proto.Field(
        proto.MESSAGE, number=3, message="PlannedProductForecast",
    )
class PlannedProductForecast(proto.Message):
    r"""Forecasted traffic metrics for a planned product.
    Attributes:
        on_target_reach (int):
            Number of unique people reached that exactly matches the
            Targeting.
            Note that a minimum number of unique people must be reached
            in order for data to be reported. If the minimum number is
            not met, the on_target_reach value will be rounded to 0.
        total_reach (int):
            Number of unique people reached. This includes people that
            may fall outside the specified Targeting.
            Note that a minimum number of unique people must be reached
            in order for data to be reported. If the minimum number is
            not met, the total_reach value will be rounded to 0.
        on_target_impressions (int):
            Number of ad impressions that exactly matches
            the Targeting.
        total_impressions (int):
            Total number of ad impressions. This includes
            impressions that may fall outside the specified
            Targeting, due to insufficient information on
            signed-in users.
        viewable_impressions (int):
            Number of times the ad's impressions were
            considered viewable. See
            https://support.google.com/google-
            ads/answer/7029393 for more information about
            what makes an ad viewable and how viewability is
            measured.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    on_target_reach = proto.Field(proto.INT64, number=1,)
    total_reach = proto.Field(proto.INT64, number=2,)
    on_target_impressions = proto.Field(proto.INT64, number=3,)
    total_impressions = proto.Field(proto.INT64, number=4,)
    viewable_impressions = proto.Field(proto.INT64, number=5, optional=True,)
class OnTargetAudienceMetrics(proto.Message):
    r"""Audience metrics for the planned products.
    These metrics consider the following targeting dimensions:
    - Location
    - PlannableAgeRange
    - Gender
    Attributes:
        youtube_audience_size (int):
            Reference audience size matching the
            considered targeting for YouTube.
        census_audience_size (int):
            Reference audience size matching the
            considered targeting for Census.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    youtube_audience_size = proto.Field(proto.INT64, number=3, optional=True,)
    census_audience_size = proto.Field(proto.INT64, number=4, optional=True,)
class EffectiveFrequencyBreakdown(proto.Message):
    r"""A breakdown of the number of unique people reached at a given
    effective frequency.
    Attributes:
        effective_frequency (int):
            The effective frequency [1-10].
        on_target_reach (int):
            The number of unique people reached at least
            effective_frequency times that exactly matches the
            Targeting.
            Note that a minimum number of unique people must be reached
            in order for data to be reported. If the minimum number is
            not met, the on_target_reach value will be rounded to 0.
        total_reach (int):
            Total number of unique people reached at least
            effective_frequency times. This includes people that may
            fall outside the specified Targeting.
            Note that a minimum number of unique people must be reached
            in order for data to be reported. If the minimum number is
            not met, the total_reach value will be rounded to 0.
    """
    # Generated proto-plus message wrapper; field numbers are the wire-format contract.
    effective_frequency = proto.Field(proto.INT32, number=1,)
    on_target_reach = proto.Field(proto.INT64, number=2,)
    total_reach = proto.Field(proto.INT64, number=3,)
__all__ = tuple(sorted(__protobuf__.manifest))
|
googleads/google-ads-python
|
google/ads/googleads/v8/services/types/reach_plan_service.py
|
Python
|
apache-2.0
| 27,983
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fmgr_secprof_profile_group
version_added: "2.8"
notes:
- Full Documentation at U(https://ftnt-ansible-docs.readthedocs.io/en/latest/).
author:
- Luke Weighall (@lweighall)
- Andrew Welsh (@Ghilli3)
- Jim Huber (@p4r4n0y1ng)
short_description: Manage security profiles within FortiManager
description:
- Manage security profile group which allows you to create a group of security profiles and apply that to a policy.
options:
adom:
description:
- The ADOM the configuration should belong to.
required: false
default: root
mode:
description:
- Sets one of three modes for managing the object.
- Allows use of soft-adds instead of overwriting existing values.
choices: ['add', 'set', 'delete', 'update']
required: false
default: add
webfilter_profile:
type: str
description:
- Name of an existing Web filter profile.
required: false
waf_profile:
type: str
description:
- Name of an existing Web application firewall profile.
required: false
voip_profile:
type: str
description:
- Name of an existing VoIP profile.
required: false
ssl_ssh_profile:
type: str
description:
- Name of an existing SSL SSH profile.
required: false
ssh_filter_profile:
type: str
description:
- Name of an existing SSH filter profile.
required: false
spamfilter_profile:
type: str
description:
- Name of an existing Spam filter profile.
required: false
profile_protocol_options:
type: str
description:
- Name of an existing Protocol options profile.
required: false
name:
type: str
description:
- Profile group name.
required: false
mms_profile:
type: str
description:
- Name of an existing MMS profile.
required: false
ips_sensor:
type: str
description:
- Name of an existing IPS sensor.
required: false
icap_profile:
type: str
description:
- Name of an existing ICAP profile.
required: false
dnsfilter_profile:
type: str
description:
- Name of an existing DNS filter profile.
required: false
dlp_sensor:
type: str
description:
- Name of an existing DLP sensor.
required: false
av_profile:
type: str
description:
- Name of an existing Antivirus profile.
required: false
application_list:
type: str
description:
- Name of an existing Application list.
required: false
'''
EXAMPLES = '''
- name: DELETE Profile
fmgr_secprof_profile_group:
name: "Ansible_TEST_Profile_Group"
mode: "delete"
- name: CREATE Profile
fmgr_secprof_profile_group:
name: "Ansible_TEST_Profile_Group"
mode: "set"
av_profile: "Ansible_AV_Profile"
profile_protocol_options: "default"
'''
RETURN = """
api_result:
description: full API response, includes status code and message
returned: always
type: str
"""
from ansible.module_utils.basic import AnsibleModule, env_fallback
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortimanager.fortimanager import FortiManagerHandler
from ansible.module_utils.network.fortimanager.common import FMGBaseException
from ansible.module_utils.network.fortimanager.common import FMGRCommon
from ansible.module_utils.network.fortimanager.common import FMGRMethods
from ansible.module_utils.network.fortimanager.common import DEFAULT_RESULT_OBJ
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
from ansible.module_utils.network.fortimanager.common import prepare_dict
from ansible.module_utils.network.fortimanager.common import scrub_dict
###############
# START METHODS
###############
def fmgr_firewall_profile_group_modify(fmgr, paramgram):
    """
    :param fmgr: The fmgr object instance from fortimanager.py
    :type fmgr: class object
    :param paramgram: The formatted dictionary of options to process
    :type paramgram: dict
    :return: The response from the FortiManager
    :rtype: dict
    """
    response = DEFAULT_RESULT_OBJ
    adom = paramgram["adom"]
    mode = paramgram["mode"]
    url = ""
    datagram = {}
    # Create/update operations post the scrubbed paramgram to the collection
    # endpoint; delete targets the named object directly.
    if mode in ['set', 'add', 'update']:
        datagram = scrub_dict(prepare_dict(paramgram))
        url = '/pm/config/adom/{adom}/obj/firewall/profile-group'.format(adom=adom)
    elif mode == "delete":
        datagram = {}
        url = '/pm/config/adom/{adom}/obj/firewall/profile-group/{name}'.format(adom=adom, name=paramgram["name"])
    response = fmgr.process_request(url, datagram, paramgram["mode"])
    return response
#############
# END METHODS
#############
def main():
    """Ansible module entry point: map module params to a FortiManager
    paramgram and apply the requested add/set/update/delete operation."""
    argument_spec = dict(
        adom=dict(type="str", default="root"),
        mode=dict(choices=["add", "set", "delete", "update"], type="str", default="add"),
        webfilter_profile=dict(required=False, type="str"),
        waf_profile=dict(required=False, type="str"),
        voip_profile=dict(required=False, type="str"),
        ssl_ssh_profile=dict(required=False, type="str"),
        ssh_filter_profile=dict(required=False, type="str"),
        spamfilter_profile=dict(required=False, type="str"),
        profile_protocol_options=dict(required=False, type="str"),
        name=dict(required=False, type="str"),
        mms_profile=dict(required=False, type="str"),
        ips_sensor=dict(required=False, type="str"),
        icap_profile=dict(required=False, type="str"),
        dnsfilter_profile=dict(required=False, type="str"),
        dlp_sensor=dict(required=False, type="str"),
        av_profile=dict(required=False, type="str"),
        application_list=dict(required=False, type="str"),
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=False, )
    # MODULE PARAMGRAM
    # Keys are the hyphenated names expected by the FortiManager API,
    # mapped from the underscored Ansible option names.
    paramgram = {
        "mode": module.params["mode"],
        "adom": module.params["adom"],
        "webfilter-profile": module.params["webfilter_profile"],
        "waf-profile": module.params["waf_profile"],
        "voip-profile": module.params["voip_profile"],
        "ssl-ssh-profile": module.params["ssl_ssh_profile"],
        "ssh-filter-profile": module.params["ssh_filter_profile"],
        "spamfilter-profile": module.params["spamfilter_profile"],
        "profile-protocol-options": module.params["profile_protocol_options"],
        "name": module.params["name"],
        "mms-profile": module.params["mms_profile"],
        "ips-sensor": module.params["ips_sensor"],
        "icap-profile": module.params["icap_profile"],
        "dnsfilter-profile": module.params["dnsfilter_profile"],
        "dlp-sensor": module.params["dlp_sensor"],
        "av-profile": module.params["av_profile"],
        "application-list": module.params["application_list"],
    }
    module.paramgram = paramgram
    fmgr = None
    # The module only works over a persistent httpapi connection socket.
    if module._socket_path:
        connection = Connection(module._socket_path)
        fmgr = FortiManagerHandler(connection, module)
        fmgr.tools = FMGRCommon()
    else:
        module.fail_json(**FAIL_SOCKET_MSG)
    results = DEFAULT_RESULT_OBJ
    try:
        results = fmgr_firewall_profile_group_modify(fmgr, paramgram)
        fmgr.govern_response(module=module, results=results,
                             ansible_facts=fmgr.construct_ansible_facts(results, module.params, paramgram))
    except Exception as err:
        raise FMGBaseException(err)
    return module.exit_json(**results[1])
if __name__ == "__main__":
main()
|
alxgu/ansible
|
lib/ansible/modules/network/fortimanager/fmgr_secprof_profile_group.py
|
Python
|
gpl-3.0
| 8,605
|
import re
from pathlib import Path
from shared import *
# Represent a Python 3 float losslessly as an SMT-LIBv2 Real of minimal length
def real_repr(f):
    """Render a Python float as an SMT-LIBv2 Real literal of minimal length,
    preserving the value exactly (no exponent notation)."""
    text = repr(float(f))
    frac = re.search(r'\.([0-9]+)', text)
    digits = 0 if frac is None else len(frac.group(1))
    exp = re.search(r'[Ee]([+-][0-9]+)', text)
    if exp is not None:
        # A negative exponent needs extra decimal places; a positive one
        # removes the need for them.
        digits -= int(exp.group(1))
    digits = max(1, digits)
    return '{:.{}f}'.format(f, digits)
def nnabla_to_smt2_info(var, names=None, collect=None, rcollect=None,
                        vars=None, assertions=None, nid=0, normal=True):
    """Recursively walk the NNabla graph ending at *var*, collecting SMT-LIBv2
    variable declarations and assertions.

    Returns ``(collect, vars, assertions, nid)`` where ``collect`` maps numeric
    ids to graph variables, ``vars`` lists SMT variable names to declare,
    ``assertions`` holds SMT-LIBv2 constraint strings, and ``nid`` is the next
    unused id.

    Bug fixes versus the original:
    - The accumulator parameters used mutable default arguments ({} / []),
      which are created once at ``def`` time, so state leaked between separate
      top-level calls.  They now default to None and are allocated per call.
    - The early return for an already-visited variable returned only
      ``collect``, while every caller unpacks a 4-tuple; it now returns the
      full tuple.
    """
    if names is None:
        names = {}
    if collect is None:
        collect = {}
    if rcollect is None:
        rcollect = {}
    if vars is None:
        vars = []
    if assertions is None:
        assertions = []
    if var in rcollect:
        # Already processed this variable; return the accumulators unchanged.
        return collect, vars, assertions, nid
    rcollect[var] = nid
    if var not in names:
        names[var] = 'var_{}'.format(nid)
    collect[nid] = var
    cur_name = names[var]
    if normal:
        # Only 2-D activations (batch, width) get per-column SMT variables.
        assert len(var.shape) == 2
        for index in range(var.shape[1]):
            vars.append('{}_{}'.format(cur_name, index))
    nid += 1
    if var.parent is not None:
        eprint(var.parent)
        eprint(var.parent.inputs)
        eprint(type(var.parent.inputs))
        # Visit inputs first; only the first input of a function is a
        # "normal" activation (the rest are trained parameters).
        for index, input in enumerate(var.parent.inputs):
            _, _, _, nid = nnabla_to_smt2_info(input, names, collect, rcollect,
                                               vars, assertions, nid, index == 0)
        if var.parent.name == 'ReLU':
            assert normal
            assert len(var.parent.inputs) == 1
            assert var.parent.inputs[0].shape == var.shape
            param_name = names[var.parent.inputs[0]]
            for index in range(var.shape[1]):
                assertions.append('(= {}_{} (max 0.0 {}_{}))'.format(
                    cur_name, index, param_name, index
                ))
        elif var.parent.name == 'Affine':
            # Wx + b -- W and b are trained parameters
            assert normal
            assert len(var.parent.inputs) == 3
            var_x = var.parent.inputs[0]
            var_W = var.parent.inputs[1]
            var_b = var.parent.inputs[2]
            assert len(var_x.shape) == 2
            assert len(var_W.shape) == 2
            assert len(var_b.shape) == 1
            assert var_W.shape[0] == var_x.shape[1]
            assert var_W.shape[1] == var.shape[1]
            assert var_W.shape[1] == var_b.shape[0]
            x_name = names[var_x]
            for i in range(var.shape[1]):
                terms = []
                for j in range(var_x.shape[1]):
                    terms.append('(* {} {}_{})'.format(
                        real_repr(var_W.d[j][i]),
                        x_name,
                        j
                    ))
                assertions.append('(= {}_{} (+ {} {}))'.format(
                    cur_name,
                    i,
                    real_repr(var_b.d[i]),
                    ' '.join(terms)
                ))
        else:
            raise Exception('Unsupported function: {}'.format(var.parent.name))
    return collect, vars, assertions, nid
def nnabla_to_smt2(var, names=None, save_test=None, seed=None, test_seed=None,
                   test_eps=1e-6, test_batch=None, include=None, std=False):
    """Translate the NNabla graph ending at *var* into an SMT-LIBv2 script.

    Bug fix: ``names`` previously defaulted to a shared mutable ``{}``, so
    name assignments leaked between separate calls; it now defaults to None
    and a fresh dict is created per call.

    NOTE(review): when ``save_test`` is given, ``test_batch`` must also be
    supplied (the ``<=`` comparison below fails on None) -- confirm callers.
    """
    if names is None:
        names = {}
    collect, vars, assertions, _ = nnabla_to_smt2_info(var, names)
    smt2 = ''
    smt2 += '(set-logic QF_NRA)\n'
    if std:
        # Standard SMT-LIB has no `max`; define it for solvers that need it.
        smt2 += '\n(define-fun max ((x Real) (y Real)) Real ' \
                '(ite (>= x y) x y))\n'
    if seed:
        smt2 += '\n; Training seed = {}\n'.format(seed)
    smt2 += '\n; NN variables\n\n'
    smt2 += ''.join(map(lambda n: '(declare-fun {} () Real)\n'.format(n), vars))
    smt2 += '\n; NN assertions\n\n'
    smt2 += ''.join(map(lambda a: '(assert {})\n'.format(a), assertions))
    smt2 += '\n'
    if save_test is not None:
        if test_seed:
            smt2 += '; Test seed = {}\n\n'.format(test_seed)
        (x, y) = (save_test, var)
        assert x.shape[0] == y.shape[0]
        assert test_batch <= x.shape[0]
        # Assert that at least one test point is reproduced within test_eps.
        smt2 += '; Assertion for test data\n\n'
        cases = []
        for i in range(0, test_batch):
            cases.append(('(and (= {} {}) (= {} {})\n '
                          ' (or (< {} {}) (> {} {})\n '
                          ' (< {} {}) (> {} {})))').format(
                names[x] + '_0', real_repr(x.d[i][0]),
                names[x] + '_1', real_repr(x.d[i][1]),
                names[y] + '_0', real_repr(y.d[i][0] - test_eps),
                names[y] + '_0', real_repr(y.d[i][0] + test_eps),
                names[y] + '_1', real_repr(y.d[i][1] - test_eps),
                names[y] + '_1', real_repr(y.d[i][1] + test_eps)
            ))
        smt2 += '(assert (or\n {}))\n\n'.format('\n '.join(cases))
    if include is not None:
        smt2 += Path(include).read_text('utf-8') + '\n'
    smt2 += '(check-sat)\n'
    smt2 += '(exit)\n'
    return smt2
def parse_smt2(string):
    """Parse an SMT-LIBv2 string into nested Python lists.

    Atoms that look like numbers become floats; everything else stays a
    string.  Parentheses produce nested lists.
    """
    token_pattern = r'(?x) ( \s+ )| [()] | [^\s()]+ '
    root = []
    parents = []
    current = root
    for match in re.finditer(token_pattern, string):
        if match.group(1) is not None:
            continue  # skip whitespace runs
        token = match.group(0)
        if token == '(':
            child = []
            current.append(child)
            parents.append(current)
            current = child
        elif token == ')':
            current = parents.pop()
        else:
            try:
                token = float(token)
            except ValueError:
                pass
            current.append(token)
    return root
def parse_smt2_file(filename):
    """Read *filename* as UTF-8 and parse its contents as SMT-LIBv2."""
    text = Path(filename).read_text('utf-8')
    return parse_smt2(text)
|
martinjos/nn-circle
|
nn_smt2.py
|
Python
|
apache-2.0
| 5,455
|
import os
import uuid
from ..tools import row2dict, xls_reader
from datetime import datetime
from sqlalchemy import not_, func
from pyramid.view import (
view_config,
)
from pyramid.httpexceptions import (
HTTPFound,
)
import colander
from deform import (
Form,
widget,
ValidationFailure,
)
from ..models import (
DBSession,
Group
)
from datatables import ColumnDT, DataTables
from ..views.base_view import BaseViews
SESS_ADD_FAILED = 'Tambah group gagal'
SESS_EDIT_FAILED = 'Edit group gagal'
class AddSchema(colander.Schema):
    """Colander schema for the group add form (name limited to 18 chars)."""
    group_name = colander.SchemaNode(
        colander.String(),
        validator=colander.Length(max=18))
    description = colander.SchemaNode(
        colander.String())
class EditSchema(AddSchema):
    """Add schema plus a hidden, read-only id for editing existing groups."""
    id = colander.SchemaNode(colander.String(),
                             missing=colander.drop,
                             widget=widget.HiddenWidget(readonly=True))
class view_group(BaseViews):
########
# List #
########
@view_config(route_name='group', renderer='templates/group/list.pt',
permission='read')
def view_list(self):
return dict(a={})
##########
# Action #
##########
@view_config(route_name='group-act', renderer='json',
permission='read')
def gaji_group_act(self):
ses = self.request.session
req = self.request
params = req.params
url_dict = req.matchdict
if url_dict['act']=='grid':
columns = []
columns.append(ColumnDT('id'))
columns.append(ColumnDT('group_name'))
columns.append(ColumnDT('description'))
columns.append(ColumnDT('member_count'))
query = DBSession.query(Group)
rowTable = DataTables(req, Group, query, columns)
return rowTable.output_result()
elif url_dict['act']=='headofnama':
term = 'term' in params and params['term'] or ''
rows = DBSession.query(Group.id, Group.group_name
).filter(
Group.group_name.ilike('%%%s%%' % term) ).all()
r = []
for k in rows:
d={}
d['id'] = k[0]
d['value'] = k[1]
r.append(d)
return r
#######
# Add #
#######
    def form_validator(self, form, value):
        """Deform-level validator hook for the group form.

        NOTE(review): the Group looked up here is never used and no
        colander.Invalid is ever raised -- this looks like an unfinished
        duplicate-name/existence check.  Confirm intent before extending.
        """
        if 'id' in form.request.matchdict:
            uid = form.request.matchdict['id']
            q = DBSession.query(Group).filter_by(id=uid)
            group = q.first()
        else:
            group = None
def get_form(self, class_form, row=None):
schema = class_form(validator=self.form_validator)
schema = schema.bind()
schema.request = self.request
if row:
schema.deserialize(row)
return Form(schema, buttons=('simpan','batal'))
def save(self, values, user, row=None):
if not row:
row = Group()
row.created = datetime.now()
row.create_uid = user.id
row.from_dict(values)
row.updated = datetime.now()
row.update_uid = user.id
row.disabled = 'disabled' in values and values['disabled'] and 1 or 0
DBSession.add(row)
DBSession.flush()
return row
def save_request(self, values, row=None):
if 'id' in self.request.matchdict:
values['id'] = self.request.matchdict['id']
row = self.save(values, self.request.user, row)
self.request.session.flash('group sudah disimpan.')
def route_list(self):
return HTTPFound(location=self.request.route_url('group'))
def session_failed(self, session_name):
r = dict(form=self.session[session_name])
del self.session[session_name]
return r
@view_config(route_name='group-add', renderer='templates/group/add.pt',
permission='add')
def view_group_add(self):
req = self.request
ses = self.session
form = self.get_form(AddSchema)
if req.POST:
if 'simpan' in req.POST:
controls = req.POST.items()
try:
c = form.validate(controls)
except ValidationFailure, e:
req.session[SESS_ADD_FAILED] = e.render()
return HTTPFound(location=req.route_url('group-add'))
self.save_request(dict(controls))
return self.route_list()
elif SESS_ADD_FAILED in req.session:
return self.session_failed(SESS_ADD_FAILED)
return dict(form=form.render())
########
# Edit #
########
def query_id(self):
return DBSession.query(Group).filter_by(id=self.request.matchdict['id'])
def id_not_found(self):
msg = 'group ID %s Tidak Ditemukan.' % self.request.matchdict['id']
request.session.flash(msg, 'error')
return route_list()
@view_config(route_name='group-edit', renderer='templates/group/edit.pt',
permission='edit')
def view_group_edit(self):
request = self.request
row = self.query_id().first()
if not row:
return id_not_found(request)
form = self.get_form(EditSchema)
if request.POST:
if 'simpan' in request.POST:
controls = request.POST.items()
print controls
try:
c = form.validate(controls)
except ValidationFailure, e:
request.session[SESS_EDIT_FAILED] = e.render()
return HTTPFound(location=request.route_url('group-edit',
id=row.id))
self.save_request(dict(controls), row)
return self.route_list()
elif SESS_EDIT_FAILED in request.session:
return self.session_failed(SESS_EDIT_FAILED)
values = row.to_dict()
return dict(form=form.render(appstruct=values))
##########
# Delete #
##########
@view_config(route_name='group-delete', renderer='templates/group/delete.pt',
permission='delete')
def view_group_delete(self):
request = self.request
q = self.query_id()
row = q.first()
if not row:
return self.id_not_found(request)
form = Form(colander.Schema(), buttons=('hapus','batal'))
if request.POST:
if 'hapus' in request.POST:
msg = 'group ID %d %s sudah dihapus.' % (row.id, row.description)
try:
q.delete()
DBSession.flush()
except:
msg = 'group ID %d %s tidak dapat dihapus.' % (row.id, row.description)
request.session.flash(msg)
return self.route_list()
return dict(row=row,
form=form.render())
|
aagusti/e-gaji
|
egaji/views/m_group.py
|
Python
|
gpl-2.0
| 7,251
|
def binary(x):
    """Return x's base-2 digit string reinterpreted as a decimal integer."""
    digits = bin(x)[2:]
    return int(digits)
# Read T test cases; each case is two lines holding binary numbers.
# For each case print (a) their sum written in base 2 (as a decimal-looking
# integer via binary()) and (b) their sum modulo 1e9+7.
T = input()
while(T):
    T -= 1
    s1 = raw_input()
    s2 = raw_input()
    # Parse the two lines as base-2 integers.
    s1 = int(s1,2)
    s2 = int(s2,2)
    summ = s1+s2
    con = binary(summ)
    print con
    print (s1+s2)%1000000007
|
Dawny33/Code
|
HackerEarth/CodeCrunch/bin.py
|
Python
|
gpl-3.0
| 243
|
"""
@package mi.instrument.satlantic.ocr_507_icsw.ooicore.driver
@file marine-integrations/mi/instrument/satlantic/ocr_507_icsw/ooicore/driver.py
@author Godfrey Duke
@brief Instrument driver classes that provide structure towards interaction
with the Satlantic OCR507 ICSW w/ Midrange Bioshutter
"""
import time
import re
import struct
from mi.core.instrument.chunker import StringChunker
from mi.core.instrument.driver_dict import DriverDictKey
from mi.core.instrument.instrument_protocol import CommandResponseInstrumentProtocol, RE_PATTERN, DEFAULT_CMD_TIMEOUT, \
InitializationType
from mi.core.instrument.protocol_param_dict import ParameterDictType, ParameterDictVisibility
from mi.core.common import BaseEnum, Units
from mi.core.instrument.instrument_driver import SingleConnectionInstrumentDriver, DriverProtocolState, DriverEvent, \
DriverAsyncEvent, ResourceAgentState
from mi.core.instrument.instrument_driver import DriverParameter
from mi.core.instrument.data_particle import CommonDataParticleType, DataParticleValue
from mi.core.common import InstErrorCode
from mi.core.instrument.instrument_fsm import ThreadSafeFSM
from mi.core.exceptions import SampleException, InstrumentParameterException, InstrumentProtocolException, \
InstrumentException, InstrumentTimeoutException, InstrumentCommandException
from mi.core.instrument.data_particle import DataParticle, DataParticleKey
from mi.core.log import get_logger, get_logging_metaclass
__author__ = 'Godfrey Duke'
__license__ = 'Apache 2.0'
log = get_logger()
# ###################################################################
# Module-wide values
####################################################################
# NOTE: Regex deviates from manual slightly. Manual indicates the timer field should be 10 bytes
# but data collected from the instrument shows this will overflow to 11 bytes if left running long
# enough. 11 bytes is enough to represent 3 years of continuous collect, so limiting it there.
# One sample frame: ASCII header (id, serial, timer), 37 bytes of binary
# channel data, 1 checksum byte, CRLF.
SAMPLE_PATTERN = r'(?P<instrument_id>SATDI7)(?P<serial_number>\d{4})(?P<timer>\d{7,8}\.\d\d)(?P<binary_data>.{37})(?P<checksum>.)\r\n'
SAMPLE_REGEX = re.compile(SAMPLE_PATTERN, re.DOTALL)
# Configuration display as printed by the instrument's "show" output;
# compiled with re.VERBOSE, hence the escaped spaces.
CONFIG_PATTERN = '''Satlantic\ OCR.*?
Firmware\ version:\ (?P<firmware_version>.*?)\s*
Instrument:\ (?P<instrument_id>\w+)\s*
S\/N:\ (?P<serial_number>\w+).*?
Telemetry\ Baud\ Rate:\ (?P<telemetry_baud_rate>\d+)\ bps\s*
Maximum\ Frame\ Rate:\ (?P<max_frame_rate>\S+).*?
Initialize\ Silent\ Mode:\ (?P<initialize_silent_mode>off|on)\s*
Initialize\ Power\ Down:\ (?P<initialize_power_down>off|on)\s*
Initialize\ Automatic\ Telemetry:\ (?P<initialize_auto_telemetry>off|on)\s*
Network\ Mode:\ (?P<network_mode>off|on)\s*
Network\ Address:\ (?P<network_address>\d+)\s*
Network\ Baud\ Rate:\ (?P<network_baud_rate>\d+)\ bps.*?
\[Auto'''
# NOTE(review): dead, commented-out earlier version of CONFIG_PATTERN
# (max_frame_rate restricted to digits); candidate for removal.
# CONFIG_PATTERN = '''Satlantic\ OCR.*?
#         Firmware\ version:\ (?P<firmware_version>.*?)\s*
#         Instrument:\ (?P<instrument_id>\w+)\s*
#         S\/N:\ (?P<serial_number>\w+).*?
#         Telemetry\ Baud\ Rate:\ (?P<telemetry_baud_rate>\d+)\ bps\s*
#         Maximum\ Frame\ Rate:\ (?P<max_frame_rate>\d+)\ Hz\s*
#         Initialize\ Silent\ Mode:\ (?P<initialize_silent_mode>off|on)\s*
#         Initialize\ Power\ Down:\ (?P<initialize_power_down>off|on)\s*
#         Initialize\ Automatic\ Telemetry:\ (?P<initialize_auto_telemetry>off|on)\s*
#         Network\ Mode:\ (?P<network_mode>off|on)\s*
#         Network\ Address:\ (?P<network_address>\d+)\s*
#         Network\ Baud\ Rate:\ (?P<network_baud_rate>\d+)\ bps.*?
#         \[Auto'''
CONFIG_REGEX = re.compile(CONFIG_PATTERN, re.DOTALL | re.VERBOSE)
# Banner the instrument prints while booting.
init_pattern = r'Press <Ctrl\+C> for command console. \r\nInitializing system. Please wait...\r\n'
init_regex = re.compile(init_pattern)
COMMAND_PATTERN = 'Command Console'
RESET_DELAY = 6
EOLN = "\r\n"  # line terminator sent after each command (see _do_cmd)
RETRY = 3
# Legal values for the maxrate parameter (0 means AUTO).
VALID_MAXRATES = (0, 0.125, 0.25, 0.5, 1, 2, 4, 8, 10, 12)
class DataParticleType(BaseEnum):
    """Stream names of the data particles this driver publishes."""
    RAW = CommonDataParticleType.RAW
    PARSED = 'spkir_data_record'
    CONFIG = 'spkir_a_configuration_record'
class SatlanticSpecificDriverEvents(BaseEnum):
    """Driver events specific to this Satlantic instrument."""
    START_POLL = 'DRIVER_EVENT_START_POLL'
    STOP_POLL = 'DRIVER_EVENT_STOP_POLL'
####################################################################
# Static enumerations for this class
####################################################################
class Command(BaseEnum):
    """Instrument command strings and raw control characters."""
    SAVE = 'save'
    EXIT = 'exit'
    EXIT_AND_RESET = 'exit!'
    GET = 'show'
    SET = 'set'
    RESET = '\x12'  # CTRL-R
    BREAK = '\x03'  # CTRL-C
    SWITCH_TO_AUTOSAMPLE = '\x01'  # CTRL-A
    SAMPLE = '\x0D'  # CR
    ID = 'id'
    SHOW_ALL = 'show all'
    # Deliberately-unknown command; used by _handler_unknown_discover to
    # probe for command mode (only command mode answers "unknown command").
    INVALID = 'foo'
class SatlanticProtocolState(BaseEnum):
    """Protocol FSM states, mapped onto the common driver states."""
    COMMAND = DriverProtocolState.COMMAND
    AUTOSAMPLE = DriverProtocolState.AUTOSAMPLE
    UNKNOWN = DriverProtocolState.UNKNOWN
    DIRECT_ACCESS = DriverProtocolState.DIRECT_ACCESS
class SatlanticProtocolEvent(BaseEnum):
    """Protocol FSM events, mapped onto the common driver events."""
    ENTER = DriverEvent.ENTER
    EXIT = DriverEvent.EXIT
    GET = DriverEvent.GET
    SET = DriverEvent.SET
    DISCOVER = DriverEvent.DISCOVER
    START_AUTOSAMPLE = DriverEvent.START_AUTOSAMPLE
    STOP_AUTOSAMPLE = DriverEvent.STOP_AUTOSAMPLE
    ACQUIRE_STATUS = DriverEvent.ACQUIRE_STATUS
    TEST = DriverEvent.TEST
    RUN_TEST = DriverEvent.RUN_TEST
    CALIBRATE = DriverEvent.CALIBRATE
    EXECUTE_DIRECT = DriverEvent.EXECUTE_DIRECT
    START_DIRECT = DriverEvent.START_DIRECT
    STOP_DIRECT = DriverEvent.STOP_DIRECT
class SatlanticCapability(BaseEnum):
    """
    Protocol events that should be exposed to users (subset of above).
    """
    # _filter_capabilities() keeps only events listed here.
    START_AUTOSAMPLE = SatlanticProtocolEvent.START_AUTOSAMPLE
    STOP_AUTOSAMPLE = SatlanticProtocolEvent.STOP_AUTOSAMPLE
    ACQUIRE_STATUS = SatlanticProtocolEvent.ACQUIRE_STATUS
    GET = DriverEvent.GET
    SET = DriverEvent.SET
    START_DIRECT = DriverEvent.START_DIRECT
    STOP_DIRECT = DriverEvent.STOP_DIRECT
class Parameter(DriverParameter):
    """Instrument parameter names as used by the set/show commands."""
    MAX_RATE = 'maxrate'  # maximum frame rate
    INIT_SM = 'initsm'    # initialize silent mode
    INIT_AT = 'initat'    # initialize automatic telemetry
    NET_MODE = 'netmode'  # network mode
class Prompt(BaseEnum):
    """
    Command Prompt
    """
    # Strings the instrument emits that the protocol matches on.
    USAGE = 'Usage'
    INVALID_COMMAND = 'unknown command'
    COMMAND = ']$'
###############################################################################
# Satlantic OCR507 Sensor Driver.
###############################################################################
class SatlanticOCR507InstrumentDriver(SingleConnectionInstrumentDriver):
    """
    The InstrumentDriver class for the Satlantic OCR507 sensor SPKIR.
    Thin wrapper: all instrument logic lives in the protocol object.
    """
    def __init__(self, evt_callback):
        """
        @param evt_callback The callback function to use for events
        """
        SingleConnectionInstrumentDriver.__init__(self, evt_callback)

    def _build_protocol(self):
        """Construct the driver protocol state machine."""
        self._protocol = SatlanticOCR507InstrumentProtocol(self._driver_event)
class SatlanticOCR507DataParticleKey(BaseEnum):
    """Field names published in the parsed sample data particle."""
    INSTRUMENT_ID = "instrument_id"
    SERIAL_NUMBER = "serial_number"
    TIMER = "timer"
    SAMPLE_DELAY = "sample_delay"
    SAMPLES = "spkir_samples"  # list of the 7 channel samples
    REGULATED_INPUT_VOLTAGE = "vin_sense"
    ANALOG_RAIL_VOLTAGE = "va_sense"
    INTERNAL_TEMP = "internal_temperature"
    FRAME_COUNTER = "frame_counter"
    CHECKSUM = "checksum"
class SatlanticOCR507ConfigurationParticleKey(BaseEnum):
    """Field names published in the configuration data particle."""
    FIRMWARE_VERSION = 'spkir_a_firmware_version'
    INSTRUMENT_ID = "instrument_id"
    SERIAL_NUMBER = "serial_number"
    TELEMETRY_BAUD_RATE = "telemetry_baud_rate"
    MAX_FRAME_RATE = "max_frame_rate"
    INIT_SILENT_MODE = "initialize_silent_mode"
    INIT_POWER_DOWN = "initialize_power_down"
    INIT_AUTO_TELEMETRY = "initialize_auto_telemetry"
    NETWORK_MODE = "network_mode"
    NETWORK_ADDRESS = "network_address"
    NETWORK_BAUD_RATE = "network_baud_rate"
class SatlanticOCR507DataParticle(DataParticle):
    """
    Routines for parsing raw data into a data particle structure for the
    Satlantic OCR507 sensor. Overrides the building of values, and the rest comes
    along for free.
    """
    __metaclass__ = get_logging_metaclass(log_level='debug')
    _data_particle_type = DataParticleType.PARSED

    def _build_parsed_values(self):
        """
        Take something in the sample format and split it into
        a OCR507 values (with an appropriate tag)
        @throws SampleException If there is a problem with sample creation
        """
        match = SAMPLE_REGEX.match(self.raw_data)
        if not match:
            raise SampleException("No regex match of parsed sample data: [%r]" %
                                  self.raw_data)
        # Parse the relevant ascii fields
        instrument_id = match.group('instrument_id')
        serial_number = match.group('serial_number')
        timer = float(match.group('timer'))
        # Ensure the expected values were present
        if not instrument_id:
            raise SampleException("No instrument id value parsed")
        if not serial_number:
            raise SampleException("No serial number value parsed")
        # NOTE(review): this also rejects a timer reading of exactly 0.00,
        # since 0.0 is falsy -- confirm that is intended.
        if not timer:
            raise SampleException("No timer value parsed")
        # Parse the relevant binary data
        '''
        Field Name Field Size (bytes) Description Format Char
        ---------- ------------------ ----------- -----------
        sample_delay 2 BS formatted value h
        ch[1-7]_sample 4 BU formatted value I
        regulated_input_voltage 2 BU formatted value H
        analog_rail_voltage 2 BU formatted value H
        internal_temp 2 BU formatted value H
        frame_counter 1 BU formatted value B
        checksum 1 BU formatted value B
        '''
        # 37 bytes of binary data + 1 checksum byte = 38 bytes, matching
        # the big-endian format '!h7IHHHBB' (2 + 7*4 + 2 + 2 + 2 + 1 + 1).
        try:
            sample_delay, ch1_sample, ch2_sample, ch3_sample, ch4_sample, ch5_sample, ch6_sample, ch7_sample, \
                regulated_input_voltage, analog_rail_voltage, internal_temp, frame_counter, checksum \
                = struct.unpack('!h7IHHHBB', match.group('binary_data') + match.group('checksum'))
        except struct.error, e:
            raise SampleException(e)
        result = [{DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.INSTRUMENT_ID,
                   DataParticleKey.VALUE: instrument_id},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.SERIAL_NUMBER,
                   DataParticleKey.VALUE: serial_number},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.TIMER,
                   DataParticleKey.VALUE: timer},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.SAMPLE_DELAY,
                   DataParticleKey.VALUE: sample_delay},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.SAMPLES,
                   DataParticleKey.VALUE: [ch1_sample,
                                           ch2_sample,
                                           ch3_sample,
                                           ch4_sample,
                                           ch5_sample,
                                           ch6_sample,
                                           ch7_sample]},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.REGULATED_INPUT_VOLTAGE,
                   DataParticleKey.VALUE: regulated_input_voltage},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.ANALOG_RAIL_VOLTAGE,
                   DataParticleKey.VALUE: analog_rail_voltage},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.INTERNAL_TEMP,
                   DataParticleKey.VALUE: internal_temp},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.FRAME_COUNTER,
                   DataParticleKey.VALUE: frame_counter},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507DataParticleKey.CHECKSUM,
                   DataParticleKey.VALUE: checksum}]
        # A bad checksum does not reject the sample; it is flagged instead.
        if not self._checksum_check(self.raw_data):
            self.contents[DataParticleKey.QUALITY_FLAG] = DataParticleValue.CHECKSUM_FAILED
            log.warn("Invalid checksum encountered: %r.", checksum)
        log.debug('OCR507 Data Particle raw data: %r', self.raw_data)
        log.debug('OCR507 Data Particle parsed data: %r', result)
        return result

    def _checksum_check(self, data):
        """
        Confirm that the checksum is valid for the data line
        @param data The entire line of data, including the checksum
        @retval True if the checksum fits, False if the checksum is bad
        """
        if data is None or data == '':
            return False
        match = SAMPLE_REGEX.match(data)
        if not match:
            return False
        try:
            line_end = match.end('checksum')
        except IndexError:
            # Didn't have a checksum!
            return False
        line = data[:line_end]
        # Ensure the low order byte of the sum of all characters from the
        # beginning of the frame through the checksum equals 0
        checksum_validation = sum(ord(x) for x in line)
        checksum_validation &= 0xFF
        return checksum_validation == 0
class SatlanticOCR507ConfigurationParticle(DataParticle):
    """
    Routines for parsing the instrument's configuration display into a data
    particle structure for the Satlantic OCR507 sensor. Overrides the
    building of values, and the rest comes along for free.
    """
    _data_particle_type = DataParticleType.CONFIG

    def _build_parsed_values(self):
        """
        Take something in the configuration format and split it into
        OCR507 configuration values (with an appropriate tag)
        @throws SampleException If there is a problem with sample creation
        """
        match = CONFIG_REGEX.match(self.raw_data)
        if not match:
            raise SampleException("No regex match of parsed configuration data: [%r]" %
                                  self.raw_data)
        # Parse the relevant ascii fields
        firmware_version = match.group('firmware_version')
        instrument_id = match.group('instrument_id')
        serial_number = match.group('serial_number')
        telemetry_baud_rate = int(match.group('telemetry_baud_rate'))
        max_frame_rate = match.group('max_frame_rate')
        # on/off flags are published as 1/0
        init_silent_mode = 1 if match.group('initialize_silent_mode') == 'on' else 0
        init_power_down = 1 if match.group('initialize_power_down') == 'on' else 0
        init_auto_telemetry = 1 if match.group('initialize_auto_telemetry') == 'on' else 0
        network_mode = 1 if match.group('network_mode') == 'on' else 0
        network_address = int(match.group('network_address'))
        network_baud_rate = int(match.group('network_baud_rate'))
        result = [{DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.FIRMWARE_VERSION,
                   DataParticleKey.VALUE: firmware_version},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.INSTRUMENT_ID,
                   DataParticleKey.VALUE: instrument_id},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.SERIAL_NUMBER,
                   DataParticleKey.VALUE: serial_number},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.TELEMETRY_BAUD_RATE,
                   DataParticleKey.VALUE: telemetry_baud_rate},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.MAX_FRAME_RATE,
                   DataParticleKey.VALUE: max_frame_rate},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.INIT_SILENT_MODE,
                   DataParticleKey.VALUE: init_silent_mode},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.INIT_POWER_DOWN,
                   DataParticleKey.VALUE: init_power_down},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.INIT_AUTO_TELEMETRY,
                   DataParticleKey.VALUE: init_auto_telemetry},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.NETWORK_MODE,
                   DataParticleKey.VALUE: network_mode},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.NETWORK_ADDRESS,
                   DataParticleKey.VALUE: network_address},
                  {DataParticleKey.VALUE_ID: SatlanticOCR507ConfigurationParticleKey.NETWORK_BAUD_RATE,
                   DataParticleKey.VALUE: network_baud_rate}]
        log.debug('OCR507 Configuration Particle raw data: %r', self.raw_data)
        log.debug('OCR507 Configuration Particle parsed data: %r', result)
        return result
####################################################################
# Satlantic OCR507 Sensor Protocol
####################################################################
class SatlanticOCR507InstrumentProtocol(CommandResponseInstrumentProtocol):
"""The instrument protocol classes to deal with a Satlantic OCR507 sensor.
The protocol is a very simple command/response protocol with a few show
commands and a few set commands.
Note protocol state machine must be called "self._protocol_fsm"
"""
_data_particle_type = SatlanticOCR507DataParticle
_config_particle_type = SatlanticOCR507ConfigurationParticle
_data_particle_regex = SAMPLE_REGEX
_config_particle_regex = CONFIG_REGEX
__metaclass__ = get_logging_metaclass(log_level='debug')
    def __init__(self, callback=None):
        """Build the protocol: FSM states and handlers, response handlers,
        parameter/command/driver dictionaries and the chunker.

        @param callback Driver event callback, forwarded to the base class.
        """
        CommandResponseInstrumentProtocol.__init__(self, Prompt, EOLN, callback)
        self._last_data_timestamp = None
        # State machine over UNKNOWN / COMMAND / AUTOSAMPLE / DIRECT_ACCESS.
        self._protocol_fsm = ThreadSafeFSM(SatlanticProtocolState, SatlanticProtocolEvent, SatlanticProtocolEvent.ENTER,
                                           SatlanticProtocolEvent.EXIT)
        self._protocol_fsm.add_handler(SatlanticProtocolState.UNKNOWN, SatlanticProtocolEvent.ENTER,
                                       self._handler_unknown_enter)
        self._protocol_fsm.add_handler(SatlanticProtocolState.UNKNOWN, SatlanticProtocolEvent.DISCOVER,
                                       self._handler_unknown_discover)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND, SatlanticProtocolEvent.ENTER,
                                       self._handler_command_enter)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND, SatlanticProtocolEvent.GET,
                                       self._handler_command_get)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND, SatlanticProtocolEvent.SET,
                                       self._handler_command_set)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND, SatlanticProtocolEvent.START_AUTOSAMPLE,
                                       self._handler_command_start_autosample)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND, SatlanticProtocolEvent.ACQUIRE_STATUS,
                                       self._handler_command_acquire_status)
        self._protocol_fsm.add_handler(SatlanticProtocolState.COMMAND, SatlanticProtocolEvent.START_DIRECT,
                                       self._handler_command_start_direct)
        self._protocol_fsm.add_handler(SatlanticProtocolState.AUTOSAMPLE, SatlanticProtocolEvent.ENTER,
                                       self._handler_autosample_enter)
        self._protocol_fsm.add_handler(SatlanticProtocolState.AUTOSAMPLE, SatlanticProtocolEvent.STOP_AUTOSAMPLE,
                                       self._handler_autosample_stop_autosample)
        self._protocol_fsm.add_handler(SatlanticProtocolState.DIRECT_ACCESS, SatlanticProtocolEvent.ENTER,
                                       self._handler_direct_access_enter)
        self._protocol_fsm.add_handler(SatlanticProtocolState.DIRECT_ACCESS, SatlanticProtocolEvent.EXECUTE_DIRECT,
                                       self._handler_direct_access_execute_direct)
        self._protocol_fsm.add_handler(SatlanticProtocolState.DIRECT_ACCESS, SatlanticProtocolEvent.STOP_DIRECT,
                                       self._handler_direct_access_stop_direct)
        self._protocol_fsm.start(SatlanticProtocolState.UNKNOWN)
        # Response parsers keyed by command.
        self._add_response_handler(Command.GET, self._parse_get_response)
        self._add_response_handler(Command.SHOW_ALL, self._parse_getAll_response)
        self._add_response_handler(Command.SET, self._parse_set_response)
        self._add_response_handler(Command.INVALID, self._parse_invalid_response)
        # Parameter dictionary: extraction regex, to/from-string converters
        # and metadata for each instrument parameter.
        self._param_dict.add(Parameter.MAX_RATE,
                             r"Maximum\ Frame\ Rate:\ (\S+).*?\s*",
                             lambda match: '0' if match.group(1) == 'AUTO' else match.group(1),
                             lambda sVal: '%s' % sVal,
                             type=ParameterDictType.STRING,
                             display_name="Maximum Frame Rate",
                             units=Units.HERTZ,
                             description="Frame rate: (0=auto | 0.125 | 0.25 | 0.5 | 1 | 2 | 4 | 8 | 10 | 12)",
                             default_value='0',
                             startup_param=True,
                             direct_access=True)
        self._param_dict.add(Parameter.INIT_AT,
                             r"Initialize Automatic Telemetry: (off|on)",
                             lambda match: True if match.group(1) == 'on' else False,
                             self._boolean_to_off_on,
                             type=ParameterDictType.BOOL,
                             display_name="Auto Telemetry",
                             description="Enables auto telemetry: (true | false)",
                             default_value=True,
                             visibility=ParameterDictVisibility.IMMUTABLE,
                             startup_param=True,
                             direct_access=True)
        self._param_dict.add(Parameter.INIT_SM,
                             r"Initialize Silent Mode: (off|on)",
                             lambda match: True if match.group(1) == 'on' else False,
                             self._boolean_to_off_on,
                             type=ParameterDictType.BOOL,
                             display_name="Silent Mode",
                             description="Enables silent mode: (true | false)",
                             default_value=True,
                             visibility=ParameterDictVisibility.IMMUTABLE,
                             startup_param=True,
                             direct_access=True)
        self._param_dict.add(Parameter.NET_MODE,
                             r"Network Mode: (off|on)",
                             lambda match: True if match.group(1) == 'on' else False,
                             self._boolean_to_off_on,
                             type=ParameterDictType.BOOL,
                             display_name="Network Mode",
                             description="Enables network operation: (true | false)",
                             default_value=False,
                             visibility=ParameterDictVisibility.IMMUTABLE,
                             startup_param=True,
                             direct_access=True)
        # User-visible commands.
        self._cmd_dict.add(SatlanticCapability.START_AUTOSAMPLE, display_name="Start Autosample")
        self._cmd_dict.add(SatlanticCapability.STOP_AUTOSAMPLE, display_name="Stop Autosample")
        self._cmd_dict.add(SatlanticCapability.ACQUIRE_STATUS, display_name="Acquire Status")
        self._driver_dict.add(DriverDictKey.VENDOR_SW_COMPATIBLE, True)
        # Chunker splits the raw byte stream into sample/config frames.
        self._chunker = StringChunker(self.sieve_function)
    def _filter_capabilities(self, events):
        """
        Filters capabilities: keep only events listed in SatlanticCapability.
        NOTE(review): this method is defined twice in this class; a later,
        identical definition overrides this one (harmless, but one copy
        should be removed).
        """
        events_out = [x for x in events if SatlanticCapability.has(x)]
        return events_out
@staticmethod
def _boolean_to_off_on(v):
"""
Write a boolean value to string formatted for sbe16 set operations.
@param v a boolean value.
@retval A yes/no string formatted for sbe16 set operations.
@throws InstrumentParameterException if value not a bool.
"""
if not isinstance(v, bool):
raise InstrumentParameterException('Value %s is not a bool.' % str(v))
if v:
return 'on'
return 'off'
@staticmethod
def sieve_function(raw_data):
""" The method that splits samples
"""
log.debug("Raw Data: %r, len: %d", raw_data, len(raw_data))
log.debug(SAMPLE_REGEX.pattern)
matchers = [SAMPLE_REGEX, CONFIG_REGEX]
return_list = []
for matcher in matchers:
for match in matcher.finditer(raw_data):
return_list.append((match.start(), match.end()))
return return_list
    def _filter_capabilities(self, events):
        """
        Keep only events listed in SatlanticCapability.
        NOTE(review): duplicate of an earlier definition in this class; this
        later one wins. The two bodies are identical, so behavior is
        unaffected, but the duplicate should be deleted.
        """
        events_out = [x for x in events if SatlanticCapability.has(x)]
        return events_out
    def _do_cmd(self, cmd, *args, **kwargs):
        """
        Issue a command to the instrument after clearing of buffers.
        @param cmd The command to execute.
        @param args positional arguments to pass to the build handler.
        @retval The fully built command that was sent
        """
        expected_prompt = kwargs.get('expected_prompt', None)
        cmd_line = self._build_default_command(cmd, *args)
        # Send command.
        log.debug('_do_cmd: %s, length=%s' % (repr(cmd_line), len(cmd_line)))
        if len(cmd_line) == 1:
            # Single control characters are sent as-is, no pacing needed.
            self._connection.send(cmd_line)
        else:
            # Send character-by-character, waiting (up to 3 s each) for the
            # instrument to echo every character before sending the next.
            for char in cmd_line:
                starttime = time.time()
                self._connection.send(char)
                while len(self._promptbuf) == 0 or char not in self._promptbuf[-1]:
                    time.sleep(0.0015)
                    if time.time() > starttime + 3:
                        break
                time.sleep(0.115)
            # Terminate the command and wait briefly for the EOLN echo.
            starttime = time.time()
            self._connection.send(EOLN)
            while EOLN not in self._promptbuf[len(cmd_line):len(cmd_line) + 2]:
                time.sleep(0.0015)
                if time.time() > starttime + 3:
                    break
        # Limit resend_check_value from expected_prompt to one of the two below
        resend_check_value = None
        if expected_prompt is not None:
            for check in (Prompt.COMMAND, "SATDI7"):
                if check in expected_prompt:
                    log.trace('_do_cmd: command: %s, check=%s' % (cmd_line, check))
                    resend_check_value = check
        # Resend the EOLN if it did not go through the first time
        starttime = time.time()
        if resend_check_value is not None:
            while True:
                time.sleep(0.1)
                if time.time() > starttime + 2:
                    log.debug("Sending eoln again.")
                    self._connection.send(EOLN)
                    starttime = time.time()
                if resend_check_value in self._promptbuf:
                    break
                if Prompt.INVALID_COMMAND in self._promptbuf:
                    break
        return cmd_line
def _do_cmd_no_resp(self, cmd, *args, **kwargs):
"""
Issue a command to the instrument after clearing of buffers. No response is handled as a result of the command.
@param cmd The command to execute.
@param args positional arguments to pass to the build handler.
"""
self._do_cmd(cmd, *args, **kwargs)
def _do_cmd_resp(self, cmd, *args, **kwargs):
"""
Perform a command-response on the device.
@param cmd The command to execute.
@param args positional arguments to pass to the build handler.
@param expected_prompt kwarg offering a specific prompt to look for
other than the ones in the protocol class itself.
@param response_regex kwarg with a compiled regex for the response to
match. Groups that match will be returned as a string.
Cannot be supplied with expected_prompt. May be helpful for instruments that do not have a prompt.
@retval resp_result The (possibly parsed) response result including the
first instance of the prompt matched. If a regex was used, the prompt
will be an empty string and the response will be the joined collection of matched groups.
@raises InstrumentCommandException if the response did not occur in time.
@raises InstrumentProtocolException if command could not be built or if response was not recognized.
"""
timeout = kwargs.get('timeout', DEFAULT_CMD_TIMEOUT)
response_regex = kwargs.get('response_regex', None)
expected_prompt = None
if response_regex is None:
expected_prompt = kwargs.get('expected_prompt', [Prompt.INVALID_COMMAND, Prompt.USAGE, Prompt.COMMAND])
if response_regex and not isinstance(response_regex, RE_PATTERN):
raise InstrumentProtocolException('Response regex is not a compiled pattern!')
if expected_prompt and response_regex:
raise InstrumentProtocolException('Cannot supply both regex and expected prompt!')
retry_count = 5
retry_num = 0
cmd_line = ""
result = ""
prompt = ""
for retry_num in xrange(retry_count):
# Clear line and prompt buffers for result.
self._linebuf = ''
self._promptbuf = ''
cmd_line = self._do_cmd(cmd, *args, **kwargs)
# Wait for the prompt, prepare result and return, timeout exception
if response_regex:
result_tuple = self._get_response(timeout, response_regex=response_regex,
expected_prompt=expected_prompt)
result = "".join(result_tuple)
else:
(prompt, result) = self._get_response(timeout, expected_prompt=expected_prompt)
# Confirm the entire command was sent, otherwise resend retry_count number of times
if len(cmd_line) > 1 and \
(expected_prompt is not None or
(response_regex is not None)) \
and cmd_line not in self._linebuf:
log.debug('_do_cmd_resp: Send command: %s failed %s attempt, result = %r', cmd, retry_num, result)
if retry_num >= retry_count:
raise InstrumentCommandException('_do_cmd_resp: Failed %s attempts sending command: %s' %
(retry_count, cmd))
else:
break
log.debug('_do_cmd_resp: Sent command: %s, %s reattempts, expected_prompt=%s, result=%r.',
cmd_line, retry_num, expected_prompt, result)
resp_handler = self._response_handlers.get((self.get_current_state(), cmd), None) or \
self._response_handlers.get(cmd, None)
resp_result = None
if resp_handler:
resp_result = resp_handler(result, prompt)
time.sleep(0.3) # give some time for the instrument connection to keep up
return resp_result
########################################################################
# Unknown handlers.
########################################################################
def _handler_unknown_enter(self, *args, **kwargs):
"""
Enter unknown state.
"""
# Tell driver superclass to send a state change event.
# Superclass will query the state.
self._driver_event(DriverAsyncEvent.STATE_CHANGE)
    def _handler_unknown_discover(self, *args, **kwargs):
        """
        Discover current state; can be COMMAND or AUTOSAMPLE.
        @retval (next_state, result), (SatlanticProtocolState.COMMAND, ResourceAgentState.IDLE or
        SatlanticProtocolState.AUTOSAMPLE, ResourceAgentState.STREAMING) if successful.
        """
        # Probe with a deliberately invalid command: only command mode
        # answers with the "unknown command" prompt.
        try:
            invalidCommandResponse = self._do_cmd_resp(Command.INVALID, timeout=3,
                                                       expected_prompt=Prompt.INVALID_COMMAND)
        except InstrumentTimeoutException as ex:
            invalidCommandResponse = None  # The instrument is not in COMMAND: it must be polled or AUTOSAMPLE
        log.debug("_handler_unknown_discover: returned: %s", invalidCommandResponse)
        if invalidCommandResponse:
            return SatlanticProtocolState.COMMAND, ResourceAgentState.IDLE
        # Put the instrument back into full autosample
        self._do_cmd_no_resp(Command.SWITCH_TO_AUTOSAMPLE)
        return SatlanticProtocolState.AUTOSAMPLE, ResourceAgentState.STREAMING
########################################################################
# Command handlers.
########################################################################
    def _handler_command_enter(self, *args, **kwargs):
        """
        Enter command state.
        """
        # On first initialization, refresh cached parameter values from the
        # instrument before applying startup parameters.
        if self._init_type != InitializationType.NONE:
            self._update_params()
        # Command device to update parameters and send a config change event.
        self._init_params()
        # Tell driver superclass to send a state change event.
        # Superclass will query the state.
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)
def _handler_command_get(self, *args, **kwargs):
"""Handle getting data from command mode
@param params List of the parameters to pass to the state
@retval return (next state, result)
"""
return self._handler_get(*args, **kwargs)
def _handler_command_set(self, *args, **kwargs):
"""Handle setting data from command mode
@param params Dict of the parameters and values to pass to the state
@return (next state, result)
"""
self._set_params(*args, **kwargs)
return None, None
    def _handler_command_start_autosample(self, params=None, *args, **kwargs):
        """
        Handle getting an start autosample event when in command mode
        @param params List of the parameters to pass to the state
        @return next state (next agent state, result)
        """
        result = None
        # Leave the command console; a sample frame in the response confirms
        # the instrument is emitting data again.
        self._do_cmd_resp(Command.EXIT, response_regex=SAMPLE_REGEX, timeout=30)
        time.sleep(0.115)
        # Ensure the instrument is free running sampling mode.
        self._do_cmd_resp(Command.SWITCH_TO_AUTOSAMPLE, response_regex=SAMPLE_REGEX, timeout=30)
        next_state = SatlanticProtocolState.AUTOSAMPLE
        next_agent_state = ResourceAgentState.STREAMING
        return next_state, (next_agent_state, result)
def _handler_command_start_direct(self):
"""
"""
result = None
next_state = SatlanticProtocolState.DIRECT_ACCESS
next_agent_state = ResourceAgentState.DIRECT_ACCESS
log.debug("_handler_command_start_direct: entering DA mode")
return next_state, (next_agent_state, result)
def _handler_command_acquire_status(self, *args, **kwargs):
"""
Handle SatlanticProtocolState.COMMAND SatlanticProtocolEvent.ACQUIRE_STATUS
@return next state (next agent state, result)
"""
next_state = None
next_agent_state = None
result = None
self._do_cmd_resp(Command.ID)
self._do_cmd_resp(Command.SHOW_ALL)
return next_state, (next_agent_state, result)
########################################################################
# Autosample handlers.
########################################################################
    def _handler_autosample_enter(self, *args, **kwargs):
        """
        Handle SatlanticProtocolState.AUTOSAMPLE SatlanticProtocolEvent.ENTER
        @param params Parameters to pass to the state
        @retval return (next state, result)
        @throw InstrumentProtocolException For hardware error
        """
        next_state = None
        result = None
        # Command device to update parameters only on initialization.
        if self._init_type != InitializationType.NONE:
            # Break into command mode to apply startup params, then resume.
            self._send_break()
            self._update_params()
            self._init_params()
            self._do_cmd_resp(Command.EXIT, response_regex=SAMPLE_REGEX, timeout=30)
            time.sleep(0.115)
            self._do_cmd_resp(Command.SWITCH_TO_AUTOSAMPLE, response_regex=SAMPLE_REGEX, timeout=30)
        # NOTE(review): this references the attribute without calling it; if
        # _confirm_autosample_mode is a method, the reference is always
        # truthy and this error path can never trigger -- confirm and add
        # parentheses if so.
        if not self._confirm_autosample_mode:
            raise InstrumentProtocolException(error_code=InstErrorCode.HARDWARE_ERROR,
                                              msg="Not in the correct mode!")
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)
        return next_state, result
    def _handler_autosample_stop_autosample(self, *args, **kwargs):
        """Handle SatlanticProtocolState.AUTOSAMPLE stop
        @param params Parameters to pass to the state
        @retval return (next state, result)
        @throw InstrumentProtocolException For hardware error
        """
        next_state = None
        result = None
        try:
            # A successful break drops the instrument back to command mode.
            self._send_break()
            next_state = SatlanticProtocolState.COMMAND
            next_agent_state = ResourceAgentState.COMMAND
        except InstrumentException:
            # Before raising an error, check if the instrument is already in Command state
            next_state, next_agent_state = self._handler_unknown_discover()
            if next_state != SatlanticProtocolState.COMMAND:
                raise InstrumentProtocolException(error_code=InstErrorCode.HARDWARE_ERROR,
                                                  msg="Could not break from autosample!")
        return next_state, (next_agent_state, result)
########################################################################
# Direct access handlers.
########################################################################
    def _handler_direct_access_enter(self, *args, **kwargs):
        """
        Enter direct access state.
        Tell driver superclass to send a state change event.
        Superclass will query the state.
        """
        self._driver_event(DriverAsyncEvent.STATE_CHANGE)
        # Reset the record of sent commands, used for 'echo' filtering in the callback.
        self._sent_cmds = []
    def _do_cmd_direct(self, cmd):
        """
        Issue an untranslated command to the instrument. No response is handled as a result of the command.
        Overridden: Use _do_cmd to send commands reliably. Remove if digi-serial interface is ever fixed.
        @param cmd The high level command to issue
        """
        # NOTE(review): delegates to the base-class _do_cmd for reliable
        # writes -- see the override rationale in the docstring above.
        self._do_cmd(cmd)
def _handler_direct_access_execute_direct(self, data):
"""
"""
next_state = None
result = None
next_agent_state = None
self._do_cmd_direct(data)
# add sent command to list for 'echo' filtering in callback
self._sent_cmds.append(data)
return next_state, (next_agent_state, result)
def _handler_direct_access_stop_direct(self):
"""
"""
next_state, next_agent_state = self._handler_unknown_discover()
if next_state == DriverProtocolState.COMMAND:
next_agent_state = ResourceAgentState.COMMAND
return next_state, (next_agent_state, None)
###################################################################
# Builders
###################################################################
def _build_default_command(self, *args):
"""
"""
return " ".join(str(x) for x in args)
##################################################################
# Response parsers
##################################################################
def _parse_set_response(self, response, prompt):
"""Determine if a set was successful or not
@param response What was sent back from the command that was sent
@param prompt The prompt that was returned from the device
"""
if prompt == Prompt.COMMAND:
return True
return False
    def _parse_getAll_response(self, response, prompt):
        """ Parse the response from the instrument for a 'Get All' query
        @param response The response string from the instrument
        @param prompt The prompt received from the instrument
        @return Dict of all current parameter values from the param dict
        @raise InstrumentProtocolException When a bad response is encountered
        """
        # Fold every parameter present in the response into the param dict.
        self._param_dict.update_many(response)
        return self._param_dict.get_all()
def _parse_get_response(self, response, prompt):
""" Parse the response from the instrument for a 'Get [parameter]' query
@param response The response string from the instrument
@param prompt The prompt received from the instrument
@return The numerical value of the parameter in the known units
@raise InstrumentProtocolException When a bad response is encountered
"""
# should end with the response, an eol, and a prompt
update_dict = self._param_dict.update_many(response)
if not update_dict or len(update_dict) > 1:
log.error("Get response set multiple parameters (%r): expected only 1", update_dict)
raise InstrumentProtocolException("Invalid response. Bad command?")
return self._param_dict.get_all()
def _parse_invalid_response(self, response, prompt):
""" Parse the response from the instrument for a couple of different
query responses.
@param response The response string from the instrument
@param prompt The prompt received from the instrument
@return true iff Prompt.INVALID_COMMAND was returned
"""
# should end with the response, an eoln, and a prompt
return Prompt.INVALID_COMMAND == prompt
###################################################################
# Helpers
###################################################################
    def _set_params(self, *args, **kwargs):
        """
        Issue commands to the instrument to set various parameters
        Also called when setting parameters during startup and direct access
        @param args args[0] is a dict of parameter name -> desired value
        @throws InstrumentParameterException if parameter does not exist or Maxrate is out of range
        @throws InstrumentCommandException if failed to set
        """
        params = args[0]
        self._verify_not_readonly(*args, **kwargs)
        old_config = self._param_dict.get_config()
        # The first failure stops the loop, but the exception is raised only
        # after the config-change check below, so a partial set still
        # publishes a CONFIG_CHANGE event for the parameters that did apply.
        exception = None
        for key in params:
            if key not in self._param_dict._param_dict:
                exception = InstrumentParameterException ("Bad parameter: %r" % key)
                break
            val = self._param_dict.format(key, params[key])
            log.debug("KEY = %s VALUE = %s", str(key), str(val))
            if key == Parameter.MAX_RATE and float(params[key]) not in VALID_MAXRATES:
                exception = InstrumentParameterException("Maxrate %s out of range" % val)
                break
            # Check for existence in dict (send only on change)
            # NOTE(review): format(key) with no value appears to format the
            # currently cached value for comparison -- confirm against the
            # param-dict API.
            if self._param_dict.get(key) is None or val != self._param_dict.format(key):
                if not self._do_cmd_resp(Command.SET, key, val):
                    exception = InstrumentCommandException('Error setting: %s = %s' % (key, val))
                    break
                self._param_dict.set_value(key, params[key])
                time.sleep(0.5)
        # Get new param dict config. If it differs from the old config,
        # tell driver superclass to publish a config change event.
        new_config = self._param_dict.get_config()
        log.debug("new_config: %s == old_config: %s", new_config, old_config)
        if old_config != new_config:
            self._do_cmd_resp(Command.SAVE, expected_prompt=Prompt.COMMAND)
            log.debug("configuration has changed. Send driver event")
            self._driver_event(DriverAsyncEvent.CONFIG_CHANGE)
        # Raise any exceptions encountered due to errors setting the parameter(s)
        if exception is not None:
            raise exception
    def _update_params(self, *args, **kwargs):
        """Fetch the parameters from the device, and update the param dict.
        @param args Unused
        @param kwargs Takes timeout value
        """
        # NOTE(review): relies on the SHOW_ALL response handler updating the
        # param dict as a side effect -- see _parse_getAll_response.
        return self._do_cmd_resp(Command.SHOW_ALL)
    def _send_break(self):
        """
        Send break every 0.3 seconds until the Command Console banner is received.
        @throws InstrumentTimeoutException if the Command Console banner is not
                received within 10 seconds.
        """
        self._promptbuf = ""
        self._connection.send(Command.BREAK)
        starttime = time.time()
        resendtime = time.time()
        while True:
            # Re-send the break if the instrument has not responded in 0.3 s.
            if time.time() > resendtime + 0.3:
                log.debug("Sending break again.")
                self._connection.send(Command.BREAK)
                resendtime = time.time()
            if COMMAND_PATTERN in self._promptbuf:
                break
            # Overall deadline for the instrument to drop out of autosample.
            if time.time() > starttime + 10:
                raise InstrumentTimeoutException("Break command failing to stop autosample!")
            # Brief pause between polls of _promptbuf.
            time.sleep(0.1)
def _got_chunk(self, chunk, timestamp):
"""
extract samples from a chunk of data
@param chunk: bytes to parse into a sample.
"""
sample = self._extract_sample(self._data_particle_type, self._data_particle_regex, chunk, timestamp) or \
self._extract_sample(self._config_particle_type, self._config_particle_regex, chunk, timestamp)
if not sample:
raise InstrumentProtocolException(u'unhandled chunk received by _got_chunk: [{0!r:s}]'.format(chunk))
return sample
    def _confirm_autosample_mode(self):
        """
        Confirm we are in autosample mode.
        This is done by waiting for a sample to come in, and confirming that
        it does or does not.
        @retval True if in autosample mode, False if not
        """
        # timestamp now,
        start_time = self._last_data_timestamp
        # wait a sample period,
        current_maxrate = self._param_dict.get_config()[Parameter.MAX_RATE]
        if current_maxrate is None:
            current_maxrate = 0.125  # During startup, assume the slowest sample rate
        elif current_maxrate <= 0 or current_maxrate > 8:
            current_maxrate = 8  # Effective current maxrate, despite the instrument accepting higher values
        # One full sample period plus a second of slack.
        time_between_samples = (1.0 / current_maxrate) + 1
        time.sleep(time_between_samples)
        end_time = self._last_data_timestamp
        log.debug("_confirm_autosample_mode: end_time=%s, start_time=%s" % (end_time, start_time))
        # A new sample arrived during the wait iff the data timestamp advanced.
        if end_time != start_time:
            log.debug("Confirmed in autosample mode")
            return True
        log.debug("Confirmed NOT in autosample mode")
        return False
|
rmanoni/mi-instrument
|
mi/instrument/satlantic/ocr_507_icsw/ooicore/driver.py
|
Python
|
bsd-2-clause
| 47,671
|
#!/bin/env python
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# logging_utils.py
''' Utility functions and objects for logging.
'''
import logging
import sys
class StdoutStderrHandler(logging.Handler):
    ''' Subclass of logging.Handler which outputs to either stdout or stderr
        based on a threshold level.
    '''

    def __init__(self, threshold=logging.WARNING, err=sys.stderr, out=sys.stdout):
        ''' Args:
              threshold: below this logging level messages are sent to stdout,
                otherwise they are sent to stderr
              err: a stream object that error messages are sent to, defaults to
                sys.stderr
              out: a stream object that non-error messages are sent to, defaults
                to sys.stdout
        '''
        logging.Handler.__init__(self)
        self._err = logging.StreamHandler(err)
        self._out = logging.StreamHandler(out)
        self._threshold = threshold
        # Tracks which stream received the most recent record so flush() can
        # flush the stalest stream first and keep interleaved output ordered.
        self._last_was_err = False

    def setLevel(self, lvl):
        # Keep the wrapped stream handlers in sync with this handler's level.
        logging.Handler.setLevel(self, lvl)
        self._err.setLevel(lvl)
        self._out.setLevel(lvl)

    def setFormatter(self, formatter):
        # Both streams share a single formatter.
        logging.Handler.setFormatter(self, formatter)
        self._err.setFormatter(formatter)
        self._out.setFormatter(formatter)

    def emit(self, record):
        if record.levelno < self._threshold:
            self._out.emit(record)
            self._last_was_err = False
        else:
            self._err.emit(record)
            # BUG FIX: this was False, so _last_was_err could never become
            # True and flush() always believed stdout was the stalest stream.
            self._last_was_err = True

    def flush(self):
        # preserve order on the flushing, the stalest stream gets flushed first
        if self._last_was_err:
            self._out.flush()
            self._err.flush()
        else:
            self._err.flush()
            self._out.flush()
FORMAT = "%(asctime)s %(filename)s [%(levelname)s] %(message)s"
DATEFMT = "%H:%M:%S"


def config_root(level=logging.INFO, threshold=logging.WARNING, format=FORMAT,
                datefmt=DATEFMT):
    ''' Configure the root logger to use a StdoutStderrHandler and some default
        formatting.
        Args:
          level: messages below this level are ignored
          threshold: below this logging level messages are sent to stdout,
            otherwise they are sent to stderr
          format: format for log messages, see logger.Format
          datefmt: format for date in log messages
    '''
    # basicConfig cannot install a custom handler class on the root logger,
    # so wire everything up by hand.
    root_logger = logging.getLogger()
    root_logger.setLevel(level)
    handler = StdoutStderrHandler(threshold=threshold)
    handler.setLevel(level)
    handler.setFormatter(logging.Formatter(format, datefmt))
    root_logger.addHandler(handler)
|
Crystalnix/house-of-life-chromium
|
tools/python/google/logging_utils.py
|
Python
|
bsd-3-clause
| 2,700
|
from __future__ import division, print_function
import sys
sys.path.append("../lib")
import logging
import theano
import theano.tensor as T
from theano import tensor
from blocks.bricks.base import application, lazy
from blocks.bricks.recurrent import BaseRecurrent, recurrent
from blocks.bricks import Random, Initializable, MLP, Linear
from blocks.bricks import Identity, Tanh, Logistic
from attention import ZoomableAttentionWindow
from prob_layers import replicate_batch
#-----------------------------------------------------------------------------
class Qsampler(Initializable, Random):
    # Diagonal-Gaussian posterior sampler Q(z|x) with a fixed Gaussian prior
    # N(prior_mean, exp(prior_log_sigma)); noise u is supplied externally
    # (reparameterization trick).
    def __init__(self, input_dim, output_dim, **kwargs):
        super(Qsampler, self).__init__(**kwargs)
        # Prior is N(0, 1): mean 0 and log-sigma 0 (sigma = 1).
        self.prior_mean = 0.
        self.prior_log_sigma = 0.
        # Two linear layers map the encoder state to the posterior mean and
        # log-sigma respectively.
        self.mean_transform = Linear(
                name=self.name+'_mean',
                input_dim=input_dim, output_dim=output_dim,
                weights_init=self.weights_init, biases_init=self.biases_init,
                use_bias=True)
        self.log_sigma_transform = Linear(
                name=self.name+'_log_sigma',
                input_dim=input_dim, output_dim=output_dim,
                weights_init=self.weights_init, biases_init=self.biases_init,
                use_bias=True)
        self.children = [self.mean_transform, self.log_sigma_transform]

    def get_dim(self, name):
        # Both transforms share dimensions; delegate to the mean transform.
        if name == 'input':
            return self.mean_transform.get_dim('input')
        elif name == 'output':
            return self.mean_transform.get_dim('output')
        else:
            raise ValueError

    @application(inputs=['x', 'u'], outputs=['z', 'kl_term'])
    def sample(self, x, u):
        """Return a samples and the corresponding KL term

        Parameters
        ----------
        x :
        u : tensor
            externally supplied standard-normal noise, same shape as the mean

        Returns
        -------
        z : tensor.matrix
            Samples drawn from Q(z|x)
        kl : tensor.vector
            KL(Q(z|x) || P_z)
        """
        mean = self.mean_transform.apply(x)
        log_sigma = self.log_sigma_transform.apply(x)
        # Sample from mean-zeros std.-one Gaussian
        #u = self.theano_rng.normal(
        #            size=mean.shape,
        #            avg=0., std=1.)
        # ... and scale/translate samples
        z = mean + tensor.exp(log_sigma) * u
        # Calculate KL divergence between the diagonal Gaussian posterior and
        # the (also diagonal Gaussian) prior, summed over the latent dims.
        kl = (
            self.prior_log_sigma - log_sigma
            + 0.5 * (
                tensor.exp(2 * log_sigma) + (mean - self.prior_mean) ** 2
                ) / tensor.exp(2 * self.prior_log_sigma)
            - 0.5
        ).sum(axis=-1)
        return z, kl

    #@application(inputs=['n_samples'])
    @application(inputs=['u'], outputs=['z_prior'])
    def sample_from_prior(self, u):
        """Sample z from the prior distribution P_z.

        Parameters
        ----------
        u : tensor.matrix
            gaussian random source

        Returns
        -------
        z : tensor.matrix
            samples
        """
        z_dim = self.mean_transform.get_dim('output')
        # Sample from mean-zeros std.-one Gaussian
        #u = self.theano_rng.normal(
        #            size=(n_samples, z_dim),
        #            avg=0., std=1.)
        # ... and scale/translate samples
        z = self.prior_mean + tensor.exp(self.prior_log_sigma) * u
        #z.name("z_prior")
        return z
#-----------------------------------------------------------------------------
class Reader(Initializable):
    """Non-attentive reader: concatenates the image and the error image."""

    def __init__(self, x_dim, dec_dim, **kwargs):
        super(Reader, self).__init__(name="reader", **kwargs)
        self.x_dim = x_dim
        self.dec_dim = dec_dim
        # Output is [x, x_hat] side by side.
        self.output_dim = 2 * x_dim

    def get_dim(self, name):
        dims = {
            'input': self.dec_dim,
            'x_dim': self.x_dim,
            'output': self.output_dim,
        }
        if name not in dims:
            raise ValueError
        return dims[name]

    @application(inputs=['x', 'x_hat', 'h_dec'], outputs=['r'])
    def apply(self, x, x_hat, h_dec):
        # The decoder state is ignored by the non-attentive reader.
        return T.concatenate([x, x_hat], axis=1)
class AttentionReader(Initializable):
    # Attention-based reader: extracts N x N glimpses from the image and the
    # error image through a zoomable attention window driven by the decoder state.
    def __init__(self, x_dim, dec_dim, channels, height, width, N, **kwargs):
        super(AttentionReader, self).__init__(name="reader", **kwargs)
        self.img_height = height
        self.img_width = width
        self.N = N
        self.x_dim = x_dim
        self.dec_dim = dec_dim
        # Glimpses from x and x_hat are concatenated: 2 * channels * N * N.
        self.output_dim = 2*channels*N*N
        self.zoomer = ZoomableAttentionWindow(channels, height, width, N)
        # Maps the decoder state to the 5 attention parameters
        # (center_y, center_x, delta, sigma, gamma) -- see nn2att below.
        self.readout = MLP(activations=[Identity()], dims=[dec_dim, 5], **kwargs)
        self.children = [self.readout]

    def get_dim(self, name):
        if name == 'input':
            return self.dec_dim
        elif name == 'x_dim':
            return self.x_dim
        elif name == 'output':
            return self.output_dim
        else:
            raise ValueError

    @application(inputs=['x', 'x_hat', 'h_dec'], outputs=['r'])
    def apply(self, x, x_hat, h_dec):
        # Compute attention parameters from the decoder state, then read
        # gamma-weighted glimpses from both the image and the error image.
        l = self.readout.apply(h_dec)
        center_y, center_x, delta, sigma, gamma = self.zoomer.nn2att(l)
        w = gamma * self.zoomer.read(x, center_y, center_x, delta, sigma)
        w_hat = gamma * self.zoomer.read(x_hat, center_y, center_x, delta, sigma)
        return T.concatenate([w, w_hat], axis=1)

    @application(inputs=['x', 'x_hat', 'h_dec'], outputs=['r','center_y', 'center_x', 'delta'])
    def apply_detailed(self, x, x_hat, h_dec):
        # Same as apply(), but also exposes the attention location/scale.
        l = self.readout.apply(h_dec)
        center_y, center_x, delta, sigma, gamma = self.zoomer.nn2att(l)
        w = gamma * self.zoomer.read(x, center_y, center_x, delta, sigma)
        w_hat = gamma * self.zoomer.read(x_hat, center_y, center_x, delta, sigma)
        r = T.concatenate([w, w_hat], axis=1)
        return r, center_y, center_x, delta
#-----------------------------------------------------------------------------
class Writer(Initializable):
    # Non-attentive writer: one linear layer mapping the decoder state to a
    # full-canvas update.
    def __init__(self, input_dim, output_dim, **kwargs):
        super(Writer, self).__init__(name="writer", **kwargs)
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.transform = Linear(
                name=self.name+'_transform',
                input_dim=input_dim, output_dim=output_dim,
                weights_init=self.weights_init, biases_init=self.biases_init,
                use_bias=True)
        self.children = [self.transform]

    @application(inputs=['h'], outputs=['c_update'])
    def apply(self, h):
        # The canvas update is an affine projection of the decoder state.
        return self.transform.apply(h)
class AttentionWriter(Initializable):
    # Attention-based writer: paints an N x N patch onto the canvas through a
    # zoomable attention window whose parameters come from the decoder state.
    def __init__(self, input_dim, output_dim, channels, width, height, N, **kwargs):
        super(AttentionWriter, self).__init__(name="writer", **kwargs)
        self.channels = channels
        self.img_width = width
        self.img_height = height
        self.N = N
        self.input_dim = input_dim
        self.output_dim = output_dim
        # The canvas update must cover the whole image.
        assert output_dim == channels*width*height
        self.zoomer = ZoomableAttentionWindow(channels, height, width, N)
        # Decoder state -> 5 attention parameters (center_y, center_x, delta,
        # sigma, gamma) -- see nn2att below.
        self.z_trafo = Linear(
                name=self.name+'_ztrafo',
                input_dim=input_dim, output_dim=5,
                weights_init=self.weights_init, biases_init=self.biases_init,
                use_bias=True)
        # Decoder state -> N x N patch content (per channel).
        self.w_trafo = Linear(
                name=self.name+'_wtrafo',
                input_dim=input_dim, output_dim=channels*N*N,
                weights_init=self.weights_init, biases_init=self.biases_init,
                use_bias=True)
        self.children = [self.z_trafo, self.w_trafo]

    @application(inputs=['h'], outputs=['c_update'])
    def apply(self, h):
        w = self.w_trafo.apply(h)
        l = self.z_trafo.apply(h)
        center_y, center_x, delta, sigma, gamma = self.zoomer.nn2att(l)
        # Writes are scaled by 1/gamma (reads in AttentionReader use gamma).
        c_update = 1./gamma * self.zoomer.write(w, center_y, center_x, delta, sigma)
        return c_update

    @application(inputs=['h'], outputs=['c_update', 'center_y', 'center_x', 'delta'])
    def apply_detailed(self, h):
        # Same as apply(), but also exposes the attention location/scale.
        w = self.w_trafo.apply(h)
        l = self.z_trafo.apply(h)
        center_y, center_x, delta, sigma, gamma = self.zoomer.nn2att(l)
        c_update = 1./gamma * self.zoomer.write(w, center_y, center_x, delta, sigma)
        return c_update, center_y, center_x, delta
#-----------------------------------------------------------------------------
class DrawModel(BaseRecurrent, Initializable, Random):
    # Full DRAW network: over n_iter steps the reader attends to the
    # reconstruction error, an encoder LSTM consumes the glimpse, a latent z
    # is sampled, and a decoder LSTM drives the writer to update the canvas c.
    def __init__(self, n_iter, reader,
                 encoder_mlp, encoder_rnn, sampler,
                 decoder_mlp, decoder_rnn, writer, **kwargs):
        super(DrawModel, self).__init__(**kwargs)
        self.n_iter = n_iter
        self.reader = reader
        self.encoder_mlp = encoder_mlp
        self.encoder_rnn = encoder_rnn
        self.sampler = sampler
        self.decoder_mlp = decoder_mlp
        self.decoder_rnn = decoder_rnn
        self.writer = writer
        self.children = [self.reader, self.encoder_mlp, self.encoder_rnn, self.sampler,
                         self.writer, self.decoder_mlp, self.decoder_rnn]

    def get_dim(self, name):
        # Map recurrent state names to dimensionalities; scalar outputs are 0.
        if name == 'c':
            return self.reader.get_dim('x_dim')
        elif name == 'h_enc':
            return self.encoder_rnn.get_dim('states')
        elif name == 'c_enc':
            return self.encoder_rnn.get_dim('cells')
        elif name in ['z', 'z_mean', 'z_log_sigma']:
            return self.sampler.get_dim('output')
        elif name == 'h_dec':
            return self.decoder_rnn.get_dim('states')
        elif name == 'c_dec':
            return self.decoder_rnn.get_dim('cells')
        elif name == 'kl':
            return 0
        elif name == 'center_y':
            return 0
        elif name == 'center_x':
            return 0
        elif name == 'delta':
            return 0
        else:
            # NOTE(review): missing `return` -- unknown names yield None
            # instead of the superclass result. Confirm this is intended.
            super(DrawModel, self).get_dim(name)

    #------------------------------------------------------------------------

    @recurrent(sequences=['u'], contexts=['x'],
               states=['c', 'h_enc', 'c_enc', 'z', 'kl', 'h_dec', 'c_dec'],
               outputs=['c', 'h_enc', 'c_enc', 'z', 'kl', 'h_dec', 'c_dec', 'center_y', 'center_x', 'delta'])
    def apply(self, u, c, h_enc, c_enc, z, kl, h_dec, c_dec, x):
        # One DRAW inference step.
        # Error image: what the current canvas still fails to explain.
        x_hat = x-T.nnet.sigmoid(c)
        #r = self.reader.apply(x, x_hat, h_dec)
        r,center_y,center_x,delta = self.reader.apply_detailed(x, x_hat, h_dec)
        i_enc = self.encoder_mlp.apply(T.concatenate([r, h_dec], axis=1))
        h_enc, c_enc = self.encoder_rnn.apply(states=h_enc, cells=c_enc, inputs=i_enc, iterate=False)
        # Sample the latent from Q(z|x) using externally supplied noise u.
        z, kl = self.sampler.sample(h_enc, u)
        i_dec = self.decoder_mlp.apply(z)
        h_dec, c_dec = self.decoder_rnn.apply(states=h_dec, cells=c_dec, inputs=i_dec, iterate=False)
        # Additively update the canvas.
        c = c + self.writer.apply(h_dec)
        return c, h_enc, c_enc, z, kl, h_dec, c_dec, center_y, center_x, delta

    @recurrent(sequences=['u'], contexts=[],
               states=['c', 'h_dec', 'c_dec'],
               outputs=['c', 'h_dec', 'c_dec'])
    def decode(self, u, c, h_dec, c_dec):
        # Generation-only step: sample z from the prior and run the decoder.
        batch_size = c.shape[0]
        z = self.sampler.sample_from_prior(u)
        i_dec = self.decoder_mlp.apply(z)
        h_dec, c_dec = self.decoder_rnn.apply(
                    states=h_dec, cells=c_dec,
                    inputs=i_dec, iterate=False)
        c = c + self.writer.apply(h_dec)
        return c, h_dec, c_dec

    #------------------------------------------------------------------------

    @application(inputs=['features'], outputs=['recons', 'kl', 'h_enc', 'center_y', 'center_x', 'delta'])
    def reconstruct(self, features):
        batch_size = features.shape[0]
        dim_z = self.get_dim('z')
        # Sample from mean-zeros std.-one Gaussian
        u = self.theano_rng.normal(
                    size=(self.n_iter, batch_size, dim_z),
                    avg=0., std=1.)
        # NOTE(review): chained assignment -- rvals holds the full tuple but
        # is otherwise unused.
        c, h_enc, c_enc, z, kl, h_dec, c_dec, center_y, center_x, delta = \
            rvals = self.apply(x=features, u=u)
        # The reconstruction is the sigmoid of the final canvas.
        x_recons = T.nnet.sigmoid(c[-1,:,:])
        x_recons.name = "reconstruction"
        kl.name = "kl"
        return x_recons, kl, h_enc, center_y, center_x, delta

    @application(inputs=['n_samples'], outputs=['samples'])
    def sample(self, n_samples):
        """Sample from model.

        Returns
        -------
        samples : tensor3 (n_samples, n_iter, x_dim)
        """
        # Sample from mean-zeros std.-one Gaussian
        u_dim = self.sampler.mean_transform.get_dim('output')
        u = self.theano_rng.normal(
                    size=(self.n_iter, n_samples, u_dim),
                    avg=0., std=1.)
        c, _, _, = self.decode(u)
        #c, _, _, center_y, center_x, delta = self.decode(u)
        return T.nnet.sigmoid(c)
|
drewlinsley/draw_classify
|
draw/draw_backup.py
|
Python
|
mit
| 12,934
|
from nose.tools import assert_true
import numpy as np
import numpy.testing as npt
from dipy.data import get_data
from dipy.core.gradients import (gradient_table, GradientTable,
gradient_table_from_bvals_bvecs)
from dipy.io.gradients import read_bvals_bvecs
def test_btable_prepare():
    """Exercise every accepted input form of gradient_table."""
    sq2 = np.sqrt(2) / 2.
    bvals = 1500 * np.ones(7)
    bvals[0] = 0
    bvecs = np.array([[0, 0, 0],
                      [1, 0, 0],
                      [0, 1, 0],
                      [0, 0, 1],
                      [sq2, sq2, 0],
                      [sq2, 0, sq2],
                      [0, sq2, sq2]])
    bt = gradient_table(bvals, bvecs)
    npt.assert_array_equal(bt.bvecs, bvecs)
    bt.info  # smoke-test the info property
    fimg, fbvals, fbvecs = get_data('small_64D')
    bvals = np.load(fbvals)
    bvecs = np.load(fbvecs)
    bvecs = np.where(np.isnan(bvecs), 0, bvecs)
    bt = gradient_table(bvals, bvecs)
    npt.assert_array_equal(bt.bvecs, bvecs)
    # Transposed bvecs must be handled transparently.
    bt2 = gradient_table(bvals, bvecs.T)
    npt.assert_array_equal(bt2.bvecs, bvecs)
    # A single (N, 4) table, and its transpose, are also accepted.
    btab = np.concatenate((bvals[:, None], bvecs), axis=1)
    bt3 = gradient_table(btab)
    npt.assert_array_equal(bt3.bvecs, bvecs)
    npt.assert_array_equal(bt3.bvals, bvals)
    bt4 = gradient_table(btab.T)
    npt.assert_array_equal(bt4.bvecs, bvecs)
    npt.assert_array_equal(bt4.bvals, bvals)
def test_GradientTable():
    gradients = np.array([[0, 0, 0],
                          [1, 0, 0],
                          [0, 0, 1],
                          [3, 4, 0],
                          [5, 0, 12]], 'float')
    # b-values are the gradient norms; bvecs are the unit directions.
    expected_bvals = np.array([0, 1, 1, 5, 13])
    expected_b0s_mask = expected_bvals == 0
    # Adding the mask avoids division by zero for the b0 row.
    expected_bvecs = gradients / (expected_bvals + expected_b0s_mask)[:, None]
    gt = GradientTable(gradients, b0_threshold=0)
    npt.assert_array_almost_equal(gt.bvals, expected_bvals)
    npt.assert_array_equal(gt.b0s_mask, expected_b0s_mask)
    npt.assert_array_almost_equal(gt.bvecs, expected_bvecs)
    npt.assert_array_almost_equal(gt.gradients, gradients)
    # With a threshold of 1, small b-values also count as b0s.
    gt = GradientTable(gradients, b0_threshold=1)
    npt.assert_array_equal(gt.b0s_mask, [1, 1, 1, 0, 0])
    npt.assert_array_equal(gt.bvals, expected_bvals)
    npt.assert_array_equal(gt.bvecs, expected_bvecs)
    # Gradients must be (N, 3); wrong shapes raise ValueError.
    npt.assert_raises(ValueError, GradientTable, np.ones((6, 2)))
    npt.assert_raises(ValueError, GradientTable, np.ones((6,)))
def test_gradient_table_from_bvals_bvecs():
    sq2 = np.sqrt(2) / 2
    bvals = [0, 1, 2, 3, 4, 5, 6, 0]
    bvecs = np.array([[0, 0, 0],
                      [1, 0, 0],
                      [0, 1, 0],
                      [0, 0, 1],
                      [sq2, sq2, 0],
                      [sq2, 0, sq2],
                      [0, sq2, sq2],
                      [0, 0, 0]])
    # Happy path: bvals/bvecs round-trip onto the table unchanged.
    gt = gradient_table_from_bvals_bvecs(bvals, bvecs, b0_threshold=0)
    npt.assert_array_equal(gt.bvecs, bvecs)
    npt.assert_array_equal(gt.bvals, bvals)
    npt.assert_array_equal(gt.gradients, np.reshape(bvals, (-1, 1)) * bvecs)
    npt.assert_array_equal(gt.b0s_mask, [1, 0, 0, 0, 0, 0, 0, 1])
    # Test nans are replaced by 0
    new_bvecs = bvecs.copy()
    new_bvecs[[0, -1]] = np.nan
    gt = gradient_table_from_bvals_bvecs(bvals, new_bvecs, b0_threshold=0)
    npt.assert_array_equal(gt.bvecs, bvecs)
    # Bvalue > 0 for non-unit vector
    bad_bvals = [2, 1, 2, 3, 4, 5, 6, 0]
    npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bad_bvals,
                      bvecs, b0_threshold=0.)
    # num_gard inconsistent bvals, bvecs
    bad_bvals = np.ones(7)
    npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bad_bvals,
                      bvecs, b0_threshold=0.)
    # bvals not 1d
    bad_bvals = np.ones((1, 8))
    npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bad_bvals,
                      bvecs, b0_threshold=0.)
    # bvec not 2d
    bad_bvecs = np.ones((1, 8, 3))
    npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bvals,
                      bad_bvecs, b0_threshold=0.)
    # bvec not (N, 3)
    bad_bvecs = np.ones((8, 2))
    npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bvals,
                      bad_bvecs, b0_threshold=0.)
    # bvecs not unit vectors
    bad_bvecs = bvecs * 2
    npt.assert_raises(ValueError, gradient_table_from_bvals_bvecs, bvals,
                      bad_bvecs, b0_threshold=0.)
    # Test **kargs get passed along
    gt = gradient_table_from_bvals_bvecs(bvals, bvecs, b0_threshold=0,
                                         big_delta=5, small_delta=2)
    npt.assert_equal(gt.big_delta, 5)
    npt.assert_equal(gt.small_delta, 2)
def test_b0s():
    """b0s_mask must flag exactly the zero b-values."""
    sq2 = np.sqrt(2) / 2.
    bvals = 1500 * np.ones(8)
    bvals[0] = 0
    bvals[7] = 0
    bvecs = np.array([[0, 0, 0],
                      [1, 0, 0],
                      [0, 1, 0],
                      [0, 0, 1],
                      [sq2, sq2, 0],
                      [sq2, 0, sq2],
                      [0, sq2, sq2],
                      [0, 0, 0]])
    bt = gradient_table(bvals, bvecs)
    npt.assert_array_equal(np.where(bt.b0s_mask > 0)[0], np.array([0, 7]))
    npt.assert_array_equal(np.where(bt.b0s_mask == 0)[0], np.arange(1, 7))
def test_gtable_from_files():
    # gradient_table also accepts filenames and loads the data itself;
    # the result must match an explicit read_bvals_bvecs load.
    fimg, fbvals, fbvecs = get_data('small_101D')
    gt = gradient_table(fbvals, fbvecs)
    bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
    npt.assert_array_equal(gt.bvals, bvals)
    npt.assert_array_equal(gt.bvecs, bvecs)
def test_deltas():
    """big_delta / small_delta keyword args must be carried onto the table."""
    sq2 = np.sqrt(2) / 2.
    bvals = 1500 * np.ones(7)
    bvals[0] = 0
    bvecs = np.array([[0, 0, 0],
                      [1, 0, 0],
                      [0, 1, 0],
                      [0, 0, 1],
                      [sq2, sq2, 0],
                      [sq2, 0, sq2],
                      [0, sq2, sq2]])
    table = gradient_table(bvals, bvecs, big_delta=5, small_delta=2)
    npt.assert_equal(table.big_delta, 5)
    npt.assert_equal(table.small_delta, 2)
# Allow running this test module directly: python test_gradients.py
if __name__ == "__main__":
    from numpy.testing import run_module_suite
    run_module_suite()
|
samuelstjean/dipy
|
dipy/core/tests/test_gradients.py
|
Python
|
bsd-3-clause
| 5,975
|
o = object()
if callable(o):
o(42, 3.14)
<warning descr="'o' is not callable">o(-1)</warning>
|
ivan-fedorov/intellij-community
|
python/testData/inspections/PyCallingNonCallableInspection/callableCheck.py
|
Python
|
apache-2.0
| 98
|
import os
import sys
from distutils.core import setup
from distutils.sysconfig import get_python_lib
# Warn if we are installing over top of an existing installation. This can
# cause issues where files that were deleted from a more recent Django are
# still present in site-packages. See #18115.
overlay_warning = False
if "install" in sys.argv:
    # We have to try also with an explicit prefix of /usr/local in order to
    # catch Debian's custom user site-packages directory.
    for lib_path in get_python_lib(), get_python_lib(prefix="/usr/local"):
        existing_path = os.path.abspath(os.path.join(lib_path, "django"))
        if os.path.exists(existing_path):
            # We note the need for the warning here, but present it after the
            # command is run, so it's more likely to be seen.
            # (existing_path is reused in the final warning message below.)
            overlay_warning = True
            break
def fullsplit(path, result=None):
    """
    Split a pathname into components (the opposite of os.path.join)
    in a platform-neutral way.
    """
    parts = [] if result is None else result
    head, tail = os.path.split(path)
    if head == '':
        # Nothing left to split: prepend the final component.
        return [tail] + parts
    if head == path:
        # Reached the filesystem root; it is not part of the result.
        return parts
    return fullsplit(head, [tail] + parts)
EXCLUDE_FROM_PACKAGES = ['django.conf.project_template',
                         'django.conf.app_template',
                         'django.bin']


def is_package(package_name):
    """Return False for packages (and their subpackages) that must not install."""
    # str.startswith accepts a tuple of prefixes, checking them in one call.
    return not package_name.startswith(tuple(EXCLUDE_FROM_PACKAGES))
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, package_data = [], {}
root_dir = os.path.dirname(__file__)
if root_dir != '':
    os.chdir(root_dir)
django_dir = 'django'
for dirpath, dirnames, filenames in os.walk(django_dir):
    # Ignore PEP 3147 cache dirs and those whose names start with '.'
    dirnames[:] = [d for d in dirnames if not d.startswith('.') and d != '__pycache__']
    parts = fullsplit(dirpath)
    package_name = '.'.join(parts)
    if '__init__.py' in filenames and is_package(package_name):
        packages.append(package_name)
    elif filenames:
        # Not a package: record the files as package_data of the nearest
        # enclosing package (walking up until an already-seen package is found).
        relative_path = []
        while '.'.join(parts) not in packages:
            relative_path.append(parts.pop())
        relative_path.reverse()
        path = os.path.join(*relative_path)
        package_files = package_data.setdefault('.'.join(parts), [])
        package_files.extend([os.path.join(path, f) for f in filenames])
# Dynamically calculate the version based on django.VERSION.
version = __import__('django').get_version()
# Register the distribution; packages/package_data/version were computed above.
setup(
    name='Django',
    version=version,
    url='http://www.djangoproject.com/',
    author='Django Software Foundation',
    author_email='foundation@djangoproject.com',
    description=('A high-level Python Web framework that encourages '
                 'rapid development and clean, pragmatic design.'),
    download_url='https://www.djangoproject.com/m/releases/1.5/Django-1.5.8.tar.gz',
    license='BSD',
    packages=packages,
    package_data=package_data,
    scripts=['django/bin/django-admin.py'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Internet :: WWW/HTTP :: WSGI',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
# Deferred overlay warning (see the check above), shown after the command ran.
if overlay_warning:
    sys.stderr.write("""
========
WARNING!
========
You have just installed Django over top of an existing
installation, without removing it first. Because of this,
your install may now include extraneous files from a
previous version that have since been removed from
Django. This is known to cause a variety of problems. You
should manually remove the
%(existing_path)s
directory and re-install Django.
""" % {"existing_path": existing_path})
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/lib/django-1.5/setup.py
|
Python
|
bsd-3-clause
| 4,542
|
"""Database module, including the SQLAlchemy database object and DB-related
utilities.
"""
from sqlalchemy.orm import relationship
import datetime
from .extensions import db
from .compat import basestring
# Alias common SQLAlchemy names
Column = db.Column
class CRUDMixin(object):
    """Mixin that adds convenience methods for CRUD (create, read, update, delete)
    operations.
    """

    @classmethod
    def create(cls, **kwargs):
        """Create a new record and save it the database."""
        instance = cls(**kwargs)
        return instance.save()

    def update(self, commit=True, **kwargs):
        """Update specific fields of a record.

        BUG FIX: the original used kwargs.iteritems(), which does not exist
        on Python 3; items() works on both Python 2 and 3 (this file already
        imports a py2/py3 compat shim, so py3 support is intended).
        """
        for attr, value in kwargs.items():
            setattr(self, attr, value)
        # Returns self either way; saves only when commit is requested.
        return commit and self.save() or self

    def save(self, commit=True):
        """Save the record."""
        db.session.add(self)
        if commit:
            try:
                db.session.commit()
            except:  # deliberate bare except: roll back on any failure, then re-raise
                db.session.rollback()
                raise
        return self

    def delete(self, commit=True):
        """Remove the record from the database."""
        db.session.delete(self)
        if commit:
            try:
                return db.session.commit()
            except:  # deliberate bare except: roll back on any failure, then re-raise
                db.session.rollback()
                raise
        return False
class Model(CRUDMixin, db.Model):
    """Base model class that includes CRUD convenience methods."""
    # Abstract base: SQLAlchemy will not create a table for this class itself.
    __abstract__ = True
# From Mike Bayer's "Building the app" talk
# https://speakerdeck.com/zzzeek/building-the-app
class SurrogatePK(object):
    """A mixin that adds a surrogate integer 'primary key' column named
    ``id`` to any declarative-mapped class.
    """

    __table_args__ = {'extend_existing': True}

    id = db.Column(db.Integer, primary_key=True)

    @classmethod
    def get_by_id(cls, id):
        """Look a record up by primary key.

        Accepts ints, floats and digit strings; anything else yields None.
        """
        looks_numeric = (
            (isinstance(id, basestring) and id.isdigit()) or
            isinstance(id, (int, float))
        )
        if looks_numeric:
            return cls.query.get(int(id))
        return None
class TimeMixin(object):
    """A mixin that adds a creation and update time columns named
    ``created_time`` and ``last_update_time`` to any declarative-mapped class.
    """
    __table_args__ = {'extend_existing': True}
    # Set once when the row is first inserted (UTC, naive datetime).
    created_time = Column(db.DateTime(), default=datetime.datetime.utcnow)
    # Refreshed automatically on every UPDATE via ``onupdate``.
    last_update_time = Column(
        db.DateTime(), default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow)
def ReferenceCol(tablename, nullable=True, pk_name='id', **kwargs):
    """Column that adds primary key foreign key reference.

    Usage: ::

        category_id = ReferenceCol('category')
        category = relationship('Category', backref='categories')
    """
    target = "{0}.{1}".format(tablename, pk_name)
    return db.Column(db.ForeignKey(target), nullable=nullable, **kwargs)
|
tyler274/Recruitment-App
|
recruit_app/database.py
|
Python
|
bsd-3-clause
| 2,879
|
import os
import unittest
import yaml
from pyvcloud.vcd.client import BasicLoginCredentials
from pyvcloud.vcd.client import Client
from pyvcloud.vcd.org import Org
from pyvcloud.vcd.test import TestCase
class UpdateCatalog(TestCase):
    """Integration tests: create an org catalog, then rename/redescribe it.

    Requires a live vCloud Director endpoint configured via the TestCase's
    ``config`` / ``client`` fixtures.
    """

    def test_create_catalog(self):
        """A newly created catalog carries the configured name."""
        org_resource = self.client.get_org()
        org = Org(self.client, resource=org_resource)
        created = org.create_catalog(self.config['vcd']['catalog'], 'test catalog')
        assert created.get('name') == self.config['vcd']['catalog']

    def test_update_catalog(self):
        """Updating a catalog applies the new name and description."""
        org_resource = self.client.get_org()
        org = Org(self.client, resource=org_resource)
        updated = org.update_catalog(
            self.config['vcd']['catalog'],
            self.config['vcd']['new_name'],
            self.config['vcd']['new_desc'])
        assert updated.get('name') == self.config['vcd']['new_name']
        assert updated['Description'] == self.config['vcd']['new_desc']
# Allow running this test module directly with the standard unittest runner.
if __name__ == '__main__':
    unittest.main()
|
pacogomez/pyvcloud
|
tests/vcd_catalog_update.py
|
Python
|
apache-2.0
| 993
|
from kalibro_client.base import attributes_class_constructor, \
entity_name_decorator
from kalibro_client.processor.base import Base
from kalibro_client.miscellaneous import NativeMetric
from kalibro_client.errors import KalibroClientNotFoundError, KalibroClientRequestError
@entity_name_decorator
class MetricCollectorDetails(attributes_class_constructor('MetricCollectorDetailsAttr',
                                                          (('name', None),
                                                           ('description', None)),
                                                          identity=False),
                             Base):
    """Details of a metric collector and the native metrics it supports."""

    def __init__(self, supported_metrics=None, *init_args, **init_kwargs):
        super(MetricCollectorDetails, self).__init__(*init_args, **init_kwargs)
        self.supported_metrics = supported_metrics

    def _asdict(self):
        """Serialize to a dict, including the supported metrics mapping."""
        dict_ = super(MetricCollectorDetails, self)._asdict()
        dict_['supported_metrics'] = self.supported_metrics
        return dict_

    @property
    def supported_metrics(self):
        """Mapping of metric code -> NativeMetric."""
        return self._supported_metrics

    @supported_metrics.setter
    def supported_metrics(self, value):
        # Normalize raw dicts into NativeMetric instances.  Uses items()
        # instead of the Python-2-only iteritems(), which raises
        # AttributeError on Python 3.
        self._supported_metrics = {}
        if value is not None:
            for code, metric in value.items():
                if not isinstance(metric, NativeMetric):
                    metric = NativeMetric(**metric)
                self._supported_metrics[code] = metric

    def find_metric_by_name(self, name):
        """Return the supported metric named ``name``, or None if absent."""
        for metric in self.supported_metrics.values():
            if metric.name == name:
                return metric
        return None

    @classmethod
    def find_by_name(cls, name):
        """Fetch collector details by name from the processor service.

        Raises KalibroClientNotFoundError when the service reports an error.
        """
        try:
            response = cls.request('find', params={"name": name})
            return cls(**response['metric_collector_details'])
        except KalibroClientRequestError as error:
            error_messages = error.response.json().get('error', None)
            raise KalibroClientNotFoundError(error_messages)

    @classmethod
    def all_names(cls):
        """Return the names of all known metric collectors."""
        return cls.request('names', method='get')['metric_collector_names']

    @classmethod
    def all(cls):
        """Return every metric collector as a MetricCollectorDetails object."""
        return cls.array_to_objects_array(cls.request('', method='get'))
|
mezuro/kalibro_client_py
|
kalibro_client/processor/metric_collector_details.py
|
Python
|
lgpl-3.0
| 2,205
|
def score(arr):
    """Exponentially weighted score: element i carries weight 0.8**i, over 5."""
    weighted = (x * 0.8 ** i for i, x in enumerate(arr))
    return sum(weighted) / 5
def main():
    """Read n grades from stdin; print the full score and the mean
    leave-one-out score, both to six decimal places."""
    count = int(input())
    grades = [int(input()) for _ in range(count)]
    print('%.6f' % score(grades))
    drop_one = [score(grades[:idx] + grades[idx + 1:]) for idx in range(count)]
    print('%.6f' % (sum(drop_one) / count))
# Script entry point.
if __name__ == "__main__":
    main()
|
JonSteinn/Kattis-Solutions
|
src/School Spirit/Python 3/main.py
|
Python
|
gpl-3.0
| 301
|
"""Decompress a .lz77 file produced by the companion lz77 compressor.

Usage: python unlz77.py <input.lz77>

Token format (per iteration): one byte ``token``; token 0 means a bare
literal, an odd token means the match length follows in the next byte,
an even token means an implicit match length of 1.  token // 2 is the
back-reference offset into the output built so far.  The literal
character always follows the (optional) length byte.
"""
import codecs
import math  # NOTE(review): unused here; kept in case other tooling imports it.
import sys

file_name = sys.argv[1]

data = ""
with codecs.open(file_name, encoding='utf-8', mode='r') as in_file:
    while True:
        c = in_file.read(1)
        if not c:  # EOF
            break
        token = ord(c)
        if token == 0:
            # Bare literal: nothing to copy.
            index = 0
            size = 0
        elif token % 2 == 1:
            # Odd token: explicit match length in the next byte.
            # Floor division keeps this an int on Python 3 as well.
            index = token // 2
            size = ord(in_file.read(1))
        else:
            # Even token: implicit match length of one.
            index = token // 2
            size = 1
        letter = in_file.read(1)
        # Copy the referenced slice of the output produced so far, then
        # append the literal character.
        data += data[index:index + size] + letter

# The original used file_name.strip('.lz77'), which strips the *characters*
# '.', 'l', 'z', '7' from both ends and mangles names such as 'zip.lz77'.
# Remove only the actual suffix instead.
if file_name.endswith('.lz77'):
    out_name = file_name[:-len('.lz77')] + ".decompressed77"
else:
    out_name = file_name + ".decompressed77"
with codecs.open(out_name, encoding='utf-8', mode='w') as out_file:
    out_file.write(data)
|
hpanago/LZ77-LZ78
|
lz77/unlz77.py
|
Python
|
gpl-2.0
| 791
|
"""AMQP Connections"""
# Copyright (C) 2007-2008 Barry Pederson <bp@barryp.org>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
from __future__ import absolute_import
import logging
import socket
from array import array
try:
from ssl import SSLError
except ImportError:
class SSLError(Exception): # noqa
pass
from . import __version__
from .abstract_channel import AbstractChannel
from .channel import Channel
from .exceptions import (
AMQPNotImplementedError, ChannelError, ResourceError,
ConnectionForced, ConnectionError, error_for_code,
RecoverableConnectionError, RecoverableChannelError,
)
from .five import items, range, values, monotonic
from .method_framing import MethodReader, MethodWriter
from .serialization import AMQPWriter
from .transport import create_transport
HAS_MSG_PEEK = hasattr(socket, 'MSG_PEEK')
START_DEBUG_FMT = """
Start from server, version: %d.%d, properties: %s, mechanisms: %s, locales: %s
""".strip()
__all__ = ['Connection']
#
# Client property info that gets sent to the server on connection startup
#
LIBRARY_PROPERTIES = {
'product': 'py-amqp',
'product_version': __version__,
'capabilities': {},
}
AMQP_LOGGER = logging.getLogger('amqp')
class Connection(AbstractChannel):
"""The connection class provides methods for a client to establish a
network connection to a server, and for both peers to operate the
connection thereafter.
GRAMMAR::
connection = open-connection *use-connection close-connection
open-connection = C:protocol-header
S:START C:START-OK
*challenge
S:TUNE C:TUNE-OK
C:OPEN S:OPEN-OK
challenge = S:SECURE C:SECURE-OK
use-connection = *channel
close-connection = C:CLOSE S:CLOSE-OK
/ S:CLOSE C:CLOSE-OK
"""
Channel = Channel
#: Final heartbeat interval value (in float seconds) after negotiation
heartbeat = None
#: Original heartbeat interval value proposed by client.
client_heartbeat = None
#: Original heartbeat interval proposed by server.
server_heartbeat = None
#: Time of last heartbeat sent (in monotonic time, if available).
last_heartbeat_sent = 0
#: Time of last heartbeat received (in monotonic time, if available).
last_heartbeat_received = 0
#: Number of bytes sent to socket at the last heartbeat check.
prev_sent = None
#: Number of bytes received from socket at the last heartbeat check.
prev_recv = None
def __init__(self, host='localhost', userid='guest', password='guest',
login_method='AMQPLAIN', login_response=None,
virtual_host='/', locale='en_US', client_properties=None,
ssl=False, connect_timeout=None, channel_max=None,
frame_max=None, heartbeat=0, on_blocked=None,
on_unblocked=None, confirm_publish=False, **kwargs):
"""Create a connection to the specified host, which should be
a 'host[:port]', such as 'localhost', or '1.2.3.4:5672'
(defaults to 'localhost', if a port is not specified then
5672 is used)
If login_response is not specified, one is built up for you from
userid and password if they are present.
The 'ssl' parameter may be simply True/False, or for Python >= 2.6
a dictionary of options to pass to ssl.wrap_socket() such as
requiring certain certificates.
"""
channel_max = channel_max or 65535
frame_max = frame_max or 131072
if (login_response is None) \
and (userid is not None) \
and (password is not None):
login_response = AMQPWriter()
login_response.write_table({'LOGIN': userid, 'PASSWORD': password})
login_response = login_response.getvalue()[4:] # Skip the length
# at the beginning
d = dict(LIBRARY_PROPERTIES, **client_properties or {})
self._method_override = {(60, 50): self._dispatch_basic_return}
self.channels = {}
# The connection object itself is treated as channel 0
super(Connection, self).__init__(self, 0)
self.transport = None
# Properties set in the Tune method
self.channel_max = channel_max
self.frame_max = frame_max
self.client_heartbeat = heartbeat
self.confirm_publish = confirm_publish
# Callbacks
self.on_blocked = on_blocked
self.on_unblocked = on_unblocked
self._avail_channel_ids = array('H', range(self.channel_max, 0, -1))
# Properties set in the Start method
self.version_major = 0
self.version_minor = 0
self.server_properties = {}
self.mechanisms = []
self.locales = []
# Let the transport.py module setup the actual
# socket connection to the broker.
#
self.transport = create_transport(host, connect_timeout, ssl)
self.method_reader = MethodReader(self.transport)
self.method_writer = MethodWriter(self.transport, self.frame_max)
self.wait(allowed_methods=[
(10, 10), # start
])
self._x_start_ok(d, login_method, login_response, locale)
self._wait_tune_ok = True
while self._wait_tune_ok:
self.wait(allowed_methods=[
(10, 20), # secure
(10, 30), # tune
])
return self._x_open(virtual_host)
@property
def connected(self):
return self.transport and self.transport.connected
def _do_close(self):
    """Close the transport and every open channel, ignoring socket errors
    (the peer may already have dropped the connection)."""
    try:
        self.transport.close()
        children = [ch for ch in values(self.channels) if ch is not self]
        for ch in children:
            ch._do_close()
    except socket.error:
        pass  # connection already closed on the other end
    finally:
        # Break reference cycles and mark the connection unusable.
        self.transport = self.connection = self.channels = None
def _get_free_channel_id(self):
    """Pop and return an unused channel id.

    Raises ResourceError when every id up to channel_max is in use.
    """
    if not self._avail_channel_ids:
        raise ResourceError(
            'No free channel ids, current={0}, channel_max={1}'.format(
                len(self.channels), self.channel_max), (20, 10))
    return self._avail_channel_ids.pop()
def _claim_channel_id(self, channel_id):
    """Take ``channel_id`` out of the free pool.

    Raises ConnectionError if the id is not available (already open).
    """
    if channel_id not in self._avail_channel_ids:
        raise ConnectionError(
            'Channel %r already open' % (channel_id, ))
    return self._avail_channel_ids.remove(channel_id)
def _wait_method(self, channel_id, allowed_methods):
"""Wait for a method from the server destined for
a particular channel."""
#
# Check the channel's deferred methods
#
method_queue = self.channels[channel_id].method_queue
for queued_method in method_queue:
method_sig = queued_method[0]
if (allowed_methods is None) \
or (method_sig in allowed_methods) \
or (method_sig == (20, 40)):
method_queue.remove(queued_method)
return queued_method
#
# Nothing queued, need to wait for a method from the peer
#
while 1:
channel, method_sig, args, content = \
self.method_reader.read_method()
if channel == channel_id and (
allowed_methods is None or
method_sig in allowed_methods or
method_sig == (20, 40)):
return method_sig, args, content
#
# Certain methods like basic_return should be dispatched
# immediately rather than being queued, even if they're not
# one of the 'allowed_methods' we're looking for.
#
if channel and method_sig in self.Channel._IMMEDIATE_METHODS:
self.channels[channel].dispatch_method(
method_sig, args, content,
)
continue
#
# Not the channel and/or method we were looking for. Queue
# this method for later
#
self.channels[channel].method_queue.append(
(method_sig, args, content),
)
#
# If we just queued up a method for channel 0 (the Connection
# itself) it's probably a close method in reaction to some
# error, so deal with it right away.
#
if not channel:
self.wait()
def channel(self, channel_id=None):
    """Fetch the Channel object identified by the numeric channel_id,
    creating it if it doesn't already exist."""
    existing = self.channels.get(channel_id)
    if existing is not None:
        return existing
    return self.Channel(self, channel_id)
def is_alive(self):
    """Best-effort liveness probe of the underlying socket.

    Performs a near-zero-timeout MSG_PEEK recv so no data is consumed:
    a timeout means "nothing buffered but still connected"; a socket
    error means the peer went away.  On platforms without MSG_PEEK this
    always reports True.
    """
    if HAS_MSG_PEEK:
        sock = self.sock
        prev = sock.gettimeout()
        sock.settimeout(0.0001)
        try:
            sock.recv(1, socket.MSG_PEEK)
        except socket.timeout:
            pass  # no data pending; the connection is still up
        except socket.error:
            return False  # peer closed / reset the connection
        finally:
            sock.settimeout(prev)  # always restore the caller's timeout
    return True
def drain_events(self, timeout=None):
    """Wait for an event on a channel.

    Blocks (up to ``timeout`` seconds) for the next method frame on any
    channel, optionally decodes the content body per the channel's
    ``auto_decode`` flag, and dispatches to the matching handler.

    Raises AMQPNotImplementedError for methods with no registered handler.
    """
    chanmap = self.channels
    chanid, method_sig, args, content = self._wait_multiple(
        chanmap, None, timeout=timeout,
    )
    channel = chanmap[chanid]
    if (content and
            channel.auto_decode and
            hasattr(content, 'content_encoding')):
        try:
            content.body = content.body.decode(content.content_encoding)
        except Exception:
            # Best effort: leave the body as raw bytes if decoding fails.
            pass
    # Connection-level overrides (e.g. basic.return) take precedence
    # over the channel's own method map.
    amqp_method = (self._method_override.get(method_sig) or
                   channel._METHOD_MAP.get(method_sig, None))
    if amqp_method is None:
        raise AMQPNotImplementedError(
            'Unknown AMQP method {0!r}'.format(method_sig))
    if content is None:
        return amqp_method(channel, args)
    else:
        return amqp_method(channel, args, content)
def read_timeout(self, timeout=None):
    """Read the next method frame, enforcing ``timeout`` on the socket.

    Temporarily swaps the socket timeout and restores it afterwards.
    SSL timeout-ish errors are translated to socket.timeout so callers
    handle one exception type.
    """
    if timeout is None:
        return self.method_reader.read_method()
    sock = self.sock
    prev = sock.gettimeout()
    if prev != timeout:
        sock.settimeout(timeout)
    try:
        try:
            return self.method_reader.read_method()
        except SSLError as exc:
            # SSL wraps timeouts in SSLError; see
            # http://bugs.python.org/issue10272
            if 'timed out' in str(exc):
                raise socket.timeout()
            # Non-blocking SSL sockets can throw SSLError
            if 'The operation did not complete' in str(exc):
                raise socket.timeout()
            raise
    finally:
        if prev != timeout:
            sock.settimeout(prev)
def _wait_multiple(self, channels, allowed_methods, timeout=None):
for channel_id, channel in items(channels):
method_queue = channel.method_queue
for queued_method in method_queue:
method_sig = queued_method[0]
if (allowed_methods is None or
method_sig in allowed_methods or
method_sig == (20, 40)):
method_queue.remove(queued_method)
method_sig, args, content = queued_method
return channel_id, method_sig, args, content
# Nothing queued, need to wait for a method from the peer
read_timeout = self.read_timeout
wait = self.wait
while 1:
channel, method_sig, args, content = read_timeout(timeout)
if channel in channels and (
allowed_methods is None or
method_sig in allowed_methods or
method_sig == (20, 40)):
return channel, method_sig, args, content
# Not the channel and/or method we were looking for. Queue
# this method for later
channels[channel].method_queue.append((method_sig, args, content))
#
# If we just queued up a method for channel 0 (the Connection
# itself) it's probably a close method in reaction to some
# error, so deal with it right away.
#
if channel == 0:
wait()
def _dispatch_basic_return(self, channel, args, msg):
    """Handle a basic.return from the broker (unroutable message).

    Decodes the return frame and forwards it to any registered
    'basic_return' handlers on the channel; with no handlers the
    corresponding ChannelError is raised instead.
    """
    code = args.read_short()
    text = args.read_shortstr()
    exchange = args.read_shortstr()
    routing_key = args.read_shortstr()
    exc = error_for_code(code, text, (50, 60), ChannelError)
    callbacks = channel.events.get('basic_return')
    if not callbacks:
        raise exc
    for callback in callbacks:
        callback(exc, exchange, routing_key, msg)
def close(self, reply_code=0, reply_text='', method_sig=(0, 0)):
"""Request a connection close
This method indicates that the sender wants to close the
connection. This may be due to internal conditions (e.g. a
forced shut-down) or due to an error handling a specific
method, i.e. an exception. When a close is due to an
exception, the sender provides the class and method id of the
method which caused the exception.
RULE:
After sending this method any received method except the
Close-OK method MUST be discarded.
RULE:
The peer sending this method MAY use a counter or timeout
to detect failure of the other peer to respond correctly
with the Close-OK method.
RULE:
When a server receives the Close method from a client it
MUST delete all server-side resources associated with the
client's context. A client CANNOT reconnect to a context
after sending or receiving a Close method.
PARAMETERS:
reply_code: short
The reply code. The AMQ reply codes are defined in AMQ
RFC 011.
reply_text: shortstr
The localised reply text. This text can be logged as an
aid to resolving issues.
class_id: short
failing method class
When the close is provoked by a method exception, this
is the class of the method.
method_id: short
failing method ID
When the close is provoked by a method exception, this
is the ID of the method.
"""
if self.transport is None:
# already closed
return
args = AMQPWriter()
args.write_short(reply_code)
args.write_shortstr(reply_text)
args.write_short(method_sig[0]) # class_id
args.write_short(method_sig[1]) # method_id
self._send_method((10, 50), args)
return self.wait(allowed_methods=[
(10, 50), # Connection.close
(10, 51), # Connection.close_ok
])
def _close(self, args):
"""Request a connection close
This method indicates that the sender wants to close the
connection. This may be due to internal conditions (e.g. a
forced shut-down) or due to an error handling a specific
method, i.e. an exception. When a close is due to an
exception, the sender provides the class and method id of the
method which caused the exception.
RULE:
After sending this method any received method except the
Close-OK method MUST be discarded.
RULE:
The peer sending this method MAY use a counter or timeout
to detect failure of the other peer to respond correctly
with the Close-OK method.
RULE:
When a server receives the Close method from a client it
MUST delete all server-side resources associated with the
client's context. A client CANNOT reconnect to a context
after sending or receiving a Close method.
PARAMETERS:
reply_code: short
The reply code. The AMQ reply codes are defined in AMQ
RFC 011.
reply_text: shortstr
The localised reply text. This text can be logged as an
aid to resolving issues.
class_id: short
failing method class
When the close is provoked by a method exception, this
is the class of the method.
method_id: short
failing method ID
When the close is provoked by a method exception, this
is the ID of the method.
"""
reply_code = args.read_short()
reply_text = args.read_shortstr()
class_id = args.read_short()
method_id = args.read_short()
self._x_close_ok()
raise error_for_code(reply_code, reply_text,
(class_id, method_id), ConnectionError)
def _blocked(self, args):
    """RabbitMQ Extension: the broker asked us to stop publishing
    (e.g. resource alarm); invoke the user's on_blocked callback."""
    reason = args.read_shortstr()
    if self.on_blocked:
        return self.on_blocked(reason)

def _unblocked(self, *args):
    """RabbitMQ Extension: the broker lifted the publishing block."""
    if self.on_unblocked:
        return self.on_unblocked()
def _x_close_ok(self):
"""Confirm a connection close
This method confirms a Connection.Close method and tells the
recipient that it is safe to release resources for the
connection and close the socket.
RULE:
A peer that detects a socket closure without having
received a Close-Ok handshake method SHOULD log the error.
"""
self._send_method((10, 51))
self._do_close()
def _close_ok(self, args):
"""Confirm a connection close
This method confirms a Connection.Close method and tells the
recipient that it is safe to release resources for the
connection and close the socket.
RULE:
A peer that detects a socket closure without having
received a Close-Ok handshake method SHOULD log the error.
"""
self._do_close()
def _x_open(self, virtual_host, capabilities=''):
"""Open connection to virtual host
This method opens a connection to a virtual host, which is a
collection of resources, and acts to separate multiple
application domains within a server.
RULE:
The client MUST open the context before doing any work on
the connection.
PARAMETERS:
virtual_host: shortstr
virtual host name
The name of the virtual host to work with.
RULE:
If the server supports multiple virtual hosts, it
MUST enforce a full separation of exchanges,
queues, and all associated entities per virtual
host. An application, connected to a specific
virtual host, MUST NOT be able to access resources
of another virtual host.
RULE:
The server SHOULD verify that the client has
permission to access the specified virtual host.
RULE:
The server MAY configure arbitrary limits per
virtual host, such as the number of each type of
entity that may be used, per connection and/or in
total.
capabilities: shortstr
required capabilities
The client may specify a number of capability names,
delimited by spaces. The server can use this string
to how to process the client's connection request.
"""
args = AMQPWriter()
args.write_shortstr(virtual_host)
args.write_shortstr(capabilities)
args.write_bit(False)
self._send_method((10, 40), args)
return self.wait(allowed_methods=[
(10, 41), # Connection.open_ok
])
def _open_ok(self, args):
"""Signal that the connection is ready
This method signals to the client that the connection is ready
for use.
PARAMETERS:
known_hosts: shortstr (deprecated)
"""
AMQP_LOGGER.debug('Open OK!')
def _secure(self, args):
"""Security mechanism challenge
The SASL protocol works by exchanging challenges and responses
until both peers have received sufficient information to
authenticate each other. This method challenges the client to
provide more information.
PARAMETERS:
challenge: longstr
security challenge data
Challenge information, a block of opaque binary data
passed to the security mechanism.
"""
challenge = args.read_longstr() # noqa
def _x_secure_ok(self, response):
"""Security mechanism response
This method attempts to authenticate, passing a block of SASL
data for the security mechanism at the server side.
PARAMETERS:
response: longstr
security response data
A block of opaque data passed to the security
mechanism. The contents of this data are defined by
the SASL security mechanism.
"""
args = AMQPWriter()
args.write_longstr(response)
self._send_method((10, 21), args)
def _start(self, args):
"""Start connection negotiation
This method starts the connection negotiation process by
telling the client the protocol version that the server
proposes, along with a list of security mechanisms which the
client can use for authentication.
RULE:
If the client cannot handle the protocol version suggested
by the server it MUST close the socket connection.
RULE:
The server MUST provide a protocol version that is lower
than or equal to that requested by the client in the
protocol header. If the server cannot support the
specified protocol it MUST NOT send this method, but MUST
close the socket connection.
PARAMETERS:
version_major: octet
protocol major version
The protocol major version that the server agrees to
use, which cannot be higher than the client's major
version.
version_minor: octet
protocol major version
The protocol minor version that the server agrees to
use, which cannot be higher than the client's minor
version.
server_properties: table
server properties
mechanisms: longstr
available security mechanisms
A list of the security mechanisms that the server
supports, delimited by spaces. Currently ASL supports
these mechanisms: PLAIN.
locales: longstr
available message locales
A list of the message locales that the server
supports, delimited by spaces. The locale defines the
language in which the server will send reply texts.
RULE:
All servers MUST support at least the en_US
locale.
"""
self.version_major = args.read_octet()
self.version_minor = args.read_octet()
self.server_properties = args.read_table()
self.mechanisms = args.read_longstr().split(' ')
self.locales = args.read_longstr().split(' ')
AMQP_LOGGER.debug(
START_DEBUG_FMT,
self.version_major, self.version_minor,
self.server_properties, self.mechanisms, self.locales,
)
def _x_start_ok(self, client_properties, mechanism, response, locale):
"""Select security mechanism and locale
This method selects a SASL security mechanism. ASL uses SASL
(RFC2222) to negotiate authentication and encryption.
PARAMETERS:
client_properties: table
client properties
mechanism: shortstr
selected security mechanism
A single security mechanisms selected by the client,
which must be one of those specified by the server.
RULE:
The client SHOULD authenticate using the highest-
level security profile it can handle from the list
provided by the server.
RULE:
The mechanism field MUST contain one of the
security mechanisms proposed by the server in the
Start method. If it doesn't, the server MUST close
the socket.
response: longstr
security response data
A block of opaque data passed to the security
mechanism. The contents of this data are defined by
the SASL security mechanism. For the PLAIN security
mechanism this is defined as a field table holding two
fields, LOGIN and PASSWORD.
locale: shortstr
selected message locale
A single message local selected by the client, which
must be one of those specified by the server.
"""
if self.server_capabilities.get('consumer_cancel_notify'):
if 'capabilities' not in client_properties:
client_properties['capabilities'] = {}
client_properties['capabilities']['consumer_cancel_notify'] = True
if self.server_capabilities.get('connection.blocked'):
if 'capabilities' not in client_properties:
client_properties['capabilities'] = {}
client_properties['capabilities']['connection.blocked'] = True
args = AMQPWriter()
args.write_table(client_properties)
args.write_shortstr(mechanism)
args.write_longstr(response)
args.write_shortstr(locale)
self._send_method((10, 11), args)
def _tune(self, args):
"""Propose connection tuning parameters
This method proposes a set of connection configuration values
to the client. The client can accept and/or adjust these.
PARAMETERS:
channel_max: short
proposed maximum channels
The maximum total number of channels that the server
allows per connection. Zero means that the server does
not impose a fixed limit, but the number of allowed
channels may be limited by available server resources.
frame_max: long
proposed maximum frame size
The largest frame size that the server proposes for
the connection. The client can negotiate a lower
value. Zero means that the server does not impose any
specific limit but may reject very large frames if it
cannot allocate resources for them.
RULE:
Until the frame-max has been negotiated, both
peers MUST accept frames of up to 4096 octets
large. The minimum non-zero value for the frame-
max field is 4096.
heartbeat: short
desired heartbeat delay
The delay, in seconds, of the connection heartbeat
that the server wants. Zero means the server does not
want a heartbeat.
"""
client_heartbeat = self.client_heartbeat or 0
self.channel_max = args.read_short() or self.channel_max
self.frame_max = args.read_long() or self.frame_max
self.method_writer.frame_max = self.frame_max
self.server_heartbeat = args.read_short() or 0
# negotiate the heartbeat interval to the smaller of the
# specified values
if self.server_heartbeat == 0 or client_heartbeat == 0:
self.heartbeat = max(self.server_heartbeat, client_heartbeat)
else:
self.heartbeat = min(self.server_heartbeat, client_heartbeat)
# Ignore server heartbeat if client_heartbeat is disabled
if not self.client_heartbeat:
self.heartbeat = 0
self._x_tune_ok(self.channel_max, self.frame_max, self.heartbeat)
def send_heartbeat(self):
    """Write one heartbeat frame (frame type 8, channel 0, empty payload)."""
    self.transport.write_frame(8, 0, bytes())
def heartbeat_tick(self, rate=2):
    """Send heartbeat packets, if necessary, and fail if none have been
    received recently. This should be called frequently, on the order of
    once per second.

    Raises ConnectionForced when more than two heartbeat intervals pass
    without any inbound traffic.

    :keyword rate: Ignored
    """
    # heartbeat == 0 means heartbeats were disabled during negotiation.
    if not self.heartbeat:
        return
    # treat actual data exchange in either direction as a heartbeat
    sent_now = self.method_writer.bytes_sent
    recv_now = self.method_reader.bytes_recv
    if self.prev_sent is None or self.prev_sent != sent_now:
        self.last_heartbeat_sent = monotonic()
    if self.prev_recv is None or self.prev_recv != recv_now:
        self.last_heartbeat_received = monotonic()
    self.prev_sent, self.prev_recv = sent_now, recv_now
    # send a heartbeat if it's time to do so
    if monotonic() > self.last_heartbeat_sent + self.heartbeat:
        self.send_heartbeat()
        self.last_heartbeat_sent = monotonic()
    # if we've missed two intervals' heartbeats, fail; this gives the
    # server enough time to send heartbeats a little late
    if (self.last_heartbeat_received and
            self.last_heartbeat_received + 2 *
            self.heartbeat < monotonic()):
        raise ConnectionForced('Too many heartbeats missed')
def _x_tune_ok(self, channel_max, frame_max, heartbeat):
"""Negotiate connection tuning parameters
This method sends the client's connection tuning parameters to
the server. Certain fields are negotiated, others provide
capability information.
PARAMETERS:
channel_max: short
negotiated maximum channels
The maximum total number of channels that the client
will use per connection. May not be higher than the
value specified by the server.
RULE:
The server MAY ignore the channel-max value or MAY
use it for tuning its resource allocation.
frame_max: long
negotiated maximum frame size
The largest frame size that the client and server will
use for the connection. Zero means that the client
does not impose any specific limit but may reject very
large frames if it cannot allocate resources for them.
Note that the frame-max limit applies principally to
content frames, where large contents can be broken
into frames of arbitrary size.
RULE:
Until the frame-max has been negotiated, both
peers must accept frames of up to 4096 octets
large. The minimum non-zero value for the frame-
max field is 4096.
heartbeat: short
desired heartbeat delay
The delay, in seconds, of the connection heartbeat
that the client wants. Zero means the client does not
want a heartbeat.
"""
args = AMQPWriter()
args.write_short(channel_max)
args.write_long(frame_max)
args.write_short(heartbeat or 0)
self._send_method((10, 31), args)
self._wait_tune_ok = False
@property
def sock(self):
    """The transport's underlying socket object."""
    return self.transport.sock

@property
def server_capabilities(self):
    """Capabilities table announced by the server (empty dict if absent)."""
    return self.server_properties.get('capabilities') or {}
_METHOD_MAP = {
(10, 10): _start,
(10, 20): _secure,
(10, 30): _tune,
(10, 41): _open_ok,
(10, 50): _close,
(10, 51): _close_ok,
(10, 60): _blocked,
(10, 61): _unblocked,
}
_IMMEDIATE_METHODS = []
connection_errors = (
ConnectionError,
socket.error,
IOError,
OSError,
)
channel_errors = (ChannelError, )
recoverable_connection_errors = (
RecoverableConnectionError,
socket.error,
IOError,
OSError,
)
recoverable_channel_errors = (
RecoverableChannelError,
)
|
DESHRAJ/fjord
|
vendor/packages/amqp/amqp/connection.py
|
Python
|
bsd-3-clause
| 34,121
|
"""Knuth-Morris-Pratt finds the 1st occurrence of a pattern in a text string wo/backing-up."""
import collections as cx
class KMP(object):  # O ~ txtlen + patlen * alphabet-size (wc)
    """Knuth-Morris-Pratt: find the first occurrence of a pattern in a text
    without backing up in the text."""

    def __init__(self, pat):
        """Preprocess the pattern into a DFA keyed by pattern character."""
        self._pat = list(pat)
        self._M = len(self._pat)
        # All-zero transition row used for characters absent from the pattern.
        self._miss = [0] * self._M
        # DFA: char -> list of next-state transitions, one per pattern position.
        self._dfa = cx.defaultdict(lambda: [0] * self._M)
        self._dfa[self._pat[0]][0] = 1
        restart = 0
        for pos, ch in enumerate(self._pat[1:], 1):
            for sym in list(self._dfa):
                self._dfa[sym][pos] = self._dfa[sym][restart]  # mismatch: fall back
            self._dfa[ch][pos] = pos + 1                       # match: advance
            restart = self._dfa[ch][restart]                   # update restart state

    def search(self, txt):
        """Return the index of the first occurrence of the pattern in
        ``txt``, or ``len(txt)`` if the pattern is absent."""
        pat_len = self._M
        txt_len = len(txt)
        state = 0
        idx = 0
        # Simulate the DFA over the text; unknown chars use the miss row.
        while idx < txt_len and state < pat_len:
            state = self._dfa.get(txt[idx], self._miss)[state]
            idx += 1
        if state == pat_len:
            return idx - pat_len  # found
        return txt_len  # not found, return text size

    def prt_dfa(self, prt):
        """Print DFA (Deterministic finite state automatom) from pat."""
        prt.write(" {}\n".format(' '.join(self._pat)))
        prt.write(" {} <- Current State\n".format(' '.join(map(str, range(self._M)))))
        for pat_letter, state_nxt in sorted(self._dfa.items()):
            prt.write("{} -> {}\n".format(pat_letter, ' '.join(map(str, state_nxt))))
# Copyright 2002-2016, Robert Sedgewick and Kevin Wayne.
# Copyright 2015-2019, DV Klopfenstein, Python implementation.
|
dvklopfenstein/PrincetonAlgorithms
|
py/AlgsSedgewickWayne/KMP.py
|
Python
|
gpl-2.0
| 1,775
|
"""
Use this plugin to activate coverage report.
To install this plugin, you need to activate ``coverage-plugin``
with extra requirements :
::
$ pip install nose2[coverage-plugin]
Next, you can enable coverage reporting with :
::
$ nose2 --with-coverage
Or with this lines in ``unittest.cfg`` :
::
[coverage]
always-on = True
"""
from nose2.events import Plugin
class Coverage(Plugin):
    """nose2 plugin that measures code coverage via the ``cov_core`` package.

    Activated with ``-C``/``--with-coverage`` or ``always-on = True`` in the
    ``[coverage]`` config section.
    """

    configSection = 'coverage'
    commandLineSwitch = ('C', 'with-coverage', 'Turn on coverage reporting')

    def __init__(self):
        """Get our config and add our command line arguments."""
        self.conSource = self.config.as_list('coverage', [])
        self.conReport = self.config.as_list('coverage-report', [])
        self.conConfig = self.config.as_str('coverage-config', '').strip()
        group = self.session.pluginargs
        group.add_argument(
            '--coverage', action='append', default=[], metavar='PATH',
            dest='coverage_source',
            help='Measure coverage for filesystem path (multi-allowed)'
        )
        group.add_argument(
            '--coverage-report', action='append', default=[], metavar='TYPE',
            choices=['term', 'term-missing', 'annotate', 'html', 'xml'],
            dest='coverage_report',
            help='Generate selected reports, available types:'
                 ' term, term-missing, annotate, html, xml (multi-allowed)'
        )
        group.add_argument(
            '--coverage-config', action='store', default='', metavar='FILE',
            dest='coverage_config',
            help='Config file for coverage, default: .coveragerc'
        )

    def handleArgs(self, event):
        """Get our options in order command line, config file, hard coded."""
        self.covSource = (event.args.coverage_source or
                          self.conSource or ['.'])
        self.covReport = (event.args.coverage_report or
                          self.conReport or ['term'])
        self.covConfig = (event.args.coverage_config or
                          self.conConfig or '.coveragerc')

    def startTestRun(self, event):
        """Only called if active so start coverage."""
        self.covController = None
        try:
            import cov_core
        # BUGFIX: was a bare ``except:``, which would also swallow
        # KeyboardInterrupt/SystemExit and hide unrelated import-time
        # failures; only a missing dependency should trigger the warning.
        except ImportError:
            print('Warning: you need to install [coverage-plugin] '
                  'extra requirements to use this plugin')
            return
        self.covController = cov_core.Central(self.covSource,
                                              self.covReport,
                                              self.covConfig)
        self.covController.start()

    def afterSummaryReport(self, event):
        """Only called if active so stop coverage and produce reports."""
        if self.covController:
            self.covController.finish()
            self.covController.summary(event.stream)
|
drnextgis/QGIS
|
python/ext-libs/nose2/plugins/coverage.py
|
Python
|
gpl-2.0
| 2,893
|
from django.core.management.base import BaseCommand
import logging
import sys
# Verbosity assumed before the command's --verbosity option is parsed.
DEFAULT_VERBOSITY = 1

# Maps Django's --verbosity levels (0-3) onto logging severities; both
# 2 and 3 yield full DEBUG output.
VERBOSITY_LOG_MAP = {
    0: logging.WARN,
    1: logging.INFO,
    2: logging.DEBUG,
    3: logging.DEBUG,
}
class CustomBaseCommand(BaseCommand):
    '''
    Management command base exposing a lazy ``self.log`` logger shortcut.
    The logger's level follows the ``--verbosity`` option passed to the
    management command.
    '''

    def __init__(self, session=None):
        self._session = session
        self.verbosity = DEFAULT_VERBOSITY
        BaseCommand.__init__(self)

    def handle(self, *args, **kwargs):
        # Record the verbosity requested on the command line.
        self.verbosity = int(kwargs.get('verbosity', DEFAULT_VERBOSITY))

    def create_logger(self):
        """Build a logger named after the command's module."""
        module_tail = self.__class__.__module__.split('.')[-1]
        logger_name = 'management.commands.%s' % module_tail
        logger = logging.getLogger(logger_name)
        logger.extra = {
            'view': logger_name,
            'data': {
                'command': ' '.join(sys.argv),
            },
        }
        logger.addHandler(logging.StreamHandler())
        logger.setLevel(VERBOSITY_LOG_MAP[self.verbosity])
        return logger

    @property
    def log(self):
        """Logger created on first access and cached on the instance."""
        try:
            return self.logger
        except AttributeError:
            self.logger = self.create_logger()
            return self.logger
|
danosaure/Django-facebook
|
django_facebook/management/commands/base.py
|
Python
|
bsd-3-clause
| 1,339
|
#! /usr/bin/python
# encoding: utf-8

# Python 2 demo: equality (==) compares contents, identity (is) compares
# object identity.

# Two dicts built with different syntaxes but identical contents.
d1 = dict(a=1, b=[1,2,3], c=tuple([1,2,3]))
d2 = {'a':1, 'b':[1,2,3], 'c':tuple([1,2,3])}
print d1, '\n', d2
print d1 == d2   # True: same keys and values
print d1 is d2   # False: two distinct dict objects
d3 = d2          # aliasing: d3 names the same object as d2
print d2 is d3   # True

# Same experiment with tuples.
t1 = tuple([1,2,3])
t2 = tuple([1,2,3])
t3 = t2
print '\n'
print t1==t2     # True: equal contents
print t3==t2     # True
print t1 is t2   # False: tuple() over a non-empty list builds a new object
print t3 is t2   # True: t3 aliases t2
|
kumalee/python-101
|
part-2/collect/dicts.py
|
Python
|
mit
| 316
|
"""
pyexcel.internal.source_plugin
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Second level abstraction
:copyright: (c) 2015-2020 by Onni Software Ltd.
:license: New BSD License
"""
from pyexcel import constants as constants
from pyexcel import exceptions as exceptions
from lml.plugin import PluginManager
from pyexcel.internal.attributes import (
register_book_attribute,
register_sheet_attribute,
)
from pyexcel_io import constants as io_constants
# Registry keys look like "<target>-<action>", e.g. "sheet-read".
REGISTRY_KEY_FORMAT = "%s-%s"
# ignore the following attributes: DB sources get no dot-notation accessors
NO_DOT_NOTATION = (io_constants.DB_DJANGO, io_constants.DB_SQL)
class SourcePluginManager(PluginManager):
    """Data source plugin loader.

    Resolves "<target>-<action>" registry keys (e.g. "sheet-read") to a
    source plugin class and registers dot-notation attributes for each
    plugin's file types.
    """

    def __init__(self):
        PluginManager.__init__(self, "source")
        # attribute name (e.g. "csv") -> key of the plugin serving it
        self.keywords = {}

    def load_me_later(self, plugin_info):
        """Defer loading of the plugin but record its attributes now."""
        PluginManager.load_me_later(self, plugin_info)
        self._register_a_plugin_info(plugin_info)

    def load_me_now(self, key, action=None, library=None, **keywords):
        """get source module into memory for use"""
        self._logger.debug("load me now:" + key)
        plugin = None
        for source in self.registry[key]:
            if source.is_my_business(action, **keywords):
                plugin = self.dynamic_load_library(source)
                module_name = _get_me_pypi_package_name(plugin.__module__)
                # Honour an explicit library choice: skip other providers.
                if library and module_name != library:
                    continue
                else:
                    break
        else:
            # nothing is found, no break
            _error_handler(action, **keywords)
        return plugin

    def register_a_plugin(self, plugin_cls, plugin_info):
        """ for dynamically loaded plugin """
        PluginManager.register_a_plugin(self, plugin_cls, plugin_info)
        self._register_a_plugin_info(plugin_info)

    def get_a_plugin(
        self, target=None, action=None, source_library=None, **keywords
    ):
        """obtain a source plugin for signature functions"""
        key = REGISTRY_KEY_FORMAT % (target, action)
        io_library = None
        # backward support pyexcel-io library parameter
        if "library" in keywords:
            io_library = keywords.pop("library")
        source_cls = self.load_me_now(
            key, action=action, library=source_library, **keywords
        )
        if io_library is not None:
            keywords["library"] = io_library
        source_instance = source_cls(**keywords)
        return source_instance

    def get_source(self, **keywords):
        """obtain a sheet read source plugin for signature functions"""
        return self.get_a_plugin(
            target=constants.SHEET, action=constants.READ_ACTION, **keywords
        )

    def get_book_source(self, **keywords):
        """obtain a book read source plugin for signature functions"""
        return self.get_a_plugin(
            target=constants.BOOK, action=constants.READ_ACTION, **keywords
        )

    def get_writable_source(self, **keywords):
        """obtain a sheet write source plugin for signature functions"""
        return self.get_a_plugin(
            target=constants.SHEET, action=constants.WRITE_ACTION, **keywords
        )

    def get_writable_book_source(self, **keywords):
        """obtain a book write source plugin for signature functions"""
        return self.get_a_plugin(
            target=constants.BOOK, action=constants.WRITE_ACTION, **keywords
        )

    def get_keyword_for_parameter(self, key):
        """custom keyword for an attribute"""
        return self.keywords.get(key, None)

    def _register_a_plugin_info(self, plugin_info):
        """Register dot-notation attributes for every tag of the plugin."""
        debug_registry = "Source registry: "
        debug_attribute = "Instance attribute: "
        anything = False
        for key in plugin_info.tags():
            target, action = key.split("-")
            attributes = plugin_info.attributes
            if not isinstance(attributes, list):
                attributes = attributes()
            for attr in attributes:
                if attr in NO_DOT_NOTATION:
                    continue
                if target == "book":
                    register_book_attribute(target, action, attr)
                elif target == "sheet":
                    register_sheet_attribute(target, action, attr)
                else:
                    # BUGFIX: this branch fires for targets that are NOT
                    # recognised; the message previously read "Known
                    # target", inverting its meaning.
                    raise Exception("Unknown target: %s" % target)
                debug_attribute += "%s " % attr
                self.keywords[attr] = plugin_info.key
                anything = True
            debug_attribute += ", "
        if anything:
            self._logger.debug(debug_attribute)
            self._logger.debug(debug_registry)
def _error_handler(action, **keywords):
    """Raise the most specific exception for a failed source lookup."""
    if not keywords:
        raise exceptions.UnknownParameters("No parameters found!")
    file_type = keywords.get("file_type", None)
    if file_type:
        raise exceptions.FileTypeNotSupported(
            constants.FILE_TYPE_NOT_SUPPORTED_FMT % (file_type, action)
        )
    # Drop the internal flag before echoing the keywords back to the user.
    keywords.pop("on_demand", None)
    msg = ("Please check if there were typos in "
           "function parameters: %s. Otherwise "
           "unrecognized parameters were given.")
    raise exceptions.UnknownParameters(msg % keywords)
def _get_me_pypi_package_name(module_name):
root_module_name = module_name.split(".")[0]
return root_module_name.replace("_", "-")
SOURCE = SourcePluginManager()
|
chfw/pyexcel
|
pyexcel/internal/source_plugin.py
|
Python
|
bsd-3-clause
| 5,534
|
# -*- coding: utf-8 -*-
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter

from users.api import UserViewSet

# DRF router generating the standard list/detail routes for the user API.
router = DefaultRouter()
router.register('user', UserViewSet, base_name='user')

urlpatterns = [
    url(r'1.0/', include(router.urls)),  # mount router URLs under the API version prefix
]
|
krainet/Wordplease
|
users/api_urls.py
|
Python
|
mit
| 320
|
## Administrator interface for BibIndex
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio OAI Harvest Administrator Interface."""
__revision__ = "$Id$"
import re
import os
import cgi
import time
import urllib
import tempfile
import datetime
from httplib import InvalidURL
from invenio.config import \
CFG_SITE_LANG, \
CFG_TMPDIR, \
CFG_SITE_URL,\
CFG_ETCDIR, \
CFG_BINDIR, \
CFG_LOGDIR
from invenio.oai_harvest_config import CFG_OAI_POSSIBLE_POSTMODES
from invenio.bibrankadminlib import \
write_outcome, \
addadminbox, \
tupletotable, \
createhiddenform
from invenio.dbquery import run_sql
from invenio.oai_harvest_dblayer import get_history_entries, \
get_month_logs_size, get_history_entries_for_day, \
get_day_logs_size, get_entry_history, get_entry_logs_size, \
get_holdingpen_entries, delete_holdingpen_entry, \
get_holdingpen_years, get_holdingpen_month, get_holdingpen_year, \
get_holdingpen_day_fragment, get_holdingpen_entry_details
from invenio.search_engine import get_record
import invenio.template
from invenio import oai_harvest_daemon
from invenio.xmlmarc2textmarc import create_marc_record
from invenio.bibrecord import create_record
from invenio.urlutils import create_html_link
bibharvest_templates = invenio.template.load('bibharvest')
from invenio.messages import gettext_set_language
# Scratch file path unique to this admin process.
tmppath = CFG_TMPDIR + '/oaiharvestadmin.' + str(os.getpid())
# Relative URL of the admin guide linked from every page.
guideurl = "help/admin/oai-admin-guide"
# Absolute base URL of this admin interface's endpoints.
oai_harvest_admin_url = CFG_SITE_URL + \
                        "/admin/bibharvest/oaiharvestadmin.py"
# Harvesting frequency choices as [hours, label]; 0 disables harvesting.
freqs = [[0, "never"],
         [24, "daily"],
         [168, "weekly"],
         [720, "monthly"] ]
# Starting-date choices for a newly added source.
dates = [[0, "from beginning"],
         [1, "from today"]]
def getnavtrail(previous='', ln=CFG_SITE_LANG):
    """Get the navtrail"""
    trail = bibharvest_templates.tmpl_getnavtrail(previous=previous, ln=ln)
    return trail
def generate_sources_actions_menu(oai_src_id, ln=CFG_SITE_LANG):
    """
    Create the links/actions to administrate the given OAI source.
    """
    _ = gettext_set_language(ln)
    link_argd = {'ln': ln,
                 'oai_src_id': str(oai_src_id)}
    # One (endpoint, label) pair per admin action, in display order.
    actions = (("/editsource", _("edit")),
               ("/delsource", _("delete")),
               ("/testsource", _("test")),
               ("/viewhistory", _("history")),
               ("/harvest", _("harvest")))
    links = [create_html_link(urlbase=oai_harvest_admin_url + endpoint,
                              urlargd=link_argd,
                              link_label=label)
             for endpoint, label in actions]
    return " / ".join(links)
def perform_request_index(ln=CFG_SITE_LANG):
    """start area for administering harvesting from OAI repositories

    Renders the admin start page: a table of configured OAI sources with
    per-source action links, plus the harvesting status/schedule tables.
    """
    _ = gettext_set_language(ln)
    titlebar = bibharvest_templates.tmpl_draw_titlebar(ln = ln, title = _("Overview of sources"), guideurl = guideurl, extraname = "add new OAI source" , extraurl = "admin/bibharvest/oaiharvestadmin.py/addsource?ln=" + ln)
    titlebar2 = bibharvest_templates.tmpl_draw_titlebar(ln = ln, title = _("Harvesting status"), guideurl = guideurl)
    header = ['name', 'baseURL', 'metadataprefix', 'frequency',
              'bibconvertfile', 'postprocess', 'actions']
    header2 = ['name', 'last update']
    # NOTE(review): get_oai_src/get_update_status/get_tot_oai_src/
    # get_next_schedule are not among the visible imports -- presumably
    # imported elsewhere in this module; verify.
    oai_src = get_oai_src()
    upd_status = get_update_status()
    sources = []
    for (oai_src_id, oai_src_name, oai_src_baseurl, oai_src_prefix, \
         oai_src_frequency, oai_src_config, oai_src_post, \
         oai_src_bibfilter, oai_src_setspecs) in oai_src:
        default_link_argd = {'ln': ln,
                             'oai_src_id': str(oai_src_id)}
        # Source name links to its edit form.
        namelinked = create_html_link(urlbase=oai_harvest_admin_url + \
                                      "/editsource",
                                      urlargd=default_link_argd,
                                      link_label=cgi.escape(oai_src_name))
        # Translate the stored frequency (hours) into a human label;
        # mirrors the module-level ``freqs`` table.
        freq = _("Not Set")
        if oai_src_frequency == 0: freq = _("never")
        elif oai_src_frequency == 24: freq = _("daily")
        elif oai_src_frequency == 168: freq = _("weekly")
        elif oai_src_frequency == 720: freq = _("monthly")
        action = generate_sources_actions_menu(oai_src_id, ln)
        sources.append([namelinked, oai_src_baseurl, oai_src_prefix,
                        freq, oai_src_config, oai_src_post, action])
    updates = []
    # NOTE: the loop target deliberately rebinds ``upd_status``; safe
    # because the for-statement holds its own iterator, but confusing.
    for (upd_name, upd_status) in upd_status:
        if not upd_status:
            upd_status = bibharvest_templates.tmpl_print_warning(ln, _("Never harvested"))
        else: # cut away fractional seconds from the timestamp string
            upd_status = re.sub(r'\.[0-9]+$', '', str(upd_status))
        updates.append([upd_name, upd_status])
    (schtime, schstatus) = get_next_schedule()
    if schtime:
        # Strip fractional seconds from the scheduled time as well.
        schtime = re.sub(r'\.[0-9]+$', '', str(schtime))
    holdingpen_link = create_html_link(urlbase=oai_harvest_admin_url + \
                                       "/viewholdingpen",
                                       urlargd={'ln': ln},
                                       link_label=_("View Holding Pen"))
    # Assemble the page: sources table, then schedule/status tables.
    output = titlebar
    output += bibharvest_templates.tmpl_output_numbersources(ln, get_tot_oai_src())
    output += tupletotable(header=header, tuple=sources)
    output += bibharvest_templates.tmpl_print_brs(ln, 2)
    output += titlebar2
    output += bibharvest_templates.tmpl_output_schedule(ln, schtime, str(schstatus))
    output += holdingpen_link
    output += bibharvest_templates.tmpl_print_brs(ln, 2)
    output += tupletotable(header=header2, tuple=updates)
    return output
def perform_request_editsource(oai_src_id=None, oai_src_name='',
                               oai_src_baseurl='', oai_src_prefix='',
                               oai_src_frequency='',
                               oai_src_config='',
                               oai_src_post='', ln=CFG_SITE_LANG,
                               confirm=-1, oai_src_sets=None,
                               oai_src_bibfilter=''):
    """creates html form to edit a OAI source. this method is calling other methods which again is calling this and sending back the output of the method.
    confirm - determines the validation status of the data input into the form:
    -1 = first visit (load current values from the DB), 1 = form submitted."""
    _ = gettext_set_language(ln)
    if oai_src_id is None:
        return _("No OAI source ID selected.")
    if oai_src_sets is None:
        oai_src_sets = []
    output = ""
    subtitle = bibharvest_templates.tmpl_draw_subtitle(ln = ln, title = "edit source", subtitle = "Edit OAI source", guideurl = guideurl)
    if confirm in [-1, "-1"]:
        # First visit: prefill the form from the stored source record.
        oai_src = get_oai_src(oai_src_id)
        oai_src_name = oai_src[0][1]
        oai_src_baseurl = oai_src[0][2]
        oai_src_prefix = oai_src[0][3]
        oai_src_frequency = oai_src[0][4]
        oai_src_config = oai_src[0][5]
        oai_src_post = oai_src[0][6]
        oai_src_sets = oai_src[0][7].split()
        oai_src_bibfilter = oai_src[0][8]
    # Build the edit form (always shown, regardless of confirm state).
    text = bibharvest_templates.tmpl_print_brs(ln, 1)
    text += bibharvest_templates.tmpl_admin_w200_text(ln = ln, title = "Source name", name = "oai_src_name", value = oai_src_name)
    text += bibharvest_templates.tmpl_admin_w200_text(ln = ln, title = "Base URL", name = "oai_src_baseurl", value = oai_src_baseurl)
    # NOTE(review): findSets queries the remote repository -- may be slow
    # or fail; verify how errors from it surface here.
    sets = findSets(oai_src_baseurl)
    if sets:
        sets.sort()
        # Show available sets to users
        # NOTE: ``set`` here shadows the builtin within the comprehensions.
        sets_specs = [set[0] for set in sets]
        sets_names = [set[1] for set in sets]
        sets_labels = [((set[1] and set[0]+' ('+set[1]+')') or set[0]) \
                       for set in sets]
        sets_states = [ ((set[0] in oai_src_sets and 1) or 0) for set in sets]
        text += bibharvest_templates.tmpl_admin_checkboxes(ln=ln,
                                                           title="Sets",
                                                           name="oai_src_sets",
                                                           values=sets_specs,
                                                           labels=sets_labels,
                                                           states=sets_states)
    else:
        # Let user specify sets in free textbox
        text += bibharvest_templates.tmpl_admin_w200_text(ln = ln,
                                                          title = "Sets",
                                                          name='oai_src_sets',
                                                          value=' '.join(oai_src_sets))
    text += bibharvest_templates.tmpl_admin_w200_text(ln = ln, \
        title = "Metadata prefix", name = "oai_src_prefix", value = oai_src_prefix)
    text += bibharvest_templates.tmpl_admin_w200_select(ln = ln, \
        title = "Frequency", name = "oai_src_frequency", \
        valuenil = "- select frequency -" , values = freqs, \
        lastval = oai_src_frequency)
    text += bibharvest_templates.tmpl_admin_w200_select(ln = ln, \
        title = "Postprocess", name = "oai_src_post", \
        valuenil = "- select mode -" , values = CFG_OAI_POSSIBLE_POSTMODES, \
        lastval = oai_src_post)
    text += bibharvest_templates.tmpl_admin_w200_text(ln = ln, \
        title = "BibConvert configuration file (if needed by postprocess)", \
        name = "oai_src_config", value = oai_src_config, \
        suffix="<small>Eg: </small><code>oaidc2marcxml.xsl</code>, <code>oaimarc2marcxml.xsl</code><br/>")
    text += bibharvest_templates.tmpl_admin_w200_text(ln = ln, \
        title = "BibFilter program (if needed by postprocess)", \
        name = "oai_src_bibfilter", value = oai_src_bibfilter)
    text += bibharvest_templates.tmpl_print_brs(ln, 2)
    output += createhiddenform(action="editsource#1",
                               text=text,
                               button="Modify",
                               oai_src_id=oai_src_id,
                               ln=ln,
                               confirm=1)
    # Validation chain: on submit (confirm == 1) report the first missing
    # field; only when everything validates is the source modified.
    if confirm in [1, "1"] and not oai_src_name:
        output += bibharvest_templates.tmpl_print_info(ln, "Please enter a name for the source.")
    elif confirm in [1, "1"] and not oai_src_prefix:
        output += bibharvest_templates.tmpl_print_info(ln, "Please enter a metadata prefix.")
    elif confirm in [1, "1"] and not oai_src_baseurl:
        output += bibharvest_templates.tmpl_print_info(ln, "Please enter a base url.")
    elif confirm in [1, "1"] and not oai_src_frequency:
        output += bibharvest_templates.tmpl_print_info(ln, "Please choose a frequency of harvesting")
    elif confirm in [1, "1"] and not oai_src_post:
        output += bibharvest_templates.tmpl_print_info(ln, "Please choose a postprocess mode")
    elif confirm in [1, "1"] and oai_src_post.startswith("h-c") and (not oai_src_config or validatefile(oai_src_config)!=0):
        output += bibharvest_templates.tmpl_print_info(ln, "You selected a postprocess mode which involves conversion.")
        output += bibharvest_templates.tmpl_print_info(ln, "Please enter a valid name of or a full path to a BibConvert config file or change postprocess mode.")
    elif oai_src_id > -1 and confirm in [1, "1"]:
        # All fields valid: normalise optional fields and persist.
        if not oai_src_frequency:
            oai_src_frequency = 0
        if not oai_src_config:
            oai_src_config = "NULL"
        if not oai_src_post:
            oai_src_post = "h"
        res = modify_oai_src(oai_src_id, oai_src_name, oai_src_baseurl, oai_src_prefix, oai_src_frequency, oai_src_config, oai_src_post, oai_src_sets, oai_src_bibfilter)
        output += write_outcome(res)
    output += bibharvest_templates.tmpl_print_brs(ln, 2)
    output += create_html_link(urlbase=oai_harvest_admin_url + \
                               "/index",
                               urlargd={'ln': ln},
                               link_label=_("Go back to the OAI sources overview"))
    body = [output]
    return addadminbox(subtitle, body)
def perform_request_addsource(oai_src_name=None, oai_src_baseurl='',
                              oai_src_prefix='', oai_src_frequency='',
                              oai_src_lastrun='', oai_src_config='',
                              oai_src_post='', ln=CFG_SITE_LANG,
                              confirm=-1, oai_src_sets=None,
                              oai_src_bibfilter=''):
    """creates html form to add a new source

    Multi-step flow driven by ``confirm``: <= -1 asks for the base URL,
    0 validates it, 1 shows/handles the full details form."""
    _ = gettext_set_language(ln)
    if oai_src_name is None:
        return "No OAI source name selected."
    if oai_src_sets is None:
        oai_src_sets = []
    subtitle = bibharvest_templates.tmpl_draw_subtitle(ln=ln,
                                                       title="add source",
                                                       subtitle="Add new OAI source",
                                                       guideurl=guideurl)
    output = ""
    if confirm <= -1:
        # Step 1: ask for the repository base URL.
        text = bibharvest_templates.tmpl_print_brs(ln, 1)
        # NOTE(review): prefill is ``oai_src_baseurl + 'http://'``; with the
        # default empty value this shows just 'http://', but any non-empty
        # value yields a malformed prefill -- looks suspicious, verify.
        text += bibharvest_templates.tmpl_admin_w200_text(ln=ln,
                                                          title="Enter the base url",
                                                          name="oai_src_baseurl",
                                                          value=oai_src_baseurl+'http://')
        output = createhiddenform(action="addsource",
                                  text=text,
                                  ln=ln,
                                  button="Validate",
                                  confirm=0)
    # Step 2: URL validated OK (or validation explicitly bypassed with
    # confirm == 1): show the full details form.
    if (confirm not in ["-1", -1] and validate(oai_src_baseurl)[0] == 0) or \
       confirm in ["1", 1]:
        output += bibharvest_templates.tmpl_output_validate_info(ln, 0, str(oai_src_baseurl))
        output += bibharvest_templates.tmpl_print_brs(ln, 2)
        text = bibharvest_templates.tmpl_admin_w200_text(ln=ln,
                                                         title="Source name",
                                                         name="oai_src_name",
                                                         value=oai_src_name)
        metadatas = findMetadataFormats(oai_src_baseurl)
        if metadatas:
            # Show available metadata to user
            prefixes = []
            for value in metadatas:
                prefixes.append([value, str(value)])
            text += bibharvest_templates.tmpl_admin_w200_select(ln=ln,
                                                                title="Metadata prefix",
                                                                name="oai_src_prefix",
                                                                valuenil="- select prefix -" ,
                                                                values=prefixes,
                                                                lastval=oai_src_prefix)
        else:
            # Let user specify prefix in free textbox
            text += bibharvest_templates.tmpl_admin_w200_text(ln=ln,
                                                              title="Metadata prefix",
                                                              name="oai_src_prefix",
                                                              value=oai_src_prefix)
        sets = findSets(oai_src_baseurl)
        if sets:
            sets.sort()
            # Show available sets to users
            # NOTE: ``set`` shadows the builtin within these comprehensions.
            sets_specs = [set[0] for set in sets]
            sets_names = [set[1] for set in sets]
            sets_labels = [((set[1] and set[0]+' ('+set[1]+')') or set[0]) \
                           for set in sets]
            sets_states = [ ((set[0] in oai_src_sets and 1) or 0) \
                            for set in sets]
            text += bibharvest_templates.tmpl_admin_checkboxes(ln=ln,
                                                               title="Sets",
                                                               name="oai_src_sets",
                                                               values=sets_specs,
                                                               labels=sets_labels,
                                                               states=sets_states)
        else:
            # Let user specify sets in free textbox
            text += bibharvest_templates.tmpl_admin_w200_text(ln = ln,
                                                              title = "Sets",
                                                              name='oai_src_sets',
                                                              value=' '.join(oai_src_sets))
        text += bibharvest_templates.tmpl_admin_w200_select(ln = ln, \
            title = "Frequency", name = "oai_src_frequency", \
            valuenil = "- select frequency -" , values = freqs, \
            lastval = oai_src_frequency)
        text += bibharvest_templates.tmpl_admin_w200_select(ln = ln, \
            title = "Starting date", name = "oai_src_lastrun", \
            valuenil = "- select a date -" , values = dates, \
            lastval = oai_src_lastrun)
        text += bibharvest_templates.tmpl_admin_w200_select(ln = ln, \
            title = "Postprocess", name = "oai_src_post", \
            valuenil = "- select mode -" , values = CFG_OAI_POSSIBLE_POSTMODES, \
            lastval = oai_src_post)
        text += bibharvest_templates.tmpl_admin_w200_text(ln = ln, \
            title = "BibConvert configuration file (if needed by postprocess)", \
            name = "oai_src_config", value = oai_src_config)
        text += bibharvest_templates.tmpl_admin_w200_text(ln = ln, \
            title = "BibFilter program (if needed by postprocess)", \
            name = "oai_src_bibfilter", value = oai_src_bibfilter)
        text += bibharvest_templates.tmpl_print_brs(ln, 2)
        output += createhiddenform(action="addsource#1",
                                   text=text,
                                   button="Add OAI Source",
                                   oai_src_baseurl=oai_src_baseurl,
                                   ln=ln,
                                   confirm=1)
    elif confirm in ["0", 0] and validate(oai_src_baseurl)[0] > 0:
        # Could not perform first url validation
        output += bibharvest_templates.tmpl_output_validate_info(ln, 1, str(oai_src_baseurl))
        output += bibharvest_templates.tmpl_print_brs(ln, 2)
        output += create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/addsource",
                                   urlargd={'ln': ln},
                                   link_label=_("Try again with another url"))
        output += " " + _("or") + " "
        # "Continue anyway" re-enters this flow with confirm=1 to bypass
        # the URL validation.
        output += create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/addsource",
                                   urlargd={'ln': ln,
                                            'oai_src_baseurl': oai_src_baseurl,
                                            'confirm': '1'},
                                   link_label=_("Continue anyway"))
        output += bibharvest_templates.tmpl_print_brs(ln, 1)
        output += " " + _("or") + " "
        output += bibharvest_templates.tmpl_print_brs(ln, 1)
        output += create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/index",
                                   urlargd={'ln': ln},
                                   link_label=_("Go back to the OAI sources overview"))
    elif confirm not in ["-1", -1] and validate(oai_src_baseurl)[0] > 0:
        # Validation failed on a later step.
        output += bibharvest_templates.tmpl_output_validate_info(ln, 1, str(oai_src_baseurl))
        output += bibharvest_templates.tmpl_print_brs(ln, 2)
        output += create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/addsource",
                                   urlargd={'ln': ln},
                                   link_label=_("Try again"))
        output += bibharvest_templates.tmpl_print_brs(ln, 1)
        output += " " + _("or") + " "
        output += bibharvest_templates.tmpl_print_brs(ln, 1)
        output += create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/index",
                                   urlargd={'ln': ln},
                                   link_label=_("Go back to the OAI sources overview"))
    elif confirm not in ["-1", -1]:
        # Validation raised an error (validate()[1] carries the message).
        lnargs = [["ln", ln]]
        output += bibharvest_templates.tmpl_output_error_info(ln, str(oai_src_baseurl), validate(oai_src_baseurl)[1])
        output += bibharvest_templates.tmpl_print_brs(ln, 2)
        output += create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/addsource",
                                   urlargd={'ln': ln},
                                   link_label=_("Try again"))
        output += bibharvest_templates.tmpl_print_brs(ln, 1)
        output += " " + _("or") + " "
        output += bibharvest_templates.tmpl_print_brs(ln, 1)
        output += create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/index",
                                   urlargd={'ln': ln},
                                   link_label=_("Go back to the OAI sources overview"))
    # Field-level validation of the submitted details form; only when all
    # fields pass is the source actually added.
    if confirm in [1, "1"] and not oai_src_name:
        output += bibharvest_templates.tmpl_print_info(ln, "Please enter a name for the source.")
    elif confirm in [1, "1"] and not oai_src_prefix:
        output += bibharvest_templates.tmpl_print_info(ln, "Please enter a metadata prefix.")
    elif confirm in [1, "1"] and not oai_src_frequency:
        output += bibharvest_templates.tmpl_print_info(ln, "Please choose a frequency of harvesting")
    elif confirm in [1, "1"] and not oai_src_lastrun:
        output += bibharvest_templates.tmpl_print_info(ln, "Please choose the harvesting starting date")
    elif confirm in [1, "1"] and not oai_src_post:
        output += bibharvest_templates.tmpl_print_info(ln, "Please choose a postprocess mode")
    elif confirm in [1, "1"] and oai_src_post.startswith("h-c") and (not oai_src_config or validatefile(oai_src_config)!=0):
        output += bibharvest_templates.tmpl_print_info(ln, "You selected a postprocess mode which involves conversion.")
        output += bibharvest_templates.tmpl_print_info(ln, "Please enter a valid name of or a full path to a BibConvert config file or change postprocess mode.")
    elif oai_src_name and confirm in [1, "1"]:
        # Normalise optional fields before persisting the new source.
        if not oai_src_frequency:
            oai_src_frequency = 0
        if not oai_src_lastrun:
            oai_src_lastrun = 1
        if not oai_src_config:
            oai_src_config = "NULL"
        if not oai_src_post:
            oai_src_post = "h"
        res = add_oai_src(oai_src_name, oai_src_baseurl,
                          oai_src_prefix, oai_src_frequency,
                          oai_src_lastrun, oai_src_config,
                          oai_src_post, oai_src_sets,
                          oai_src_bibfilter)
        output += write_outcome(res)
        lnargs = [["ln", ln]]
        output += bibharvest_templates.tmpl_print_brs(ln, 2)
        output += create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/index",
                                   urlargd={'ln': ln},
                                   link_label=_("Go back to the OAI sources overview"))
    body = [output]
    return addadminbox(subtitle, body)
def perform_request_delsource(oai_src_id=None, ln=CFG_SITE_LANG,
                              callback='yes', confirm=0):
    """creates html form to delete a source

    confirm == 0 renders the confirmation question; confirm == 1 performs
    the deletion and reports the outcome.
    """
    _ = gettext_set_language(ln)
    output = ""
    subtitle = ""
    if oai_src_id:
        oai_src = get_oai_src(oai_src_id)
        namesrc = (oai_src[0][1])
        pagetitle = """Delete OAI source: %s""" % cgi.escape(namesrc)
        subtitle = bibharvest_templates.tmpl_draw_subtitle(ln = ln, \
            title = "delete source", subtitle = pagetitle, guideurl = guideurl)
        output = ""
        if confirm in ["0", 0]:
            if oai_src:
                # Ask the admin to confirm before anything is removed.
                question = """Do you want to delete the OAI source '%s' and all its definitions?""" % cgi.escape(namesrc)
                text = bibharvest_templates.tmpl_print_info(ln, question)
                text += bibharvest_templates.tmpl_print_brs(ln, 3)
                output += createhiddenform(action="delsource#5",
                                           text=text,
                                           button="Confirm",
                                           oai_src_id=oai_src_id,
                                           confirm=1)
            else:
                return bibharvest_templates.tmpl_print_info(ln, "Source specified does not exist.")
        elif confirm in ["1", 1]:
            # Confirmed: delete and report; res[0] == 1 signals success.
            res = delete_oai_src(oai_src_id)
            if res[0] == 1:
                output += bibharvest_templates.tmpl_print_info(ln, "Source removed.")
                output += bibharvest_templates.tmpl_print_brs(ln, 1)
                output += write_outcome(res)
            else:
                output += write_outcome(res)
    output += bibharvest_templates.tmpl_print_brs(ln, 2)
    output += create_html_link(urlbase=oai_harvest_admin_url + \
                               "/index",
                               urlargd={'ln': ln},
                               link_label=_("Go back to the OAI sources overview"))
    body = [output]
    return addadminbox(subtitle, body)
def perform_request_testsource(oai_src_id=None, ln=CFG_SITE_LANG,
                               callback='yes', confirm=0,
                               record_id=None):
    """Test OAI source page

    Shows a form asking for a record identifier; once one is given, embeds
    previews of the raw OAI XML and of the post-transformation MARC XML.
    """
    _ = gettext_set_language(ln)
    if oai_src_id is None:
        return _("No OAI source ID selected.")
    result = ""
    result += bibharvest_templates.tmpl_output_menu(ln, oai_src_id, guideurl)
    result += bibharvest_templates.tmpl_draw_titlebar(ln = ln, title = \
        "Record ID ( Recognized by the data source )", guideurl=guideurl)
    record_str = ""
    if record_id != None:
        record_str = str(record_id)
    form_text = bibharvest_templates.tmpl_admin_w200_text(ln = ln, title = \
        "Record identifier", name = "record_id", value = record_str)
    result += createhiddenform(action="testsource",
                               text=form_text,
                               button="Test",
                               oai_src_id=oai_src_id,
                               ln=ln,
                               confirm=1)
    if record_id != None:
        # A record was requested: embed the raw and transformed previews
        # (served by the preview_* endpoints of this admin interface).
        result += bibharvest_templates.tmpl_draw_titlebar(ln = ln, title = \
            "OAI XML downloaded from the source" , guideurl = guideurl)
        result += bibharvest_templates.tmpl_embed_document( \
            "/admin/bibharvest/oaiharvestadmin.py/preview_original_xml?oai_src_id=" \
            + str(oai_src_id) + "&record_id=" \
            + str(record_id))
        result += bibharvest_templates.tmpl_draw_titlebar(ln = ln, title = \
            "MARC XML after all the transformations", guideurl = guideurl)
        result += bibharvest_templates.tmpl_embed_document( \
            "/admin/bibharvest/oaiharvestadmin.py/preview_harvested_xml?oai_src_id=" \
            + str(oai_src_id) + "&record_id=" \
            + str(record_id))
    return result
######################################
### Displaying bibsched task logs ###
######################################
def does_logfile_exist(task_id):
    """
    returns logfile name if exists. None otherwise
    """
    candidate = "%s/bibsched_task_%s.log" % (CFG_LOGDIR, task_id)
    if os.path.exists(candidate):
        return candidate
    return None
def does_errfile_exist(task_id):
    """
    returns errfile name if exists. None otherwise
    """
    candidate = "%s/bibsched_task_%s.err" % (CFG_LOGDIR, task_id)
    if os.path.exists(candidate):
        return candidate
    return None
def perform_request_viewtasklogs(ln, confirm, task_id):
    """Display the .log and .err files of a bibsched task.

    @param ln: interface language
    @param confirm: unused, kept for the admin-interface convention
    @param task_id: id of the bibsched task whose logs should be shown
    @return: HTML string embedding the contents of whichever of the two
        files exist
    """
    t_id = int(task_id) # preventing malicious user input
    guideurl = "help/admin/bibharvest-admin-guide"
    log_name = does_logfile_exist(t_id)
    err_name = does_errfile_exist(t_id)
    result = ""
    result += bibharvest_templates.tmpl_output_menu(ln, None, guideurl)
    if log_name != None:
        # read the whole file; close the handle even if reading fails
        # (the original shadowed the builtin `file` and leaked on error)
        log_file = open(log_name)
        try:
            content = log_file.read()
        finally:
            log_file.close()
        result += bibharvest_templates.tmpl_draw_titlebar(ln, "Log file : " + \
                                                          log_name, guideurl)
        result += bibharvest_templates.tmpl_output_scrollable_frame(\
            bibharvest_templates.tmpl_output_preformatted(content))
    if err_name != None:
        err_file = open(err_name)
        try:
            content = err_file.read()
        finally:
            err_file.close()
        result += bibharvest_templates.tmpl_print_brs(ln, 2)
        result += bibharvest_templates.tmpl_draw_titlebar(ln, "Log file : " + \
                                                          err_name, guideurl)
        result += bibharvest_templates.tmpl_output_scrollable_frame(\
            bibharvest_templates.tmpl_output_preformatted(content))
    return result
### Probably should be moved to some other data-connection file
def build_history_row(item, ln, show_selection, show_oai_source, show_record_ids, identifier = ""):
    """Render a single <tr> row of a harvesting-history table.

    @param item: history entry exposing date_harvested, date_inserted,
        oai_id, record_id, inserted_to_db, bibupload_task_id and
        oai_src_id attributes
    @param ln: interface language
    @param show_selection: when True, append a "reharvest" checkbox cell
    @param show_oai_source: when True, append a cell with the source id
    @param show_record_ids: when True, include the OAI-id and record-id
        link cells
    @param identifier: value stored in the checkbox, used to group
        checkboxes belonging to the same entry
    @return: HTML string for one table row
    """
    # alternate the two CSS classes used to stripe the columns
    def get_cssclass(cssclass):
        if cssclass == "oddtablecolumn":
            return "pairtablecolumn"
        else:
            return "oddtablecolumn"
    cssclass = get_cssclass("pairtablecolumn")
    result = bibharvest_templates.tmpl_table_row_begin()
    # harvest and insert timestamps
    result += bibharvest_templates.tmpl_table_output_cell(\
        bibharvest_templates.format_date(item.date_harvested) + " " + \
        bibharvest_templates.format_time(item.date_harvested), cssclass = cssclass)
    cssclass = get_cssclass(cssclass)
    result += bibharvest_templates.tmpl_table_output_cell(\
        bibharvest_templates.format_date(item.date_inserted) + " " + \
        bibharvest_templates.format_time(item.date_inserted), cssclass = cssclass)
    if show_record_ids:
        # link to the per-record history page
        record_history_link = create_html_link(urlbase=oai_harvest_admin_url + \
                                               "/viewentryhistory",
                                               urlargd={'ln': ln,
                                                        'oai_id': str(item.oai_id),
                                                        'start': "0"},
                                               link_label=str(item.oai_id))
        cssclass = get_cssclass(cssclass)
        result += bibharvest_templates.tmpl_table_output_cell(record_history_link, \
                                                              cssclass = cssclass)
        # link to the record's detailed page in Invenio
        record_details_link = create_html_link(CFG_SITE_URL + \
                                               "/record/" + str(item.record_id),
                                               urlargd={'ln': ln},
                                               link_label=str(item.record_id))
        cssclass = get_cssclass(cssclass)
        result += bibharvest_templates.tmpl_table_output_cell(record_details_link, \
                                                              cssclass = cssclass)
    cssclass = get_cssclass(cssclass)
    result += bibharvest_templates.tmpl_table_output_cell(item.inserted_to_db, \
                                                          cssclass = cssclass)
    cssclass = get_cssclass(cssclass)
    # show the bibupload task number; link it to its logs when they exist
    task_id = str(item.bibupload_task_id)
    if does_errfile_exist(item.bibupload_task_id) or does_logfile_exist(item.bibupload_task_id):
        task_id = create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/viewtasklogs",
                                   urlargd={'ln': ln,
                                            'task_id': str(item.bibupload_task_id)},
                                   link_label=str(item.bibupload_task_id))
    result += bibharvest_templates.tmpl_table_output_cell(task_id, cssclass = cssclass)
    if show_selection:
        # checkbox used by the "reharvest selected records" form
        chkbox = bibharvest_templates.tmpl_output_checkbox(item.oai_id, identifier, "1")
        cssclass = get_cssclass(cssclass)
        result += bibharvest_templates.tmpl_table_output_cell(chkbox, \
                                                              cssclass = cssclass)
    if show_oai_source:
        cssclass = get_cssclass(cssclass)
        result += bibharvest_templates.tmpl_table_output_cell(str(item.oai_src_id), \
                                                              cssclass = cssclass)
    result += bibharvest_templates.tmpl_table_row_end()
    return result
def build_history_table_header(show_selection = True, show_oai_source = False, \
                               show_record_ids = True):
    """Build the list of column titles for a harvesting-history table.

    @param show_selection: include the "Reharvest" checkbox column
    @param show_oai_source: include the originating-source column
    @param show_record_ids: include the OAI-id / Invenio-id columns
    @return: list of header strings, in display order
    """
    columns = ["Harvesting Date", "Insert date"]
    if show_record_ids:
        columns.extend(["Record ID ( OAI )", "Rec. ID <br/>(Invenio)"])
    columns.extend(["DB", "task <br/> number"])
    optional = [("Reharvest", show_selection),
                ("Harvested from <br/> source no", show_oai_source)]
    for title, wanted in optional:
        if wanted:
            columns.append(title)
    return columns
def build_month_history_table(oai_src_id, date, ln):
    """Format one month of harvesting history as an HTML table.

    Each day with activity gets a summary row followed by up to
    ``day_limit`` inline entry rows, plus a "details" row when more
    entries exist for that day.

    @param oai_src_id: identifier of the harvesting source
    @param date: date designing the month of interest
    @param ln: interface language
    @return: String containing the history table
    """
    day_limit = 10  # maximum number of per-day entries shown inline
    # NOTE: a redundant get_history_entries() call whose result was never
    # used has been removed here.
    stats = get_month_logs_size(oai_src_id, date)
    headers = build_history_table_header()
    result = bibharvest_templates.tmpl_table_begin(headers)
    identifiers = {}  # day-of-month -> list of checkbox identifiers
    for day in stats:
        # summary row for the day
        result += bibharvest_templates.tmpl_table_row_begin()
        d_date = datetime.datetime(date.year, date.month, day)
        result += bibharvest_templates.tmpl_history_table_output_day_cell(d_date, \
                      stats[day], oai_src_id, ln, stats[day] > day_limit)
        btn = bibharvest_templates.tmpl_output_select_day_button(day)
        result += bibharvest_templates.tmpl_table_output_cell(btn)
        result += bibharvest_templates.tmpl_table_row_end()
        day_data = get_history_entries_for_day(oai_src_id, d_date, limit = day_limit)
        for item in day_data:
            identifier = bibharvest_templates.format_date(item.date_harvested) + \
                     bibharvest_templates.format_time(item.date_harvested) + "_" + item.oai_id
            result += build_history_row(item, ln, show_selection = True, show_oai_source = \
                                        False, show_record_ids = True, identifier = identifier)
            # `not in` instead of the Python-2-only dict.has_key()
            if item.date_harvested.day not in identifiers:
                identifiers[item.date_harvested.day] = []
            identifiers[item.date_harvested.day].append(identifier)
        if stats[day] > day_limit:
            # row linking to the full per-day listing
            result += bibharvest_templates.tmpl_history_table_output_day_details_cell(\
                ln, d_date, oai_src_id)
    result += bibharvest_templates.tmpl_table_end()
    result += bibharvest_templates.tmpl_output_identifiers(identifiers)
    return result
def build_history_table(data, ln = CFG_SITE_LANG, show_selection = True, \
                        show_oai_source = False, show_record_ids = True):
    """Format a list of harvesting-history entries as an HTML table.

    @param data: iterable of history entries (see build_history_row)
    @param ln: interface language
    @param show_selection: include the "reharvest" checkbox column and
        emit the identifier bookkeeping needed by the selection form
    @param show_oai_source: include the originating-source column
    @param show_record_ids: include the OAI-id / Invenio-id columns
    @return: HTML string with the table (and, when show_selection, the
        hidden identifier markup)
    """
    headers = build_history_table_header(show_selection = show_selection, \
          show_oai_source = show_oai_source, show_record_ids = show_record_ids)
    result = bibharvest_templates.tmpl_table_begin(headers)
    identifiers = {}  # day-of-month -> list of checkbox identifiers
    for item in data:
        identifier = bibharvest_templates.format_date(item.date_harvested) + \
            bibharvest_templates.format_time(item.date_harvested) + "_" + item.oai_id
        result += build_history_row(item, ln, show_selection = show_selection, \
            show_oai_source = show_oai_source, show_record_ids = show_record_ids, \
            identifier = identifier)
        if show_selection:
            # setdefault instead of the Python-2-only dict.has_key()
            identifiers.setdefault(item.date_harvested.day, []).append(identifier)
    result += bibharvest_templates.tmpl_table_end()
    if show_selection:
        result += bibharvest_templates.tmpl_output_identifiers(identifiers)
    return result
def perform_request_viewhistory(oai_src_id = None, ln = CFG_SITE_LANG, callback = \
                                'yes', confirm = 0, month = None, year = None):
    """Create the HTML page showing one month of harvesting history.

    @param oai_src_id: identifier of the harvesting source
    @param ln: interface language
    @param callback: unused, kept for the admin-interface convention
    @param confirm: unused, kept for the admin-interface convention
    @param month: month to display (string or int); current month if None
    @param year: year to display (string or int); current year if None
    @return: HTML string with the month table wrapped in a reharvest form
    """
    # default to the current month unless both year and month are supplied
    date = datetime.datetime.now()
    if year != None and month != None:
        year = int(year)
        month = int(month)
        date = datetime.datetime(year, month, 1)
    result = ""
    result += bibharvest_templates.tmpl_output_menu(ln, oai_src_id, guideurl)
    result += bibharvest_templates.tmpl_output_history_javascript_functions()
    result += bibharvest_templates.tmpl_output_month_selection_bar(oai_src_id, ln, \
                  current_month = month, current_year = year)
    inner_text = build_month_history_table(oai_src_id, date, ln)
    inner_text += bibharvest_templates.tmpl_print_brs(ln, 1)
    inner_text = bibharvest_templates.tmpl_output_scrollable_frame(inner_text)
    inner_text += bibharvest_templates.tmpl_output_selection_bar()
    # wrap everything in the form that submits selected records for reharvest
    result += createhiddenform(action="/admin/bibharvest/oaiharvestadmin.py/reharvest", \
                               text = inner_text, button = "Reharvest selected records", oai_src_id = \
                               oai_src_id, ln = ln)
    return result
def perform_request_viewhistoryday(oai_src_id=None, ln=CFG_SITE_LANG,
                                   callback='yes', confirm=0,
                                   month=None, year=None, day=None,
                                   start=0):
    """Paginated page of all harvesting-history entries of a single day.

    @param oai_src_id: identifier of the harvesting source
    @param ln: interface language
    @param callback: unused, kept for the admin-interface convention
    @param confirm: unused, kept for the admin-interface convention
    @param year, month, day: day of interest; today when any is missing
    @param start: index of the first entry shown (pagination offset)
    @return: HTML string
    """
    _ = gettext_set_language(ln)
    page_length = 50  # entries per page
    result = ""
    result += bibharvest_templates.tmpl_output_menu(ln, oai_src_id, guideurl)
    considered_date = datetime.datetime.now()
    if year != None and month != None and day != None:
        considered_date = datetime.datetime(year, month, day)
    number_of_records = get_day_logs_size(oai_src_id, considered_date)
    # navigation link back to the month overview
    return_to_month_link = create_html_link(
                          urlbase=oai_harvest_admin_url + \
                          "/viewhistory",
                          urlargd={'ln': ln,
                                   'oai_src_id': str(oai_src_id),
                                   'year': str(considered_date.year),
                                   'month': str(considered_date.month)},
                          link_label="<< " + _("Return to the month view"))
    # "next page" link, only when more entries follow the current page
    next_page_link = ""
    if number_of_records > start + page_length:
        next_page_link = create_html_link(
                        urlbase=oai_harvest_admin_url + \
                        "/viewhistoryday",
                        urlargd={'ln': ln,
                                 'oai_src_id': str(oai_src_id),
                                 'year': str(considered_date.year),
                                 'month': str(considered_date.month),
                                 'day': str(considered_date.day),
                                 'start': str(start + page_length)},
                        link_label=_("Next page") + " >>")
    # "previous page" link, only when not on the first page
    prev_page_link = ""
    if start > 0:
        new_start = start - page_length
        if new_start < 0:
            new_start = 0
        prev_page_link = create_html_link(
                        urlbase=oai_harvest_admin_url + \
                        "/viewhistoryday",
                        urlargd={'ln': ln,
                                 'oai_src_id': str(oai_src_id),
                                 'year': str(considered_date.year),
                                 'month': str(considered_date.month),
                                 'day': str(considered_date.day),
                                 'start': str(new_start)},
                        link_label="<< " + _("Previous page"))
    # clamp the displayed range to the number of available entries
    last_shown = start + page_length
    if last_shown > number_of_records:
        last_shown = number_of_records
    current_day_records = get_history_entries_for_day(oai_src_id, considered_date, limit =\
                              page_length, start = start)
    current_range = " Viewing entries : " + str(start + 1) + "-" + \
                    str(last_shown) + " "
    # Building the interface
    result += bibharvest_templates.tmpl_draw_titlebar(ln, "Viewing history of " + str(year)\
                   + "-" + str(month) + "-" + str(day) , guideurl)
    result += prev_page_link + current_range + next_page_link + \
              bibharvest_templates.tmpl_print_brs(ln, 1)
    result += bibharvest_templates.tmpl_output_history_javascript_functions()
    inner_text = bibharvest_templates.tmpl_output_scrollable_frame(build_history_table(\
        current_day_records, ln=ln))
    inner_text += bibharvest_templates.tmpl_output_selection_bar()
    # form submitting the selected entries for reharvesting
    result += createhiddenform(action="/admin/bibharvest/oaiharvestadmin.py/reharvest", \
                               text = inner_text, button = "Reharvest selected records", oai_src_id = oai_src_id, ln = ln)
    result += return_to_month_link + bibharvest_templates.tmpl_print_brs(ln, 1)
    return result
def perform_request_viewentryhistory(oai_id, ln, confirm, start):
    """Paginated harvesting history of one OAI record.

    @param oai_id: OAI identifier of the record, as known by the source
    @param ln: interface language
    @param confirm: unused, kept for the admin-interface convention
    @param start: index of the first entry shown (pagination offset)
    @return: HTML string
    """
    _ = gettext_set_language(ln)
    page_length = 50  # entries per page
    result = ""
    result += bibharvest_templates.tmpl_output_menu(ln, None, guideurl)
    number_of_records = get_entry_logs_size(oai_id)
    next_page_link = ""
    if number_of_records > start + page_length:
        # BUGFIX: this link used to be assigned to prev_page_link (and was
        # then immediately overwritten), so "Next page" never appeared; it
        # also pointed at /viewhistoryday, which does not accept oai_id.
        next_page_link = create_html_link(
                        urlbase=oai_harvest_admin_url + \
                        "/viewentryhistory",
                        urlargd={'ln': ln,
                                 'oai_id': str(oai_id),
                                 'start': str(start + page_length)},
                        link_label=_("Next page") + " >>")
    prev_page_link = ""
    if start > 0:
        new_start = start - page_length
        if new_start < 0:
            new_start = 0
        prev_page_link = create_html_link(
                        urlbase=oai_harvest_admin_url + \
                        "/viewentryhistory",
                        urlargd={'ln': ln,
                                 'oai_id': str(oai_id),
                                 'start': str(new_start)},
                        link_label="<< " + _("Previous page"))
    # clamp the displayed range to the number of available entries
    last_shown = start + page_length
    if last_shown > number_of_records:
        last_shown = number_of_records
    current_entry_records = get_entry_history(oai_id, limit = page_length, start = start)
    current_range = " Viewing entries : " + str(start + 1) \
                    + "-" + str(last_shown) + " "
    # Building the interface
    result += bibharvest_templates.tmpl_draw_titlebar(ln, "Viewing history of " + \
                  str(oai_id) , guideurl)
    result += prev_page_link + current_range + next_page_link + \
              bibharvest_templates.tmpl_print_brs(ln, 1)
    result += bibharvest_templates.tmpl_output_history_javascript_functions()
    inner_text = bibharvest_templates.tmpl_output_scrollable_frame(\
        build_history_table(current_entry_records, ln, show_selection = False, \
                            show_oai_source = True, show_record_ids = False))
    result += inner_text
    result += bibharvest_templates.tmpl_print_brs(ln, 1)
    return result
############################################################
### The functions allowing to preview the harvested XML ###
############################################################
def harvest_record(record_id , oai_src_baseurl, oai_src_prefix):
    """
    Harvest the given record and return its content as a string.

    @param record_id: record identifier, as known by the source
    @param oai_src_baseurl: base URL of the OAI source
    @param oai_src_prefix: metadata prefix to request
    @return: the oaiharvest standard output (the harvested XML)
    """
    import subprocess  # local import keeps module-level imports untouched
    # argv list instead of a shell string: record_id comes from a web form,
    # so it must never be interpreted by a shell.
    command = [CFG_BINDIR + "/oaiharvest", "-vGetRecord",
               "-i" + record_id, "-p" + oai_src_prefix, oai_src_baseurl]
    process = subprocess.Popen(command, stdout=subprocess.PIPE)
    result = process.communicate()[0]
    return result
def convert_record(oai_src_config, record_to_convert):
    """Run the record through bibconvert with the given configuration.

    @param oai_src_config: bibconvert configuration file of the source
    @param record_to_convert: source XML, fed to bibconvert's stdin
    @return: the converted record (bibconvert's standard output)
    """
    import subprocess  # local import keeps module-level imports untouched
    # subprocess + communicate() instead of the deprecated os.popen3:
    # no shell parsing of the config path and no deadlock when one of the
    # pipes fills while the other is being drained.
    process = subprocess.Popen([CFG_BINDIR + "/bibconvert", "-c", oai_src_config],
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    output, _unused_err = process.communicate(record_to_convert)
    return output
def format_record(oai_src_bibfilter, record_to_convert, treat_new = False):
    """
    Format the record using the given bibfilter program.

    The record is written to a temporary file whose name is passed to the
    filter; the filter is expected to write its result next to it, in
    <tmpname>.insert.xml.

    @param oai_src_bibfilter: admin-configured filter command line
    @param record_to_convert: record XML to be filtered
    @param treat_new: when True, pass the "-n" switch to the filter
    @return: (name of the file containing the result, or None when the
        filter produced no output file; program stdout; program stderr)
    """
    import subprocess  # local import keeps module-level imports untouched
    (file_descriptor, file_name) = tempfile.mkstemp()
    f = os.fdopen(file_descriptor, "w")
    try:
        f.write(record_to_convert)
    finally:
        f.close()
    command = oai_src_bibfilter
    if treat_new:
        command += " -n"
    command += " " + file_name
    # subprocess + communicate() instead of os.popen3 avoids the deadlock
    # risk of draining stdout fully before stderr; shell=True is kept
    # because bibfilterprogram is an admin-configured command string.
    process = subprocess.Popen(command, shell=True,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    out, err = process.communicate()
    os.remove(file_name)  # BUGFIX: the temporary input file used to leak
    if os.path.exists(file_name + ".insert.xml"):
        return (file_name + ".insert.xml", out, err)
    else:
        return (None, out, err)
def harvest_postprocress_record(oai_src_id, record_id, treat_new = False):
    """Harvest one record and run the source's configured post-processing.

    The post-processing steps are selected by the source's ``postprocess``
    flags: "c" runs bibconvert, "f" runs the bibfilter program.

    @param oai_src_id: id of the harvesting source
    @param record_id: identifier of the record at the source
    @param treat_new: forwarded to the bibfilter step ("-n" switch)
    @return: (True, record_xml) on success, (False, error_message) otherwise
    """
    oai_src = get_oai_src(oai_src_id)
    # column positions follow the SELECT issued by get_oai_src()
    oai_src_baseurl = oai_src[0][2]
    oai_src_prefix = oai_src[0][3]
    oai_src_config = oai_src[0][5]
    oai_src_post = oai_src[0][6]
    oai_src_sets = oai_src[0][7].split()
    oai_src_bibfilter = oai_src[0][8]
    result = harvest_record(record_id, oai_src_baseurl, oai_src_prefix)
    if result == None:
        return (False, "Error during harvesting")
    if oai_src_post.find("c") != -1:
        # "c" flag: run the bibconvert transformation
        result = convert_record(oai_src_config, result)
        if result == None:
            return (False, "Error during converting")
    if oai_src_post.find("f") != -1:
        # "f" flag: run the bibfilter program and read back its output file
        fres = format_record(oai_src_bibfilter, result, treat_new = treat_new)
        fname = fres[0]
        if fname != None:
            f = open(fname, "r")
            result = f.read(-1)
            f.close()
            os.remove(fname)
        else:
            return (False, "Error during formatting: " + fres[1] + "\n\n" + fres[2])
    return (True, result)
def upload_record(record = None, uploader_paremeters = None, oai_source_id = None):
    """Schedule an upload of the given record via the bibupload daemon.

    The record is written to a temporary file that is handed over to
    bibupload; the file must outlive this call and is therefore NOT
    removed here.

    @param record: MARC XML string to upload; no-op when None
    @param uploader_paremeters: bibupload command-line switches
        (defaults to replace + insert: ["-r", "-i"])
    @param oai_source_id: id of the originating OAI source
    """
    if record is None:
        return
    if uploader_paremeters is None:
        uploader_paremeters = ["-r", "-i"]
    (file_descriptor, file_name) = tempfile.mkstemp()
    f = os.fdopen(file_descriptor, "w")
    try:
        f.write(record)
    finally:
        f.close()
    oai_harvest_daemon.call_bibupload(file_name, uploader_paremeters, oai_src_id = oai_source_id)
def perform_request_preview_original_xml(oai_src_id = None, record_id = None):
    """Harvest a record and return it. No side effect, useful for preview.

    @param oai_src_id: id of the harvesting source
    @param record_id: identifier of the record at the source
    @return: the raw harvested XML string
    """
    oai_src = get_oai_src(oai_src_id)
    # only the base URL and metadata prefix are needed for a raw download
    # (several unused column reads were removed here)
    oai_src_baseurl = oai_src[0][2]
    oai_src_prefix = oai_src[0][3]
    return harvest_record(record_id, oai_src_baseurl, oai_src_prefix)
def perform_request_preview_harvested_xml(oai_src_id = None, record_id = None):
    """Harvest a record, run all configured transformations and return the
    (success_flag, content) tuple for preview purposes."""
    preview = harvest_postprocress_record(oai_src_id, record_id, treat_new = True)
    return preview
############################################################
### Reharvesting of already existing records ###
############################################################
def perform_request_reharvest_records(oai_src_id = None, ln = CFG_SITE_LANG, confirm=0, record_ids = None):
    """Reharvest the given records and submit them for (re)insertion.

    @param oai_src_id: id of the harvesting source
    @param ln: interface language
    @param confirm: unused, kept for the admin-interface convention
    @param record_ids: iterable of source-side record identifiers
    @return: HTML confirmation string
    """
    # Guard against the default: iterating None would raise TypeError
    if record_ids is None:
        record_ids = []
    for record_id in record_ids:
        # run the full harvesting pipeline, exactly as in the preview pages
        transformed = harvest_postprocress_record(oai_src_id, record_id, treat_new = True)[1]
        upload_record(transformed, ["-i", "-r"], oai_src_id)
    result = bibharvest_templates.tmpl_output_menu(ln, oai_src_id, guideurl)
    result += bibharvest_templates.tmpl_print_info(ln, "Submitted for insertion into the database")
    return result
def perform_request_harvest_record(oai_src_id = None, ln = CFG_SITE_LANG, confirm=0, record_id = None):
    """Request for harvesting a new record.

    Renders the form asking for a source-side record identifier; when
    record_id is already supplied, harvests and post-processes the record
    and submits it for insertion into the database.

    @param oai_src_id: id of the harvesting source (mandatory)
    @param ln: interface language
    @param confirm: unused, kept for the admin-interface convention
    @param record_id: record identifier, as known by the data source
    @return: HTML string
    """
    if oai_src_id is None:
        return "No OAI source ID selected."
    result = ""
    guideurl = "help/admin/bibharvest-admin-guide"
    result += bibharvest_templates.tmpl_output_menu(ln, oai_src_id, guideurl)
    result += bibharvest_templates.tmpl_draw_titlebar(ln = ln, \
                  title = "Record ID ( Recognized by the data source )", guideurl=guideurl)
    # pre-fill the form with the submitted record id, if any
    record_str = ""
    if record_id != None:
        record_str = str(record_id)
    form_text = bibharvest_templates.tmpl_admin_w200_text(ln = ln, \
                    title = "Record identifier", name = "record_id", value = record_str)
    result += createhiddenform(action="harvest",
                               text=form_text,
                               button="Harvest",
                               oai_src_id=oai_src_id,
                               ln=ln,
                               confirm=1)
    if record_id != None:
        # there was a harvest-request
        transformed = harvest_postprocress_record(oai_src_id, record_id)[1]
        upload_record(transformed, ["-i"], oai_src_id)
        result += bibharvest_templates.tmpl_print_info(ln, "Submitted for insertion into the database")
    return result
############################
### Holding pen support ###
############################
def build_holdingpen_table(data, ln=CFG_SITE_LANG):
    """Render Holding Pen entries as an HTML table.

    @param data: iterable of (oai_id, date_inserted, hpupdate_id) rows
    @param ln: interface language
    @return: HTML table with one row per entry, each carrying a
        "compare with original" and a "delete" action link
    """
    _ = gettext_set_language(ln)
    result = ""
    headers = ["OAI Record ID", "Insertion Date", "", ""]
    result += bibharvest_templates.tmpl_table_begin(headers)
    for record in data:
        oai_id = record[0]
        date_inserted = record[1]
        hpupdate_id = record[2]
        result += bibharvest_templates.tmpl_table_row_begin()
        result += bibharvest_templates.tmpl_table_output_cell(str(oai_id), cssclass = "oddtablecolumn")
        result += bibharvest_templates.tmpl_table_output_cell(str(date_inserted), cssclass = "pairtablecolumn")
        # link to a side-by-side comparison with the database version
        details_link = create_html_link(urlbase=oai_harvest_admin_url + \
                                        "/viewhprecord",
                                        urlargd={'ln': ln,
                                                 'hpupdate_id': str(hpupdate_id)},
                                        link_label=_("Compare with original"))
        result += bibharvest_templates.tmpl_table_output_cell(details_link, cssclass = "oddtablecolumn")
        # link that removes this entry from the Holding Pen
        delete_hp_link = create_html_link(urlbase=oai_harvest_admin_url + \
                                          "/delhprecord",
                                          urlargd={'ln': ln,
                                                   'hpupdate_id' : str(hpupdate_id)},
                                          link_label=_("Delete from holding pen"))
        result += bibharvest_templates.tmpl_table_output_cell(delete_hp_link, cssclass = "pairtablecolumn")
        result += bibharvest_templates.tmpl_table_row_end()
    result += bibharvest_templates.tmpl_table_end()
    return result
def perform_request_viewholdingpen(ln = CFG_SITE_LANG, confirm=0, start = 0, limit = -1):
    """Render a slice of the Holding Pen entries as an HTML table.

    @param start: index of the first entry to fetch
    @param limit: maximum number of entries (-1 for no limit)
    """
    entries = get_holdingpen_entries(start, limit)
    return build_holdingpen_table(entries, ln)
def perform_request_viewhprecord(hpupdate_id, ln = CFG_SITE_LANG, confirm=0):
    """Show a Holding Pen entry next to the database version of the record.

    Both versions are rendered as text MARC, followed by links to accept
    the Holding Pen version or to delete it.

    @param hpupdate_id: id of the Holding Pen entry
    @param ln: interface language
    @param confirm: unused, kept for the admin-interface convention
    @return: HTML string, or an error message when anything fails
    """
    _ = gettext_set_language(ln)
    # `except Exception` instead of a bare except: do not swallow
    # KeyboardInterrupt/SystemExit
    try:
        (oai_id, record_id, date_inserted, hprecord_content) = \
            get_holdingpen_entry_details(hpupdate_id)
    except Exception:
        return _("Error when retrieving the Holding Pen entry")
    try:
        db_rec = get_record(record_id)
        db_MARC = create_marc_record(db_rec, record_id, {"text-marc": 1, "aleph-marc": 0})
        db_content = bibharvest_templates.tmpl_output_preformatted(db_MARC)
        db_label = "Database version of record" + bibharvest_templates.tmpl_print_brs(ln, 1)
    except Exception:
        return _("Error when retrieving the record")
    try:
        hp_rec = create_record(hprecord_content)[0]
        hp_MARC = create_marc_record(hp_rec, record_id, {"text-marc": 1, "aleph-marc": 0})
        hp_content = bibharvest_templates.tmpl_output_preformatted(hp_MARC)
        hp_label = bibharvest_templates.tmpl_print_brs(ln, 2) + "Holdingpen version of record"\
                   + bibharvest_templates.tmpl_print_brs(ln, 1)
    except Exception:
        return _("Error when formatting the Holding Pen entry. Probably its content is broken")
    submit_link = create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/accepthprecord",
                                   urlargd={'ln': ln,
                                            'hpupdate_id': hpupdate_id},
                                   link_label=_("Accept Holding Pen version"))
    # BUGFIX: the delete handler expects hpupdate_id; this link used to pass
    # oai_id/date_inserted, which /delhprecord does not accept.
    delete_link = create_html_link(urlbase=oai_harvest_admin_url + \
                                   "/delhprecord",
                                   urlargd={'ln': ln,
                                            'hpupdate_id': str(hpupdate_id)},
                                   link_label=_("Delete from holding pen"))
    result = ""
    result += db_label
    result += db_content
    result += hp_label
    result += hp_content
    result += delete_link + "&nbsp;&nbsp;&nbsp;&nbsp;"
    result += submit_link
    return result
def perform_request_delhprecord(hpupdate_id, ln = CFG_SITE_LANG, confirm = 0):
    """Remove one entry from the Holding Pen and report the deletion."""
    delete_holdingpen_entry(hpupdate_id)
    return "Record deleted from the holding pen"
def perform_request_accepthprecord(hpupdate_id, ln = CFG_SITE_LANG, confirm = 0):
    """Accept a Holding Pen entry: upload its XML to the database, drop it
    from the Holding Pen and re-render the Holding Pen tree."""
    entry_details = get_holdingpen_entry_details(hpupdate_id)
    record_xml = entry_details[3]
    delete_holdingpen_entry(hpupdate_id)
    upload_record(record_xml)
    return perform_request_view_holdingpen_tree("")
# new functions for the holding pen
def perform_request_gethpyears(prefix, filter):
    """Return <li> markup for every year having Holding Pen entries,
    forming the top level of the Holding Pen tree view."""
    template = ("<li id=\"%s_%s_%s\"><span>Year %s (%s entries)</span>"
                " <ul id=\"%s_%s_%s_ul\"></ul></li>")
    pieces = []
    for row in get_holdingpen_years(filter):
        year_str = str(row[0])
        count_str = str(row[1])
        pieces.append(template % (prefix, year_str, filter, year_str,
                                  count_str, prefix, year_str, filter))
    return "".join(pieces)
def perform_request_gethpyear(prefix, year, filter):
    """Return <li> markup for every month of *year* having Holding Pen
    entries, as children of the year node in the tree view."""
    template = ("<li id=\"%s_%s_%s_%s\"><span>%s-%s (%s entries)</span>"
                " <ul id=\"%s_%s_%s_%s_ul\"></ul></li>")
    pieces = []
    for row in get_holdingpen_year(year, filter):
        month_str = str(row[0])
        count_str = str(row[1])
        pieces.append(template % (prefix, year, month_str, filter, year,
                                  month_str, count_str, prefix, year,
                                  month_str, filter))
    return "".join(pieces)
def perform_request_gethpmonth(prefix, year, month, filter):
    """Return <li> markup for every day of *month* having Holding Pen
    entries, as children of the month node in the tree view."""
    template = ("<li id=\"%s_%s_%s_%s_%s\"><span>%s-%s-%s (%s entries)</span>"
                " <ul id=\"%s_%s_%s_%s_%s_ul\"></ul></li>")
    pieces = []
    for row in get_holdingpen_month(year, month, filter):
        day_str = str(row[0])
        count_str = str(row[1])
        pieces.append(template % (prefix, year, month, day_str, filter,
                                  year, month, day_str, count_str,
                                  prefix, year, month, day_str, filter))
    return "".join(pieces)
def perform_request_gethpdayfragment(year, month, day, limit, start, filter):
    """Render one page of the Holding Pen entries of a given day."""
    entries = get_holdingpen_day_fragment(year, month, day, limit, start, filter)
    return build_holdingpen_table(entries, "en")
def view_holdingpen_headers():
    """Return the HTML page headers required by the Holding Pen view."""
    return bibharvest_templates.tmpl_view_holdingpen_headers()
def perform_request_view_holdingpen_tree(filter):
    """Render the Holding Pen tree view, pre-populated with the year
    nodes matching *filter*."""
    years_markup = perform_request_gethpyears("holdingpencontainer", filter)
    return bibharvest_templates.tmpl_view_holdingpen_body(filter, years_markup)
##################################################################
### Here the functions to retrieve, modify, delete and add sources
##################################################################
def get_oai_src(oai_src_id=''):
    """Return the oaiHARVEST row(s): all sources, or a single one by id.

    @param oai_src_id: id of the source; all rows when empty
    @return: tuple of rows (id, name, baseurl, metadataprefix, frequency,
        bibconvertcfgfile, postprocess, setspecs, bibfilterprogram),
        or "" on database error
    """
    sql = "SELECT id,name,baseurl,metadataprefix,frequency,bibconvertcfgfile,postprocess,setspecs,bibfilterprogram FROM oaiHARVEST"
    try:
        if oai_src_id:
            # bind the id as a query parameter instead of interpolating it
            # into the SQL string (SQL-injection fix)
            return run_sql(sql + " WHERE id=%s ORDER BY id asc", (oai_src_id,))
        return run_sql(sql + " ORDER BY id asc")
    except StandardError:
        return ""
def modify_oai_src(oai_src_id, oai_src_name, oai_src_baseurl, oai_src_prefix, oai_src_frequency, oai_src_config, oai_src_post, oai_src_sets=None, oai_src_bibfilter=''):
"""Modifies a row's parameters"""
if oai_src_sets is None:
oai_src_sets = []
try:
res = run_sql("UPDATE oaiHARVEST SET name=%s WHERE id=%s", (oai_src_name, oai_src_id))
res = run_sql("UPDATE oaiHARVEST SET baseurl=%s WHERE id=%s", (oai_src_baseurl, oai_src_id))
res = run_sql("UPDATE oaiHARVEST SET metadataprefix=%s WHERE id=%s", (oai_src_prefix, oai_src_id))
res = run_sql("UPDATE oaiHARVEST SET frequency=%s WHERE id=%s", (oai_src_frequency, oai_src_id))
res = run_sql("UPDATE oaiHARVEST SET bibconvertcfgfile=%s WHERE id=%s", (oai_src_config, oai_src_id))
res = run_sql("UPDATE oaiHARVEST SET postprocess=%s WHERE id=%s", (oai_src_post, oai_src_id))
res = run_sql("UPDATE oaiHARVEST SET setspecs=%s WHERE id=%s", (' '.join(oai_src_sets), oai_src_id))
res = run_sql("UPDATE oaiHARVEST SET bibfilterprogram=%s WHERE id=%s", (oai_src_bibfilter, oai_src_id))
return (1, "")
except StandardError, e:
return (0, e)
def add_oai_src(oai_src_name, oai_src_baseurl, oai_src_prefix, oai_src_frequency, oai_src_lastrun, oai_src_config, oai_src_post, oai_src_sets=None, oai_src_bibfilter=''):
"""Adds a new row to the database with the given parameters"""
if oai_src_sets is None:
oai_src_sets = []
try:
if oai_src_lastrun in [0, "0"]: lastrun_mode = 'NULL'
else:
lastrun_mode = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
# lastrun_mode = "'"+lastrun_mode+"'"
run_sql("INSERT INTO oaiHARVEST (id, baseurl, metadataprefix, arguments, comment, bibconvertcfgfile, name, lastrun, frequency, postprocess, bibfilterprogram, setspecs) VALUES (0, %s, %s, NULL, NULL, %s, %s, %s, %s, %s, %s, %s)", \
(oai_src_baseurl, oai_src_prefix, oai_src_config, oai_src_name, lastrun_mode, oai_src_frequency, oai_src_post, oai_src_bibfilter, " ".join(oai_src_sets)))
return (1, "")
except StandardError, e:
return (0, e)
def delete_oai_src(oai_src_id):
"""Deletes a row from the database according to its id"""
try:
res = run_sql("DELETE FROM oaiHARVEST WHERE id=%s" % oai_src_id)
return (1, "")
except StandardError, e:
return (0, e)
def get_tot_oai_src():
    """Return the number of configured OAI sources, or "" on DB error."""
    try:
        rows = run_sql("SELECT COUNT(*) FROM oaiHARVEST")
        return rows[0][0]
    except StandardError:
        return ""
def get_update_status():
    """Return (name, lastrun) for every source, most recently run first,
    or "" on DB error."""
    try:
        rows = run_sql("SELECT name,lastrun FROM oaiHARVEST ORDER BY lastrun desc")
        return rows
    except StandardError:
        return ""
def get_next_schedule():
    """Return (runtime, status) of the next scheduled oaiharvest task,
    or ("", "") when none is scheduled or on DB error."""
    try:
        rows = run_sql("SELECT runtime,status FROM schTASK WHERE proc='oaiharvest'"
                       " AND runtime > now() ORDER by runtime LIMIT 1")
        if rows:
            return rows[0]
        return ("", "")
    except StandardError:
        return ("", "")
def validate(oai_src_baseurl):
    """This function validates a baseURL by opening its URL and 'greping' for the <OAI-PMH> and <Identify> tags:
       Codes:
       0 = okay
       1 = baseURL not valid
       2 = baseURL not found/not existing
       3 = tmp directoy is not writable
       4 = Unknown error

       Returns tuple (code, message)
    """
    try:
        # download the Identify response into the module-level tmppath file
        url = oai_src_baseurl + "?verb=Identify"
        urllib.urlretrieve(url, tmppath)
        # First check if we have xml oai-pmh output
        grepOUT1 = os.popen('grep -iwc "<OAI-PMH" '+tmppath).read()
        if int(grepOUT1) == 0:
            # No.. we have an http error
            return (4, os.popen('cat '+tmppath).read())
        grepOUT2 = os.popen('grep -iwc "<identify>" '+tmppath).read()
        if int(grepOUT2) > 0:
            #print "Valid!"
            return (0, '')
        else:
            #print "Not valid!"
            return (1, '')
    # NOTE(review): this tuple-unpacking except form is Python-2-only, and
    # the local name `errno` shadows the stdlib errno module — verify before
    # any Python 3 migration.
    except IOError, (errno, strerror):
        # Quick error handling for frequent error codes.
        if errno == 13:
            return (3, "Please check permission on %s and retry" % CFG_TMPDIR)
        elif errno == 2 or errno == 'socket error':
            return (2, "Could not connect with URL %s. Check URL or retry when server is available." % url)
        return (4, strerror)
    # NOTE(review): if InvalidURL derives from StandardError in this Python
    # version, this clause makes the InvalidURL handler below unreachable —
    # confirm the exception hierarchy.
    except StandardError, e:
        return (4, "An unknown error has occured")
    except InvalidURL, e:
        return (2, "Could not connect with URL %s. Check URL or retry when server is available." % url)
def validatefile(oai_src_config):
    """This function checks whether the given path to text file exists or not
       0 = okay
       1 = file non existing

       The value is first tried relative to the bibconvert config
       directory, then as a complete path; a config-directory file that is
       empty (or unreadable) deliberately falls through to the
       complete-path attempt.
    """
    CFG_BIBCONVERT_XSL_PATH = "%s%sbibconvert%sconfig" % (CFG_ETCDIR,
                                                          os.sep,
                                                          os.sep)
    path_to_config = (CFG_BIBCONVERT_XSL_PATH + os.sep +
                      oai_src_config)
    if os.path.exists(path_to_config):
        # Try to read in config directory
        try:
            ftmp = open(path_to_config, 'r')
            cfgstr = ftmp.read()
            ftmp.close()
            if cfgstr != "":
                #print "Valid!"
                return 0
        except StandardError, e:
            # unreadable in the config directory: fall through and try the
            # value as a complete path below
            pass
    # Try to read as complete path
    try:
        ftmp = open(oai_src_config, 'r')
        cfgstr = ftmp.read()
        ftmp.close()
        if cfgstr != "":
            #print "Valid!"
            return 0
        else:
            #print "Not valid!"
            return 1
    except StandardError, e:
        return 1
    return 1
def findMetadataFormats(oai_src_baseurl):
    """This function finds the Metadata formats offered by a OAI repository by analysing the output of verb=ListMetadataFormats"""
    formats = []
    url = oai_src_baseurl + "?verb=ListMetadataFormats"
    try:
        urllib.urlretrieve(url, tmppath)
    except IOError:
        return formats
    ftmp = open(tmppath, 'r')
    xmlstr = ftmp.read()
    ftmp.close()
    # everything between <metadataPrefix> and </metadataPrefix>;
    # the text before the first opening tag is skipped
    for chunk in xmlstr.split('<metadataPrefix>')[1:]:
        formats.append(chunk.split('</metadataPrefix>')[0])
    return formats
def findSets(oai_src_baseurl):
    """This function finds the sets offered by a OAI repository
       by analysing the output of verb=ListSets.
       Returns a list of [SetSpec, SetName] pairs (SetName may be None)."""
    url = oai_src_baseurl + "?verb=ListSets"
    sets = {}
    try:
        urllib.urlretrieve(url, tmppath)
    except IOError:
        # BUGFIX: return an empty list (not the empty dict) so the return
        # type is consistent with the success path
        return []
    ftmp = open(tmppath, 'r')
    try:
        xmlstr = ftmp.read()
    finally:
        ftmp.close()
    # the text before the first <set> tag is skipped
    for chunk in xmlstr.split('<set>')[1:]:
        chunks_set = chunk.split('<setSpec>')[1].split("</setSpec>")
        set_spec = chunks_set[0]
        check_set_2 = chunks_set[1].split("<setName>")
        set_name = None
        if len(check_set_2) > 1:
            set_name = check_set_2[1].split("</setName>")[0]
        # keyed by spec so duplicate specs collapse to a single entry
        sets[set_spec] = [set_spec, set_name]
    return sets.values()
|
valkyriesavage/invenio
|
modules/bibharvest/lib/oai_harvest_admin.py
|
Python
|
gpl-2.0
| 65,264
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.