code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
import _plotly_utils.basevalidators
class OrientationValidator(_plotly_utils.basevalidators.EnumeratedValidator):
    """Enumerated validator for the ``contour.colorbar.orientation`` property.

    Accepts exactly one of the values ``"h"`` (horizontal) or ``"v"``
    (vertical).
    """

    def __init__(
        self, plotly_name="orientation", parent_name="contour.colorbar", **kwargs
    ):
        # Pop the overridable defaults out of kwargs first so they cannot be
        # passed to the base initializer twice.
        edit_type = kwargs.pop("edit_type", "colorbars")
        allowed_values = kwargs.pop("values", ["h", "v"])
        super(OrientationValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            edit_type=edit_type,
            values=allowed_values,
            **kwargs
        )
|
plotly/plotly.py
|
packages/python/plotly/plotly/validators/contour/colorbar/_orientation.py
|
Python
|
mit
| 493
|
"""~directions "<start>" "<end>" (in quotes) return a result from the google maps result for directions from <start> to <end>"""
import re
import requests
import googlemaps
from googlemaps import Client as GoogleMaps
from secret_example import GOOGLE_DIRECTION_API
try:
from HTMLParser import HTMLParser
except ImportError:
from html.parser import HTMLParser
ERROR_MSG = "Some unknown error occured"
DIRECTIONS_BASEURL = "http://maps.googleapis.com/maps/api/directions/json"
#to strip HTML tags from the html_instructions string
class MLStripper(HTMLParser):
    """HTML parser that discards markup and accumulates only the text content.

    Used to turn the Directions API's ``html_instructions`` strings into
    plain text.  Works with both the Python 2 and Python 3 ``HTMLParser``.
    """

    def __init__(self):
        try:
            # Python 3: run the normal initialisation (which also calls
            # reset()), decoding character references to text automatically.
            HTMLParser.__init__(self, convert_charrefs=True)
        except TypeError:
            # Python 2's HTMLParser has no convert_charrefs argument.
            HTMLParser.__init__(self)
        # Retained for compatibility with Python < 3.5 where HTMLParser
        # still consulted a `strict` attribute.
        self.strict = False
        # Text fragments collected by handle_data(), joined by get_data().
        self.fed = []

    def handle_data(self, d):
        # Called by feed() for every run of text between tags.
        self.fed.append(d)

    def get_data(self):
        """Return all text gathered so far as a single string."""
        return ''.join(self.fed)
def strip_tags(html):
    """Return *html* with all markup removed, keeping only the text."""
    stripper = MLStripper()
    stripper.feed(html)
    return stripper.get_data()
def unescape(url):
    """Turn Google's quirky ``\\x`` escapes back into ``%`` URL escapes.

    For unclear reasons, Google replaces URL escapes with ``\\x`` escapes;
    this reverses that substitution.
    """
    return url.replace("\\x", "%")
def directions(start, end, unsafe=False):
    """Return plain-text, newline-terminated directions from *start* to *end*.

    Queries the Google Directions web API, strips the HTML markup from each
    step of the first route's first leg, and returns the concatenated
    instructions.  On a non-OK status or an unexpected payload the generic
    ERROR_MSG is returned instead of raising.
    """
    # Constructing the client validates GOOGLE_DIRECTION_API's format even
    # though the HTTP call below goes through requests directly.
    mapService = GoogleMaps(GOOGLE_DIRECTION_API)
    payload = {'origin': start, 'destination': end}
    result = requests.get(DIRECTIONS_BASEURL, params=payload).json()
    try:
        if result['status'] == "OK":
            # Only the first route/leg is reported.
            steps = result['routes'][0]['legs'][0]['steps']
            # One line of plain text per step, each newline-terminated
            # (matches the original "+= step + '\n'" accumulation).
            return ''.join(strip_tags(step['html_instructions']) + '\n'
                           for step in steps)
        else:
            ''' Will be replaced with logging in future'''
            print(result['status'])
            print(result['error_message'])
            return ERROR_MSG
    except KeyError:
        ''' Need to be logged'''
        return ERROR_MSG
def on_message(msg, server):
    """Handle ``~directions "<start>" "<end>"`` chat commands.

    Returns None when the message does not contain the command; otherwise
    extracts the two quoted arguments and delegates to directions().
    """
    text = msg.get("text", "")
    commands = re.findall(r'~directions \".+?\" \".+?\"', text)
    if not commands:
        return
    # Pull the two quoted arguments (quotes stripped) out of the first match.
    quoted = [arg[1:-1] for arg in re.findall('".+?"', commands[0])]
    return directions(quoted[0].encode("utf8"), quoted[1].encode("utf8"))
on_bot_message = on_message
|
Eulercoder/fabulous
|
fabulous/services/directions.py
|
Python
|
gpl-3.0
| 2,201
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
from apache.aurora.executor.common.executor_detector import ExecutorDetector
from apache.aurora.executor.common.path_detector import MesosPathDetector
class Match(object):
    """Minimal stand-in for a regex match object holding fixed named groups.

    The keys mirror the named groups produced by ExecutorDetector's path
    pattern.
    """

    def __init__(self, root, slave, framework, executor, run):
        self._groups = {
            'root': root,
            'slave_id': slave,
            'framework_id': framework,
            'executor_id': executor,
            'run': run,
        }

    def groups(self):
        """Return the stored mapping of group name to value."""
        return self._groups
def test_path_detector():
    """MesosPathDetector.get_paths should yield one sandbox path per
    non-symlink run directory that actually exists on disk."""
    ROOTS = ('/var/lib/mesos1/slaves', '/var/lib/mesos2/slaves')
    FAKE_ROOT = '/var/blah/blah'
    FAKE_CHECKPOINT_DIR = 'ckpt'
    # Three candidate run directories: a 'latest' symlink, a concrete run,
    # and a run on a second slave.
    path1_symlink, path1, path2 = (
        ExecutorDetector.path(Match(ROOTS[0], 'slave001', 'framework1', 'executor1', 'latest')),
        ExecutorDetector.path(Match(ROOTS[0], 'slave001', 'framework1', 'executor1', 'asdf-ghjk')),
        ExecutorDetector.path(Match(ROOTS[1], 'slave002', 'framework2', 'executor2', 'latest')),
    )
    # side_effect tuples are consumed in call order: islink -> (True, False,
    # False) rejects path1_symlink; exists -> (True, False) keeps only the
    # sandbox under path1.
    with mock.patch('glob.glob', return_value=(path1_symlink, path1, path2)) as glob:
        with mock.patch('os.path.islink', side_effect=(True, False, False)) as islink:
            with mock.patch('os.path.exists', side_effect=(True, False)) as exists:
                mpd = MesosPathDetector(root=FAKE_ROOT, sandbox_path=FAKE_CHECKPOINT_DIR)
                paths = list(mpd.get_paths())
                assert len(paths) == 1
                assert paths == [os.path.join(path1, FAKE_CHECKPOINT_DIR)]
    # The detector must glob with wildcards for every PATTERN field under
    # the supplied root.
    expected_glob_pattern = os.path.join(*ExecutorDetector.PATTERN) % {
        'root': FAKE_ROOT,
        'slave_id': '*',
        'framework_id': '*',
        'executor_id': '*',
        'run': '*'
    }
    assert glob.mock_calls == [mock.call(expected_glob_pattern)]
    # Every glob hit is checked for being a symlink, in order.
    assert islink.mock_calls == [
        mock.call(os.path.join(path1_symlink)),
        mock.call(os.path.join(path1)),
        mock.call(os.path.join(path2)),
    ]
    # Only the two non-symlink paths get an existence check on the sandbox.
    assert exists.mock_calls == [
        mock.call(os.path.join(path1, FAKE_CHECKPOINT_DIR)),
        mock.call(os.path.join(path2, FAKE_CHECKPOINT_DIR)),
    ]
|
thinker0/aurora
|
src/test/python/apache/aurora/executor/common/test_path_detector.py
|
Python
|
apache-2.0
| 2,572
|
__author__ = "James Baster (james@jarofgreen.co.uk)"
__copyright__ = "(C) 2009 James Baster. GNU GPL 3."
from django.forms.widgets import Widget
from django.utils.safestring import mark_safe
from django.forms.util import flatatt
from django.utils.encoding import StrAndUnicode, force_unicode
from itertools import chain
from django.utils.html import escape, conditional_escape
class SelectNewOrExisting(Widget):
    """Select widget that also offers a free-text "new" entry.

    Renders a <select> of existing records followed by a text input named
    ``<name>_New``; the JavaScript hook ``SelectNewOrExistingWidgetChange``
    toggles between them.  value_from_datadict returns either an existing
    record's positive integer id or the string ``"NEW:<text>"``.
    """

    def __init__(self, attrs=None, choices=(), old_data=()):
        super(SelectNewOrExisting, self).__init__(attrs)
        # choices can be any iterable, but we may need to render this widget
        # multiple times. Thus, collapse it into a list so it can be consumed
        # more than once.
        self.choices = list(choices)
        # old_data holds the existing records rendered as <option>s;
        # each item is expected to expose .id and .Title (see render_options).
        self.old_data = list(old_data)

    def render(self, name, value, attrs=None, choices=()):
        # A value of the form "NEW:<text>" pre-fills the free-text input;
        # anything else is treated as an existing record id.
        if value is None: value = ''
        final_attrs = self.build_attrs(attrs, name=name)
        final_attrs['onchange'] = "SelectNewOrExistingWidgetChange(this)"
        output = [u'<select%s>' % flatatt(final_attrs)]
        options = self.render_options(value,)
        if options:
            output.append(options)
        if str(value)[0:4] == "NEW:":
            output.append('</select><span id="id_'+name+'_new"> or new: <input type="text" style="width: 50%" name="'+name+'_New" value="'+escape(value[4:])+'"></span>')
        else:
            output.append('</select><span id="id_'+name+'_new"> or new: <input type="text" style="width: 50%" name="'+name+'_New"></span>')
        output.append("<script>SelectNewOrExistingWidgetChange($('id_%s'))</script>" % name)
        return mark_safe(u'\n'.join(output))

    def render_options(self, value):
        # Normalise value to the selected record id; 0 means "nothing
        # selected", which also covers the "NEW:" case.
        if str(value)[0:4] != "NEW:" and value:
            value = int(value)
        else:
            value = 0
        def render_option(option_value, option_label):
            option_value = force_unicode(option_value)
            selected_html = (int(option_value) == int(value)) and u' selected="selected"' or ''
            return u'<option value="%s"%s>%s</option>' % (
                escape(option_value), selected_html,
                conditional_escape(force_unicode(option_label)))
        output = []
        # Leading blank option with id 0 so "nothing selected" is possible.
        output.append(render_option('0', ' '))
        for option in self.old_data:
            output.append(render_option(option.id, option.Title))
        return u'\n'.join(output)

    def value_from_datadict(self, data, files, name):
        # firstly can we load an old one
        # NOTE(review): int(data.get(name)) raises ValueError if the posted
        # select value is ever non-numeric — confirm the form always posts
        # an id from render_options.
        if data.get(name):
            id = int(data.get(name))
            if id > 0:
                return id
        # no? return a new title
        return "NEW:"+data.get(name+'_New','')
|
jarofgreen/MyJobSeek
|
myjobapps/widgetneworexisting.py
|
Python
|
gpl-3.0
| 2,400
|
import numpy as np
# pyIBCC test configuration: a binary classification problem read from a
# table-format crowd-label CSV and scored against a gold-standard matrix.

# Discrete score values annotators can assign, and how many there are.
scores = np.array([0,1])
nScores = len(scores)
# Number of target classes.
nClasses = 2
# Input crowd labels (table format) and output/confusion-matrix locations.
inputFile = "./data/crowdlabels_table_mixed.csv"
tableFormat = True
outputFile = "./data/test.out"
confMatFile = "./data/test_ibcc.mat"
# Gold-standard labels used for evaluation.
goldFile = "./data/goldmat_mixed.csv"
# Hyperparameters: class-proportion prior (nu0) and per-class confusion
# prior counts (alpha0); the diagonal-heavy alpha0 presumably encodes a
# prior belief that annotators are better than chance — confirm against
# the IBCC model documentation.
nu0 = np.array([50,50])
alpha0 = np.array([[2, 1], [1, 2]])
|
CitizenScienceInAstronomyWorkshop/pyIBCC
|
python/tests/config/table_shortgoldmat.py
|
Python
|
mit
| 313
|
# -*- coding: utf-8 -*-
#
# commandify documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
from commandify.version import get_version
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# extensions = ['sphinx.ext.autodoc']
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.doctest',
    'sphinx.ext.todo',
    'sphinx.ext.coverage',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Commandify'
copyright = u'2014, Mark Muetzelfeldt'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = get_version()
# The full version, including alpha/beta/rc tags.
# NOTE(review): assumes get_version('long') yields the full release string —
# confirm against commandify.version.
release = get_version('long')
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Widen the sidebar beyond the default theme's width.
html_theme_options = {
    "sidebarwidth": 320,
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# (This commented default is superseded by the html_theme_options above.)
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'Commandifydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #'papersize': 'letterpaper',
    # The font size ('10pt', '11pt' or '12pt').
    #'pointsize': '10pt',
    # Additional stuff for the LaTeX preamble.
    #'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    ('index', 'commandify.tex', u'Commandify Documentation',
     u'Mark Muetzelfeldt', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'Commandify', u'Commandify Documentation',
     [u'Mark Muetzelfeldt'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'Commandify', u'Commandify Documentation',
     u'Mark Muetzelfeldt', 'Commandify', 'Simple commands through decorators',
     'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
|
markmuetz/commandify
|
docs/conf.py
|
Python
|
mit
| 8,399
|
# -*- coding: UTF-8 -*-
"""
Django settings for archer project.
Generated by 'django-admin startproject' using Django 1.8.17.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import pymysql

# Make PyMySQL act as the MySQLdb driver Django expects.
pymysql.install_as_MySQLdb()

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repository — rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = 'hfusaf2m4ot#7)fkw#di2bu6(cv0@opwmafx5n#6=3d%x^hpl6'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['*']

# Work around 404s on redirects when deployed behind nginx.
USE_X_FORWARDED_HOST = True

# Application definition
INSTALLED_APPS = (
    'django_admin_bootstrapped',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django_apscheduler',
    'sql',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'sql.check_login_middleware.CheckLoginMiddleware',
    'sql.exception_logging_middleware.ExceptionLoggingMiddleware',
)

ROOT_URLCONF = 'archer.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'sql/static')],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'sql.processor.global_info',
            ],
        },
    },
]

WSGI_APPLICATION = 'archer.wsgi.application'

# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_TZ = False
# Date/time formatting: localized formatting is disabled so the explicit
# formats below take effect.
USE_L10N = False
DATETIME_FORMAT = 'Y-m-d H:i:s'
DATE_FORMAT = 'Y-m-d'

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')

# Custom user model: extends the users shown in django admin, pointing at
# class users in sql/models.py.
AUTH_USER_MODEL = "sql.users"

############### Settings below must be adjusted for your own environment ###################
# Session settings
SESSION_COOKIE_AGE = 60 * 30  # 30 minutes
SESSION_SAVE_EVERY_REQUEST = True
SESSION_EXPIRE_AT_BROWSER_CLOSE = True  # cookie expires when the browser closes

# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# MySQL database used by this project itself.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'archer_github',
        'USER': 'root',
        'PASSWORD': '123456',
        'HOST': '127.0.0.1',
        'PORT': '3306'
    }
}

# Address of the Inception component.
INCEPTION_HOST = '127.0.0.1'
INCEPTION_PORT = '6669'

# Used when viewing rollback SQL: tells archer which MySQL server holds the
# rollback info and SQL backed up by Inception.
# Must match the inception_remote_XX settings in the Inception component's
# inception.conf.
INCEPTION_REMOTE_BACKUP_HOST = '192.168.1.12'
INCEPTION_REMOTE_BACKUP_PORT = 3306
INCEPTION_REMOTE_BACKUP_USER = 'inception'
INCEPTION_REMOTE_BACKUP_PASSWORD = 'inception'

# Account lockout duration after failed logins (seconds).
LOCK_TIME_THRESHOLD = 300
# Number of failed logins before the account is locked.
LOCK_CNT_THRESHOLD = 5

# LDAP
ENABLE_LDAP = False
if ENABLE_LDAP:
    import ldap
    # from django_auth_ldap.config import LDAPSearch, GroupOfNamesType
    from django_auth_ldap.config import LDAPSearch, GroupOfUniqueNamesType

    AUTHENTICATION_BACKENDS = (
        'django_auth_ldap.backend.LDAPBackend',  # LDAP authentication is tried first; if it succeeds the later backends are skipped
        'django.contrib.auth.backends.ModelBackend',  # users created manually in the SSO system still work, at lower priority; keep these two lines in this order
    )
    # if use self signed certificate, Remove AUTH_LDAP_GLOBAL_OPTIONS annotations
    # AUTH_LDAP_GLOBAL_OPTIONS={
    #     ldap.OPT_X_TLS_REQUIRE_CERT: ldap.OPT_X_TLS_NEVER
    # }
    AUTH_LDAP_BIND_DN = "cn=xx,dc=xx,dc=xx"
    AUTH_LDAP_BIND_PASSWORD = "xx"
    AUTH_LDAP_SERVER_URI = "ldap://ldap.xx.com"
    AUTH_LDAP_BASEDN = "dc=xx,dc=xx"
    AUTH_LDAP_USER_DN_TEMPLATE = "cn=%(user)s,ou=xx,dc=xx,dc=xx"
    AUTH_LDAP_GROUP_SEARCH = LDAPSearch("ou=xx,dc=xx,dc=xx",
                                        ldap.SCOPE_SUBTREE, "(objectClass=groupOfUniqueNames)"
                                        )
    AUTH_LDAP_GROUP_TYPE = GroupOfUniqueNamesType()
    AUTH_LDAP_ALWAYS_UPDATE_USER = True  # sync user info from LDAP on every login
    AUTH_LDAP_USER_ATTR_MAP = {  # key: archer.sql_users field name, value: LDAP attribute name; used to sync user info
        "username": "xx",
        "display": "xx",
        "email": "xx"
    }
    # AUTH_LDAP_MIRROR_GROUPS = True  # copy LDAP groups into django (mutually exclusive with AUTH_LDAP_FIND_GROUP_PERMS); membership refreshed from LDAP on every login
    # AUTH_LDAP_FIND_GROUP_PERMS = True  # take permissions from LDAP group permissions; django itself creates no groups and queries LDAP on every request
    # AUTH_LDAP_CACHE_GROUPS = True  # effective when FIND_GROUP_PERMS is on: caches group relations instead of querying LDAP per request
    # AUTH_LDAP_GROUP_CACHE_TIMEOUT = 600  # cache timeout

# The logging configuration below helps debug the LDAP integration.
LDAP_LOGS = '/tmp/ldap.log'
DEFAULT_LOGS = '/tmp/default.log'

stamdard_format = '[%(asctime)s][%(threadName)s:%(thread)d]' + \
                  '[task_id:%(name)s][%(filename)s:%(lineno)d] ' + \
                  '[%(levelname)s]- %(message)s'

LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'standard': {  # verbose format
            'format': stamdard_format
        },
    },
    'handlers': {
        'default': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': DEFAULT_LOGS,
            'maxBytes': 1024 * 1024 * 100,  # 100 MB per file
            'backupCount': 5,
            'formatter': 'standard',
        },
        'ldap': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': LDAP_LOGS,
            'maxBytes': 1024 * 1024 * 100,  # 100 MB per file
            'backupCount': 5,
            'formatter': 'standard',
        },
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
        }
    },
    'loggers': {
        'default': {  # default logger, written to DEFAULT_LOGS
            'handlers': ['default'],
            'level': 'DEBUG',
        },
        # 'django.db': {  # print SQL statements to the console, handy in development
        #     'handlers': ['console'],
        #     'level': 'DEBUG',
        #     'propagate': False,
        # },
        'django.request': {  # print error stack traces to the console, handy in development
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'django_auth_ldap': {  # django_auth_ldap module logs go to the ldap handler
            'handlers': ['ldap'],
            'level': 'DEBUG',
            'propagate': True,  # NOTE(review): the original comment warns that propagation records each message twice (root logger '' once, this logger once), yet the value is True — confirm intent
        }
    }
}

# Whether to enable mail notifications: submitting SQL for deployment mails
# the reviewer, and completed executions mail the DBA. 'on' enables, 'off'
# disables; archer treats any other value as disabled.
MAIL_ON_OFF = 'off'

MAIL_SSL = False  # whether to use SSL
MAIL_REVIEW_SMTP_SERVER = 'mail.xxx.com'
MAIL_REVIEW_SMTP_PORT = 25
MAIL_REVIEW_FROM_ADDR = 'archer@xxx.com'  # sender address, also the username used to log in to the SMTP server
MAIL_REVIEW_FROM_PASSWORD = ''  # sender password; if empty no SMTP login is attempted

# Whether to filter dangerous DDL such as [DROP DATABASE] | [DROP TABLE] |
# [TRUNCATE PARTITION] | [TRUNCATE TABLE]:
# 'on': sqlContent is first matched with a regex and any dangerous DDL is
# auto-rejected; 'off': everything is handed straight to Inception, which
# only backs up metadata for such statements.
CRITICAL_DDL_ON_OFF = 'off'

# Whether to enable the SQL query feature; disabling hides the menu and
# related functionality.
QUERY = True
# Controls query results when Inception fails to print the syntax tree;
# consider setting the Inception variable inception_enable_select_star=OFF,
# otherwise "select *" errors out.
# True: validate; on failure refuse to continue and return an error.
# False: skip validation and continue; note this disables table-permission
# checks and data masking for queries that failed to parse.
CHECK_QUERY_ON_OFF = True
# Whether to enable dynamic data masking; result sets are traversed with
# regexes, which slows some queries down.
DATA_MASKING_ON_OFF = True
# Row limit for administrators' online queries.
ADMIN_QUERY_LIMIT = 5000

# Whether to enable slow-query-log management; disabling hides the menu and
# related functionality.
SLOWQUERY = False

# Path to sqladvisor, e.g. '/opt/SQLAdvisor/sqladvisor/sqladvisor'; ''
# disables it and hides the menu and related functionality.
SQLADVISOR = ''

# Whether to enable AliYun RDS management.
ALIYUN_RDS_MANAGE = False
|
jly8866/archer
|
archer/settings.py
|
Python
|
apache-2.0
| 10,140
|
"""
Include the sensor modules that you have created here.
Kervi will load the sensors that are imported here.
"""
#from . import my_sensor
from . import system_sensors
|
kervi/can-bot
|
app/sensors/__init__.py
|
Python
|
mit
| 176
|
# // license
# Copyright 2016, Free Software Foundation.
#
# This file is part of Solent.
#
# Solent is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Solent is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# Solent. If not, see <http://www.gnu.org/licenses/>.
#
# // overview
# Sandbox used for developing gollop-style selection within spin_term.
from solent import Engine
from solent import init_logging
from solent import log
from solent import solent_cpair
from solent import SolentQuitException
from solent import uniq
from solent.util import SpinSelectionUi
from collections import deque
import os
import sys
import time
import traceback
MTU = 1500
CONSOLE_WIDTH = 60
CONSOLE_HEIGHT = 20
I_NEARCAST_SCHEMA = '''
i message h
i field h
message keystroke
field keycode
'''
class CogInterpreter(object):
    """Listens for nearcast keystroke messages; a capital Q quits the app."""

    def __init__(self, cog_h, engine, orb):
        self.cog_h = cog_h
        self.engine = engine
        self.orb = orb

    def on_keystroke(self, keycode):
        # Log every key we see before deciding whether to quit.
        log('key received %s' % keycode)
        if keycode == ord('Q'):
            raise SolentQuitException()
class CogTerm(object):
    """Owns the pygame console: shows banner text, redraws a turn counter,
    forwards keystrokes onto the nearcast and logs mouse selections."""

    def __init__(self, cog_h, engine, orb):
        self.cog_h = cog_h
        self.engine = engine
        self.orb = orb
        #
        self.counter = 0
        # Selection-capable terminal spin; the two callbacks below receive
        # keycode and left-select events from the UI.
        self.spin_term = self.engine.init_spin(
            construct=SpinSelectionUi,
            console_type='pygame',
            cb_selui_keycode=self.cb_selui_keycode,
            cb_selui_lselect=self.cb_selui_lselect)
        self.spin_term.open_console(
            width=CONSOLE_WIDTH,
            height=CONSOLE_HEIGHT)
        self.spin_term.write(
            drop=0,
            rest=0,
            s='Escape toggles selection mode.',
            cpair=solent_cpair('green'))
        self.spin_term.write(
            drop=1,
            rest=0,
            s='Press Q to quit (when selection mode is off).',
            cpair=solent_cpair('green'))
    #
    def orb_turn(self, activity):
        # Redraw the turn counter each orb turn.
        # NOTE(review): s is an int here while the other write() calls pass
        # str — confirm spin_term.write tolerates non-string payloads.
        self.spin_term.write(
            drop=6,
            rest=2,
            s=self.counter,
            cpair=solent_cpair('blue'))
        self.counter += 1
    #
    def cb_selui_keycode(self, cs_selui_keycode):
        keycode = cs_selui_keycode.keycode
        #
        # self.nearcast is not set here; presumably injected by the orb when
        # the cog is registered — verify against orb.init_cog.
        self.nearcast.keystroke(
            keycode=keycode)

    def cb_selui_lselect(self, cs_selui_lselect):
        drop = cs_selui_lselect.drop
        rest = cs_selui_lselect.rest
        c = cs_selui_lselect.c
        cpair = cs_selui_lselect.cpair
        #
        # user makes a selection
        log('xxx cb_selui_lselect drop %s rest %s'%(drop, rest))
def main():
    """Wire up the engine, orb and cogs, then run the event loop until quit."""
    init_logging()
    #
    engine = None
    try:
        engine = Engine(
            mtu=MTU)
        engine.default_timeout = 0.04
        #
        orb = engine.init_orb(
            i_nearcast=I_NEARCAST_SCHEMA)
        orb.init_cog(CogInterpreter)
        orb.init_cog(CogTerm)
        engine.event_loop()
    except SolentQuitException:
        # Normal exit path (Q pressed).
        pass
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed so those can still terminate the
        # process. Unexpected errors are reported, not re-raised.
        traceback.print_exc()
    finally:
        # Always release the engine's resources, whichever way we exited.
        if engine is not None:
            engine.close()
if __name__ == '__main__':
main()
|
solent-eng/solent
|
solent/draft/gollop_box.py
|
Python
|
lgpl-3.0
| 3,595
|
#!/usr/bin/env python -u
import sys
from argparse import ArgumentParser
import time
import json
import yaml
import os
import requests
try:
import boto.ec2
import boto.sqs
from boto.vpc import VPCConnection
from boto.exception import NoAuthHandlerFound, EC2ResponseError
from boto.sqs.message import RawMessage
from boto.ec2.blockdevicemapping import BlockDeviceType, BlockDeviceMapping
except ImportError:
print "boto required for script"
sys.exit(1)
from pprint import pprint
AMI_TIMEOUT = 2700 # time to wait for AMIs to complete(45 minutes)
EC2_RUN_TIMEOUT = 180 # time to wait for ec2 state transition
EC2_STATUS_TIMEOUT = 300 # time to wait for ec2 system status checks
NUM_TASKS = 5 # number of tasks for time summary report
NUM_PLAYBOOKS = 2
class Unbuffered:
    """
    Stream wrapper that flushes after every write; not needed when
    PYTHONUNBUFFERED is set.
    """

    def __init__(self, stream):
        self.stream = stream

    def write(self, data):
        # Push the payload through and force it out immediately.
        stream = self.stream
        stream.write(data)
        stream.flush()

    def __getattr__(self, attr):
        # Delegate every other attribute to the wrapped stream.
        return getattr(self.stream, attr)
sys.stdout = Unbuffered(sys.stdout)
def parse_args():
    """Build and parse the command-line options for an abbey AMI-build run.

    Returns the argparse namespace. --play, --deployment, --environment and
    --cache-id are mandatory; --base-ami and --blessed are mutually
    exclusive ways of choosing the base AMI.
    """
    parser = ArgumentParser()
    parser.add_argument('--noop', action='store_true',
                        help="don't actually run the cmds",
                        default=False)
    parser.add_argument('--secure-vars-file', required=False,
                        metavar="SECURE_VAR_FILE", default=None,
                        help="path to secure-vars from the root of "
                             "the secure repo. By default <deployment>.yml and "
                             "<environment>-<deployment>.yml will be used if they "
                             "exist in <secure-repo>/ansible/vars/. This secure file "
                             "will be used in addition to these if they exist.")
    parser.add_argument('--stack-name',
                        help="defaults to ENVIRONMENT-DEPLOYMENT",
                        metavar="STACK_NAME",
                        required=False)
    parser.add_argument('-p', '--play',
                        help='play name without the yml extension',
                        metavar="PLAY", required=True)
    parser.add_argument('--playbook-dir',
                        help='directory to find playbooks in',
                        default='configuration/playbooks/edx-east',
                        metavar="PLAYBOOKDIR", required=False)
    parser.add_argument('-d', '--deployment', metavar="DEPLOYMENT",
                        required=True)
    parser.add_argument('-e', '--environment', metavar="ENVIRONMENT",
                        required=True)
    parser.add_argument('-v', '--verbose', action='store_true',
                        help="turn on verbosity")
    parser.add_argument('--no-cleanup', action='store_true',
                        help="don't cleanup on failures")
    parser.add_argument('--vars', metavar="EXTRA_VAR_FILE",
                        help="path to extra var file", required=False)
    # Git refs/repos for the configuration repositories baked into the AMI.
    parser.add_argument('--configuration-version', required=False,
                        help="configuration repo gitref",
                        default="master")
    parser.add_argument('--configuration-secure-version', required=False,
                        help="configuration-secure repo gitref",
                        default="master")
    parser.add_argument('--configuration-secure-repo', required=False,
                        default="git@github.com:edx-ops/prod-secure",
                        help="repo to use for the secure files")
    parser.add_argument('--configuration-private-version', required=False,
                        help="configuration-private repo gitref",
                        default="master")
    parser.add_argument('--configuration-private-repo', required=False,
                        default="git@github.com:edx-ops/ansible-private",
                        help="repo to use for private playbooks")
    parser.add_argument('-c', '--cache-id', required=True,
                        help="unique id to use as part of cache prefix")
    parser.add_argument('-i', '--identity', required=False,
                        help="path to identity file for pulling "
                             "down configuration-secure",
                        default=None)
    # EC2 instance parameters.
    parser.add_argument('-r', '--region', required=False,
                        default="us-east-1",
                        help="aws region")
    parser.add_argument('-k', '--keypair', required=False,
                        default="deployment",
                        help="AWS keypair to use for instance")
    parser.add_argument('-t', '--instance-type', required=False,
                        default="m1.large",
                        help="instance type to launch")
    parser.add_argument("--role-name", required=False,
                        default="abbey",
                        help="IAM role name to use (must exist)")
    parser.add_argument("--msg-delay", required=False,
                        default=5,
                        help="How long to delay message display from sqs "
                             "to ensure ordering")
    # Notification / reporting integrations.
    parser.add_argument("--hipchat-room-id", required=False,
                        default=None,
                        help="The API ID of the Hipchat room to post"
                             "status messages to")
    parser.add_argument("--ansible-hipchat-room-id", required=False,
                        default='Hammer',
                        help="The room used by the abbey instance for "
                             "printing verbose ansible run data.")
    parser.add_argument("--hipchat-api-token", required=False,
                        default=None,
                        help="The API token for Hipchat integration")
    parser.add_argument("--callback-url", required=False,
                        default=None,
                        help="The callback URL to send notifications to")
    parser.add_argument("--root-vol-size", required=False,
                        default=50,
                        help="The size of the root volume to use for the "
                             "abbey instance.")
    parser.add_argument("--datadog-api-key", required=False,
                        default="",
                        help="The datadog api key used for capturing task"
                             "and playbook metrics abbey instance.")
    # Base AMI selection: explicit --base-ami or tag-based --blessed lookup.
    group = parser.add_mutually_exclusive_group()
    group.add_argument('-b', '--base-ami', required=False,
                       help="ami to use as a base ami",
                       default="ami-0568456c")
    group.add_argument('--blessed', action='store_true',
                       help="Look up blessed ami for env-dep-play.",
                       default=False)
    return parser.parse_args()
def get_instance_sec_group(vpc_id):
    """
    Return the id of the security group tagged with this play
    inside the given VPC.

    Exits the process when no matching group exists.  (Previously the
    error was written to stderr but execution fell through to
    ``grp_details[0]`` and died with an IndexError; this now fails fast
    the same way the subnet lookup in create_instance_args() does.)
    """
    grp_details = ec2.get_all_security_groups(
        filters={
            'vpc_id': vpc_id,
            'tag:play': args.play
        }
    )
    if len(grp_details) < 1:
        sys.stderr.write("ERROR: Expected atleast one security group, got {}\n".format(
            len(grp_details)))
        # Fail fast instead of raising IndexError on the return below.
        sys.exit(1)
    return grp_details[0].id
def get_blessed_ami():
    """
    Return the id of the single AMI tagged ``blessed`` for the current
    environment/deployment/play.

    Raises:
        Exception: when zero or more than one blessed AMI matches.
    """
    ami_filters = {
        'tag:environment': args.environment,
        'tag:deployment': args.deployment,
        'tag:play': args.play,
        'tag:blessed': True,
    }
    candidates = ec2.get_all_images(filters=ami_filters)
    if len(candidates) != 1:
        raise Exception("ERROR: Expected only one blessed ami, got {}\n".format(
            len(candidates)))
    return candidates[0].id
def create_instance_args():
    """
    Looks up security group, subnet
    and returns arguments to pass into
    ec2.run_instances() including
    user data

    Returns a dict suitable for ``ec2.run_instances(**ec2_args)``.
    Relies on module-level globals: args, stack_name, run_id,
    extra_vars_yml, secure_vars_file, base_ami.
    """
    # Locate the subnet for this play: first via the CloudFormation
    # stack-name tag, then via the cluster/environment/deployment tags.
    vpc = VPCConnection()
    subnet = vpc.get_all_subnets(
        filters={
            'tag:aws:cloudformation:stack-name': stack_name,
            'tag:play': args.play}
    )
    if len(subnet) < 1:
        #
        # try scheme for non-cloudformation builds
        #
        subnet = vpc.get_all_subnets(
            filters={
                'tag:cluster': args.play,
                'tag:environment': args.environment,
                'tag:deployment': args.deployment}
        )
        if len(subnet) < 1:
            sys.stderr.write("ERROR: Expected at least one subnet, got {}\n".format(
                len(subnet)))
            sys.exit(1)
    subnet_id = subnet[0].id
    vpc_id = subnet[0].vpc_id
    security_group_id = get_instance_sec_group(vpc_id)
    # The identity file (if given) is embedded verbatim into the
    # instance user-data so the instance can pull configuration-secure.
    # config_secure is the *string* 'true'/'false' because it is
    # substituted into a bash script below.
    if args.identity:
        config_secure = 'true'
        with open(args.identity) as f:
            identity_contents = f.read()
    else:
        config_secure = 'false'
        identity_contents = "dummy"
    # NOTE: the following triple-quoted string is a bash bootstrap
    # script executed by cloud-init on the launched instance; every
    # byte of it is runtime behavior.  {placeholders} are filled by
    # .format() below; literal backslashes are doubled.
    user_data = """#!/bin/bash
set -x
set -e
exec > >(tee /var/log/user-data.log|logger -t user-data -s 2>/dev/console) 2>&1
base_dir="/var/tmp/edx-cfg"
extra_vars="$base_dir/extra-vars-$$.yml"
secure_identity="$base_dir/secure-identity"
git_ssh="$base_dir/git_ssh.sh"
configuration_version="{configuration_version}"
configuration_secure_version="{configuration_secure_version}"
configuration_private_version="{configuration_private_version}"
environment="{environment}"
deployment="{deployment}"
play="{play}"
config_secure={config_secure}
git_repo_name="configuration"
git_repo="https://github.com/edx/$git_repo_name"
git_repo_secure="{configuration_secure_repo}"
git_repo_secure_name=$(basename $git_repo_secure .git)
git_repo_private="{configuration_private_repo}"
git_repo_private_name=$(basename $git_repo_private .git)
secure_vars_file={secure_vars_file}
environment_deployment_secure_vars="$base_dir/$git_repo_secure_name/ansible/vars/{environment}-{deployment}.yml"
deployment_secure_vars="$base_dir/$git_repo_secure_name/ansible/vars/{deployment}.yml"
instance_id=\\
$(curl http://169.254.169.254/latest/meta-data/instance-id 2>/dev/null)
instance_ip=\\
$(curl http://169.254.169.254/latest/meta-data/local-ipv4 2>/dev/null)
instance_type=\\
$(curl http://169.254.169.254/latest/meta-data/instance-type 2>/dev/null)
playbook_dir="$base_dir/{playbook_dir}"
if $config_secure; then
git_cmd="env GIT_SSH=$git_ssh git"
else
git_cmd="git"
fi
ANSIBLE_ENABLE_SQS=true
SQS_NAME={queue_name}
SQS_REGION=us-east-1
SQS_MSG_PREFIX="[ $instance_id $instance_ip $environment-$deployment $play ]"
PYTHONUNBUFFERED=1
HIPCHAT_TOKEN={hipchat_token}
HIPCHAT_ROOM={hipchat_room}
HIPCHAT_MSG_PREFIX="$environment-$deployment-$play: "
HIPCHAT_FROM="ansible-$instance_id"
HIPCHAT_MSG_COLOR=$(echo -e "yellow\\ngreen\\npurple\\ngray" | shuf | head -1)
DATADOG_API_KEY={datadog_api_key}
# environment for ansible
export ANSIBLE_ENABLE_SQS SQS_NAME SQS_REGION SQS_MSG_PREFIX PYTHONUNBUFFERED
export HIPCHAT_TOKEN HIPCHAT_ROOM HIPCHAT_MSG_PREFIX HIPCHAT_FROM
export HIPCHAT_MSG_COLOR DATADOG_API_KEY
if [[ ! -x /usr/bin/git || ! -x /usr/bin/pip ]]; then
echo "Installing pkg dependencies"
/usr/bin/apt-get update
/usr/bin/apt-get install -y git python-pip python-apt \\
git-core build-essential python-dev libxml2-dev \\
libxslt-dev curl libmysqlclient-dev --force-yes
fi
# upgrade setuptools early to avoid no distributin errors
pip install --upgrade setuptools==18.3.2
rm -rf $base_dir
mkdir -p $base_dir
cd $base_dir
cat << EOF > $git_ssh
#!/bin/sh
exec /usr/bin/ssh -o StrictHostKeyChecking=no -i "$secure_identity" "\$@"
EOF
chmod 755 $git_ssh
if $config_secure; then
cat << EOF > $secure_identity
{identity_contents}
EOF
fi
cat << EOF >> $extra_vars
---
# extra vars passed into
# abbey.py including versions
# of all the repositories
{extra_vars_yml}
# abbey will always run fake migrations
# this is so that the application can come
# up healthy
fake_migrations: true
disable_edx_services: true
COMMON_TAG_EC2_INSTANCE: true
# abbey should never take instances in
# and out of elbs
elb_pre_post: false
EOF
chmod 400 $secure_identity
$git_cmd clone $git_repo $git_repo_name
cd $git_repo_name
$git_cmd checkout $configuration_version
cd $base_dir
if $config_secure; then
$git_cmd clone $git_repo_secure $git_repo_secure_name
cd $git_repo_secure_name
$git_cmd checkout $configuration_secure_version
cd $base_dir
fi
if [[ ! -z $git_repo_private ]]; then
$git_cmd clone $git_repo_private $git_repo_private_name
cd $git_repo_private_name
$git_cmd checkout $configuration_private_version
cd $base_dir
fi
cd $base_dir/$git_repo_name
sudo pip install -r pre-requirements.txt
sudo pip install -r requirements.txt
cd $playbook_dir
if [[ -r "$deployment_secure_vars" ]]; then
extra_args_opts+=" -e@$deployment_secure_vars"
fi
if [[ -r "$environment_deployment_secure_vars" ]]; then
extra_args_opts+=" -e@$environment_deployment_secure_vars"
fi
if $secure_vars_file; then
extra_args_opts+=" -e@$secure_vars_file"
fi
extra_args_opts+=" -e@$extra_vars"
ansible-playbook -vvvv -c local -i "localhost," $play.yml $extra_args_opts
ansible-playbook -vvvv -c local -i "localhost," stop_all_edx_services.yml $extra_args_opts
rm -rf $base_dir
""".format(
        hipchat_token=args.hipchat_api_token,
        hipchat_room=args.ansible_hipchat_room_id,
        configuration_version=args.configuration_version,
        configuration_secure_version=args.configuration_secure_version,
        configuration_secure_repo=args.configuration_secure_repo,
        configuration_private_version=args.configuration_private_version,
        configuration_private_repo=args.configuration_private_repo,
        environment=args.environment,
        deployment=args.deployment,
        play=args.play,
        playbook_dir=args.playbook_dir,
        config_secure=config_secure,
        identity_contents=identity_contents,
        queue_name=run_id,
        extra_vars_yml=extra_vars_yml,
        secure_vars_file=secure_vars_file,
        cache_id=args.cache_id,
        datadog_api_key=args.datadog_api_key)
    # Root volume: gp2, size taken from --root-vol-size (default 50 GB).
    mapping = BlockDeviceMapping()
    root_vol = BlockDeviceType(size=args.root_vol_size,
                               volume_type='gp2')
    mapping['/dev/sda1'] = root_vol
    ec2_args = {
        'security_group_ids': [security_group_id],
        'subnet_id': subnet_id,
        'key_name': args.keypair,
        'image_id': base_ami,
        'instance_type': args.instance_type,
        'instance_profile_name': args.role_name,
        'user_data': user_data,
        'block_device_map': mapping,
    }
    return ec2_args
def poll_sqs_ansible():
    """
    Prints events to the console and
    blocks until a final STATS ansible
    event is read off of SQS.
    SQS does not guarantee FIFO, for that
    reason there is a buffer that will delay
    messages before they are printed to the
    console.

    Returns a ``(elapsed_seconds, task_report)`` tuple: the TS value of
    the final STATS message and the list of per-task report dicts
    (keys: TASK, INVOCATION, DELTA).
    """
    oldest_msg_ts = 0
    buf = []
    task_report = [] # list of tasks for reporting
    last_task = None
    completed = 0
    while True:
        messages = []
        while True:
            # get all available messages on the queue
            msgs = sqs_queue.get_messages(attributes='All')
            if not msgs:
                break
            messages.extend(msgs)
        for message in messages:
            # SQS timestamps are milliseconds; convert to seconds.
            recv_ts = float(
                message.attributes['ApproximateFirstReceiveTimestamp']) * .001
            sent_ts = float(message.attributes['SentTimestamp']) * .001
            try:
                msg_info = {
                    'msg': json.loads(message.get_body()),
                    'sent_ts': sent_ts,
                    'recv_ts': recv_ts,
                }
                buf.append(msg_info)
            except ValueError as e:
                print "!!! ERROR !!! unable to parse queue message, " \
                    "expecting valid json: {} : {}".format(
                        message.get_body(), e)
            if not oldest_msg_ts or recv_ts < oldest_msg_ts:
                oldest_msg_ts = recv_ts
            sqs_queue.delete_message(message)
        now = int(time.time())
        if buf:
            try:
                # Only display once the oldest buffered message has aged
                # past --msg-delay, to compensate for SQS's lack of FIFO.
                if (now - min([msg['recv_ts'] for msg in buf])) > args.msg_delay:
                    # sort by TS instead of recv_ts
                    # because the sqs timestamp is not as
                    # accurate
                    buf.sort(key=lambda k: k['msg']['TS'])
                    to_disp = buf.pop(0)
                    if 'START' in to_disp['msg']:
                        print '\n{:0>2.0f}:{:0>5.2f} {} : Starting "{}"'.format(
                            to_disp['msg']['TS'] / 60,
                            to_disp['msg']['TS'] % 60,
                            to_disp['msg']['PREFIX'],
                            to_disp['msg']['START']),
                    elif 'TASK' in to_disp['msg']:
                        print "\n{:0>2.0f}:{:0>5.2f} {} : {}".format(
                            to_disp['msg']['TS'] / 60,
                            to_disp['msg']['TS'] % 60,
                            to_disp['msg']['PREFIX'],
                            to_disp['msg']['TASK']),
                        last_task = to_disp['msg']['TASK']
                    elif 'OK' in to_disp['msg']:
                        if args.verbose:
                            print "\n"
                            for key, value in to_disp['msg']['OK'].iteritems():
                                print " {:<15}{}".format(key, value)
                        else:
                            invocation = to_disp['msg']['OK']['invocation']
                            module = invocation['module_name']
                            # 'set_fact' does not provide a changed value.
                            if module == 'set_fact':
                                changed = "OK"
                            elif to_disp['msg']['OK']['changed']:
                                changed = "*OK*"
                            else:
                                changed = "OK"
                            print " {}".format(changed),
                        task_report.append({
                            'TASK': last_task,
                            'INVOCATION': to_disp['msg']['OK']['invocation'],
                            'DELTA': to_disp['msg']['delta'],
                        })
                    elif 'FAILURE' in to_disp['msg']:
                        print " !!!! FAILURE !!!!",
                        for key, value in to_disp['msg']['FAILURE'].iteritems():
                            print " {:<15}{}".format(key, value)
                        raise Exception("Failed Ansible run")
                    elif 'STATS' in to_disp['msg']:
                        print "\n{:0>2.0f}:{:0>5.2f} {} : COMPLETE".format(
                            to_disp['msg']['TS'] / 60,
                            to_disp['msg']['TS'] % 60,
                            to_disp['msg']['PREFIX'])
                        # Since 3 ansible plays get run.
                        # We see the COMPLETE message 3 times
                        # wait till the last one to end listening
                        # for new messages.
                        completed += 1
                        if completed >= NUM_PLAYBOOKS:
                            return (to_disp['msg']['TS'], task_report)
            except KeyError:
                print "Failed to print status from message: {}".format(to_disp)
        if not messages:
            # wait 1 second between sqs polls
            time.sleep(1)
def create_ami(instance_id, name, description):
    """
    Create an AMI from the given instance (without rebooting it), wait
    up to AMI_TIMEOUT seconds for it to become available, then tag it
    with environment/deployment/play/version/cache metadata.

    Returns the new image id.  Raises Exception on timeout or on an
    unexpected EC2 error code.
    """
    params = {'instance_id': instance_id,
              'name': name,
              'description': description,
              'no_reboot': True}
    # Pause between tag calls to avoid hammering the AWS API.
    AWS_API_WAIT_TIME = 1
    image_id = ec2.create_image(**params)
    print("Checking if image is ready.")
    for _ in xrange(AMI_TIMEOUT):
        try:
            img = ec2.get_image(image_id)
            if img.state == 'available':
                print("Tagging image.")
                img.add_tag("environment", args.environment)
                time.sleep(AWS_API_WAIT_TIME)
                img.add_tag("deployment", args.deployment)
                time.sleep(AWS_API_WAIT_TIME)
                img.add_tag("play", args.play)
                time.sleep(AWS_API_WAIT_TIME)
                conf_tag = "{} {}".format("http://github.com/edx/configuration", args.configuration_version)
                img.add_tag("version:configuration", conf_tag)
                time.sleep(AWS_API_WAIT_TIME)
                conf_secure_tag = "{} {}".format(args.configuration_secure_repo, args.configuration_secure_version)
                img.add_tag("version:configuration_secure", conf_secure_tag)
                time.sleep(AWS_API_WAIT_TIME)
                img.add_tag("cache_id", args.cache_id)
                time.sleep(AWS_API_WAIT_TIME)
                # Get versions from the instance.
                tags = ec2.get_all_tags(filters={'resource-id': instance_id})
                for tag in tags:
                    if tag.name.startswith('version:'):
                        img.add_tag(tag.name, tag.value)
                        time.sleep(AWS_API_WAIT_TIME)
                break
            else:
                time.sleep(1)
        except EC2ResponseError as e:
            # The image id may not be visible immediately after create.
            if e.error_code == 'InvalidAMIID.NotFound':
                time.sleep(1)
            else:
                raise Exception("Unexpected error code: {}".format(
                    e.error_code))
            time.sleep(1)
    else:
        # for/else: the loop exhausted AMI_TIMEOUT iterations without break.
        raise Exception("Timeout waiting for AMI to finish")
    return image_id
def launch_and_configure(ec2_args):
    """
    Creates an sqs queue, launches an ec2 instance,
    configures it and creates an AMI. Polls
    SQS for updates

    Returns ``(run_summary, ami)`` where run_summary is a list of
    (stage_name, seconds) tuples and ami is the new image id.
    Mutates module globals sqs_queue and instance_id so the cleanup
    code in __main__ can tear them down on failure.
    """
    print "{:<40}".format(
        "Creating SQS queue and launching instance for {}:".format(run_id))
    print
    for k, v in ec2_args.iteritems():
        if k != 'user_data':
            print " {:<25}{}".format(k, v)
    print
    # Exposed as globals so the finally-block in __main__ can clean up.
    global sqs_queue
    global instance_id
    sqs_queue = sqs.create_queue(run_id)
    sqs_queue.set_message_class(RawMessage)
    res = ec2.run_instances(**ec2_args)
    inst = res.instances[0]
    instance_id = inst.id
    print "{:<40}".format(
        "Waiting for instance {} to reach running status:".format(instance_id)),
    status_start = time.time()
    for _ in xrange(EC2_RUN_TIMEOUT):
        try:
            res = ec2.get_all_instances(instance_ids=[instance_id])
        except EC2ResponseError as e:
            # New instance ids can take a moment to become queryable.
            if e.code == "InvalidInstanceID.NotFound":
                print("Instance not found({}), will try again.".format(
                    instance_id))
                time.sleep(1)
                continue
            else:
                raise(e)
        if res[0].instances[0].state == 'running':
            status_delta = time.time() - status_start
            run_summary.append(('EC2 Launch', status_delta))
            print "[ OK ] {:0>2.0f}:{:0>2.0f}".format(
                status_delta / 60,
                status_delta % 60)
            break
        else:
            time.sleep(1)
    else:
        raise Exception("Timeout waiting for running status: {} ".format(
            instance_id))
    print "{:<40}".format("Waiting for system status:"),
    system_start = time.time()
    for _ in xrange(EC2_STATUS_TIMEOUT):
        status = ec2.get_all_instance_status(inst.id)
        if status[0].system_status.status == u'ok':
            system_delta = time.time() - system_start
            run_summary.append(('EC2 Status Checks', system_delta))
            print "[ OK ] {:0>2.0f}:{:0>2.0f}".format(
                system_delta / 60,
                system_delta % 60)
            break
        else:
            time.sleep(1)
    else:
        raise Exception("Timeout waiting for status checks: {} ".format(
            instance_id))
    print
    print "{:<40}".format(
        "Waiting for user-data, polling sqs for Ansible events:")
    # Blocks until the final STATS message arrives from the instance.
    (ansible_delta, task_report) = poll_sqs_ansible()
    run_summary.append(('Ansible run', ansible_delta))
    print
    print "{} longest Ansible tasks (seconds):".format(NUM_TASKS)
    for task in sorted(
            task_report, reverse=True,
            key=lambda k: k['DELTA'])[:NUM_TASKS]:
        print "{:0>3.0f} {}".format(task['DELTA'], task['TASK'])
        print " - {}".format(task['INVOCATION'])
    print
    print "{:<40}".format("Creating AMI:"),
    ami_start = time.time()
    ami = create_ami(instance_id, run_id, run_id)
    ami_delta = time.time() - ami_start
    print "[ OK ] {:0>2.0f}:{:0>2.0f}".format(
        ami_delta / 60,
        ami_delta % 60)
    run_summary.append(('AMI Build', ami_delta))
    total_time = time.time() - start_time
    all_stages = sum(run[1] for run in run_summary)
    # Attribute any unaccounted wall-clock time to an 'Other' bucket.
    if total_time - all_stages > 0:
        run_summary.append(('Other', total_time - all_stages))
    run_summary.append(('Total', total_time))
    return run_summary, ami
def send_hipchat_message(message):
    """
    Report a status message to the configured notification channel.

    Always echoes to stdout.  Prefers the callback URL when one was
    supplied; otherwise posts to the configured Hipchat room.  Hipchat
    errors are printed, never raised.
    """
    print(message)
    if args.callback_url:
        # Fire-and-forget notification; the response is not used
        # (previously bound to an unused local `r`).
        requests.get("{}/{}".format(args.callback_url, message))
    else:
        # If hipchat is configured send the details to the specified room
        if args.hipchat_api_token and args.hipchat_room_id:
            import hipchat
            try:
                # Use a distinct name so the freshly imported `hipchat`
                # module is not shadowed by the client instance.
                hipchat_client = hipchat.HipChat(token=args.hipchat_api_token)
                hipchat_client.message_room(args.hipchat_room_id, 'AbbeyNormal',
                                            message)
            except Exception as e:
                print("Hipchat messaging resulted in an error: %s." % e)
if __name__ == '__main__':
    args = parse_args()
    run_summary = []
    start_time = time.time()
    # Load optional extra vars (kept both as raw YAML text, for embedding
    # into the instance user-data, and as a parsed dict).
    if args.vars:
        with open(args.vars) as f:
            extra_vars_yml = f.read()
            extra_vars = yaml.load(extra_vars_yml)
    else:
        extra_vars_yml = ""
        extra_vars = {}
    if args.secure_vars_file:
        # explicit path to a single
        # secure var file
        secure_vars_file = args.secure_vars_file
    else:
        # 'false' is a bash literal consumed by the user-data script.
        secure_vars_file = 'false'
    if args.stack_name:
        stack_name = args.stack_name
    else:
        stack_name = "{}-{}".format(args.environment, args.deployment)
    try:
        ec2 = boto.ec2.connect_to_region(args.region)
    except NoAuthHandlerFound:
        print 'Unable to connect to ec2 in region :{}'.format(args.region)
        sys.exit(1)
    try:
        sqs = boto.sqs.connect_to_region(args.region)
    except NoAuthHandlerFound:
        print 'Unable to connect to sqs in region :{}'.format(args.region)
        sys.exit(1)
    if args.blessed:
        base_ami = get_blessed_ami()
    else:
        base_ami = args.base_ami
    error_in_abbey_run = False
    try:
        # These are reassigned by launch_and_configure(); initialized here
        # so the finally-block can test them even on early failure.
        sqs_queue = None
        instance_id = None
        run_id = "{}-abbey-{}-{}-{}".format(
            int(time.time() * 100), args.environment, args.deployment, args.play)
        ec2_args = create_instance_args()
        if args.noop:
            print "Would have created sqs_queue with id: {}\nec2_args:".format(
                run_id)
            pprint(ec2_args)
            ami = "ami-00000"
        else:
            run_summary, ami = launch_and_configure(ec2_args)
            print
            print "Summary:\n"
            for run in run_summary:
                print "{:<30} {:0>2.0f}:{:0>5.2f}".format(
                    run[0], run[1] / 60, run[1] % 60)
            print "AMI: {}".format(ami)
            message = 'Finished baking AMI {image_id} for {environment} {deployment} {play}.'.format(
                image_id=ami,
                environment=args.environment,
                deployment=args.deployment,
                play=args.play)
            send_hipchat_message(message)
    except Exception as e:
        message = 'An error occurred building AMI for {environment} ' \
            '{deployment} {play}. The Exception was {exception}'.format(
                environment=args.environment,
                deployment=args.deployment,
                play=args.play,
                exception=repr(e))
        send_hipchat_message(message)
        error_in_abbey_run = True
    finally:
        print
        # Best-effort cleanup of the SQS queue and the builder instance.
        if not args.no_cleanup and not args.noop:
            if sqs_queue:
                print "Cleaning up - Removing SQS queue - {}".format(run_id)
                sqs.delete_queue(sqs_queue)
            if instance_id:
                print "Cleaning up - Terminating instance ID - {}".format(
                    instance_id)
            # Check to make sure we have an instance id.
            if instance_id:
                ec2.terminate_instances(instance_ids=[instance_id])
        if error_in_abbey_run:
            exit(1)
|
lgfa29/configuration
|
util/vpc-tools/abbey.py
|
Python
|
agpl-3.0
| 28,957
|
"""
Mypy plugin to aid with typechecking code that uses Crochet.
"""
import typing
from typing import Callable, Optional
from mypy.plugin import FunctionContext, Plugin # pylint: disable=no-name-in-module
from mypy.types import CallableType, Type, get_proper_type # pylint: disable=no-name-in-module
def plugin(_version: str) -> typing.Type[Plugin]:
    """Entry point mypy calls to load this plugin; the version is ignored."""
    return CrochetMypyPlugin
class CrochetMypyPlugin(Plugin):
    """
    Mypy plugin improving type checking of Crochet's decorator APIs.

    Python's annotation types cannot (yet) express that a
    ``run_in_reactor``-wrapped function keeps the decorated function's
    parameter list, so this plugin copies the argument types onto the
    type mypy deduces for the wrapper.
    """

    def get_function_hook(
        self,
        fullname: str,
    ) -> Optional[Callable[[FunctionContext], Type]]:
        # Only calls to the run_in_reactor decorator get special handling.
        wants_hook = fullname == "crochet.run_in_reactor"
        return _copyargs_callback if wants_hook else None
def _copyargs_callback(ctx: FunctionContext) -> Type:
    """
    Copy the parameter signature of the decorated callable onto the
    return type mypy inferred for the ``run_in_reactor`` call.
    """
    fallback = ctx.default_return_type
    # The decorator takes exactly one positional argument; anything else
    # is left for mypy's default inference.
    if not ctx.arg_types or len(ctx.arg_types[0]) != 1:
        return fallback
    wrapped = get_proper_type(ctx.arg_types[0][0])
    inferred = get_proper_type(fallback)
    if isinstance(wrapped, CallableType) and isinstance(inferred, CallableType):
        # Transfer the full parameter list from the wrapped callable.
        return inferred.copy_modified(
            arg_types=wrapped.arg_types,
            arg_kinds=wrapped.arg_kinds,
            arg_names=wrapped.arg_names,
            variables=wrapped.variables,
            is_ellipsis_args=wrapped.is_ellipsis_args,
        )
    return fallback
|
itamarst/crochet
|
crochet/mypy.py
|
Python
|
mit
| 1,759
|
#!/usr/bin/env python3
import json
from ..basics.elements import *
from ..utils.utils import *
class Element:
    """Dictionary-backed representation of a single XML model element."""

    def __init__(self, xml_element):
        data = {}
        for prop in Elements:
            if prop == Elements.Text:
                # Text is intentionally not stored on the element.
                continue
            if prop == Elements.Type:
                # The raw type encodes both layer and type; split them.
                raw_type = get_element_prop(xml_element, prop)
                layer, kind = get_layer_and_type(break_complex_type_layer(raw_type))
                data['Layer'] = layer
                data[prop.name] = kind
            else:
                data[prop.name] = get_element_prop(xml_element, prop)
        data['relationships'] = []
        data['properties'] = {}
        self.element = data

    def to_dict(self):
        """Return the raw backing dictionary."""
        return self.element

    def to_json(self):
        """Serialize the element to a JSON string (non-ASCII preserved)."""
        return json.dumps(self.element, ensure_ascii = False)

    def pretty_print(self):
        """Print the element as sorted, indented JSON."""
        print(json.dumps(self.element, sort_keys = True, indent = 4, separators = (', ', ': '), ensure_ascii = False))

    def add_relation(self, r):
        """Append a relationship, stored in its dict form."""
        self.element['relationships'].append(r.to_dict())

    def id(self):
        """Return the element's identifier value."""
        return self.element[Elements.Identifier.name]

    def set_properties(self, props):
        """Replace the element's properties mapping."""
        self.element['properties'] = props
|
RafaAguilar/archi2mongodb
|
archimate2mongodb/pkg/elements/element.py
|
Python
|
mit
| 1,195
|
# Copyright 2016-2017 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import struct
import urllib.parse as urlparse
import urllib3
from tempest.api.compute import base
from tempest.common import compute
from tempest import config
from tempest.lib import decorators
CONF = config.CONF
class NoVNCConsoleTestJSON(base.BaseV2ComputeTest):
    """Test the noVNC console of a server.

    Covers the initial HTML fetch from the noVNC proxy, the WebSocket
    upgrade, the RFB (VNC) protocol handshake, and rejection of bad
    console tokens.
    """
    create_default_network = True
    @classmethod
    def skip_checks(cls):
        super(NoVNCConsoleTestJSON, cls).skip_checks()
        if not CONF.compute_feature_enabled.vnc_console:
            raise cls.skipException('VNC Console feature is disabled.')
    def setUp(self):
        super(NoVNCConsoleTestJSON, self).setUp()
        # Populated per-test; closed in tearDown if a test opened one.
        self._websocket = None
    def tearDown(self):
        super(NoVNCConsoleTestJSON, self).tearDown()
        if self._websocket is not None:
            self._websocket.close()
        # NOTE(zhufl): Because server_check_teardown will raise Exception
        # which will prevent other cleanup steps from being executed, so
        # server_check_teardown should be called after super's tearDown.
        self.server_check_teardown()
    @classmethod
    def setup_clients(cls):
        super(NoVNCConsoleTestJSON, cls).setup_clients()
        cls.client = cls.servers_client
    @classmethod
    def resource_setup(cls):
        super(NoVNCConsoleTestJSON, cls).resource_setup()
        cls.server = cls.create_test_server(wait_until="ACTIVE")
        # Microversion >= 2.6 replaces get_vnc_console with
        # get_remote_console.
        cls.use_get_remote_console = False
        if not cls.is_requested_microversion_compatible('2.5'):
            cls.use_get_remote_console = True
    def _validate_novnc_html(self, vnc_url):
        """Verify we can connect to novnc and get back the javascript."""
        resp = urllib3.PoolManager().request('GET', vnc_url)
        # Make sure that the GET request was accepted by the novncproxy
        self.assertEqual(resp.status, 200, 'Got a Bad HTTP Response on the '
                         'initial call: ' + str(resp.status))
        # Do some basic validation to make sure it is an expected HTML document
        resp_data = resp.data.decode()
        # This is needed in the case of example: <html lang="en">
        self.assertRegex(resp_data, '<html.*>',
                         'Not a valid html document in the response.')
        self.assertIn('</html>', resp_data,
                      'Not a valid html document in the response.')
        # Just try to make sure we got JavaScript back for noVNC, since we
        # won't actually use it since not inside of a browser
        self.assertIn('noVNC', resp_data,
                      'Not a valid noVNC javascript html document.')
        self.assertIn('<script', resp_data,
                      'Not a valid noVNC javascript html document.')
    def _validate_rfb_negotiation(self):
        """Verify we can connect to novnc and do the websocket connection."""
        # Turn the Socket into a WebSocket to do the communication
        data = self._websocket.receive_frame()
        self.assertFalse(data is None or not data,
                         'Token must be invalid because the connection '
                         'closed.')
        # Parse the RFB version from the data to make sure it is valid
        # and belong to the known supported RFB versions.
        version = float("%d.%d" % (int(data[4:7], base=10),
                                   int(data[8:11], base=10)))
        # Add the max RFB versions supported
        supported_versions = [3.3, 3.8]
        self.assertIn(version, supported_versions,
                      'Bad RFB Version: ' + str(version))
        # Send our RFB version to the server
        self._websocket.send_frame(data)
        # Get the sever authentication type and make sure None is supported
        data = self._websocket.receive_frame()
        self.assertIsNotNone(data, 'Expected authentication type None.')
        data_length = len(data)
        if version == 3.3:
            # For RFB 3.3: in the security handshake, rather than a two-way
            # negotiation, the server decides the security type and sends a
            # single word(4 bytes).
            self.assertEqual(
                data_length, 4, 'Expected authentication type None.')
            self.assertIn(1, [int(data[i]) for i in (0, 3)],
                          'Expected authentication type None.')
        else:
            self.assertGreaterEqual(
                len(data), 2, 'Expected authentication type None.')
            self.assertIn(
                1,
                [int(data[i + 1]) for i in range(int(data[0]))],
                'Expected authentication type None.')
            # Send to the server that we only support authentication
            # type None
            self._websocket.send_frame(bytes((1,)))
            # The server should send 4 bytes of 0's if security
            # handshake succeeded
            data = self._websocket.receive_frame()
            self.assertEqual(
                len(data), 4,
                'Server did not think security was successful.')
            self.assertEqual(
                [int(i) for i in data], [0, 0, 0, 0],
                'Server did not think security was successful.')
        # Say to leave the desktop as shared as part of client initialization
        self._websocket.send_frame(bytes((1,)))
        # Get the server initialization packet back and make sure it is the
        # right structure where bytes 20-24 is the name length and
        # 24-N is the name
        data = self._websocket.receive_frame()
        data_length = len(data) if data is not None else 0
        self.assertFalse(data_length <= 24 or
                         data_length != (struct.unpack(">L",
                                                       data[20:24])[0] + 24),
                         'Server initialization was not the right format.')
        # Since the rest of the data on the screen is arbitrary, we will
        # close the socket and end our validation of the data at this point
        # Assert that the latest check was false, meaning that the server
        # initialization was the right format
        self.assertFalse(data_length <= 24 or
                         data_length != (struct.unpack(">L",
                                                       data[20:24])[0] + 24))
    def _validate_websocket_upgrade(self):
        """Verify that the websocket upgrade was successful.
        Parses response and ensures that required response
        fields are present and accurate.
        (https://tools.ietf.org/html/rfc7231#section-6.2.2)
        """
        self.assertTrue(
            self._websocket.response.startswith(b'HTTP/1.1 101 Switching '
                                                b'Protocols'),
            'Incorrect HTTP return status code: {}'.format(
                str(self._websocket.response)
            )
        )
        _required_header = 'upgrade: websocket'
        _response = str(self._websocket.response).lower()
        self.assertIn(
            _required_header,
            _response,
            'Did not get the expected WebSocket HTTP Response.'
        )
    @decorators.idempotent_id('c640fdff-8ab4-45a4-a5d8-7e6146cbd0dc')
    def test_novnc(self):
        """Test accessing novnc console of server"""
        if self.use_get_remote_console:
            body = self.client.get_remote_console(
                self.server['id'], console_type='novnc',
                protocol='vnc')['remote_console']
        else:
            body = self.client.get_vnc_console(self.server['id'],
                                               type='novnc')['console']
        self.assertEqual('novnc', body['type'])
        # Do the initial HTTP Request to novncproxy to get the NoVNC JavaScript
        self._validate_novnc_html(body['url'])
        # Do the WebSockify HTTP Request to novncproxy to do the RFB connection
        self._websocket = compute.create_websocket(body['url'])
        # Validate that we successfully connected and upgraded to Web Sockets
        self._validate_websocket_upgrade()
        # Validate the RFB Negotiation to determine if a valid VNC session
        self._validate_rfb_negotiation()
    @decorators.idempotent_id('f9c79937-addc-4aaa-9e0e-841eef02aeb7')
    def test_novnc_bad_token(self):
        """Test accessing novnc console with bad token
        Do the WebSockify HTTP Request to novnc proxy with a bad token,
        the novnc proxy should reject the connection and closed it.
        """
        if self.use_get_remote_console:
            body = self.client.get_remote_console(
                self.server['id'], console_type='novnc',
                protocol='vnc')['remote_console']
        else:
            body = self.client.get_vnc_console(self.server['id'],
                                               type='novnc')['console']
        self.assertEqual('novnc', body['type'])
        # Do the WebSockify HTTP Request to novncproxy with a bad token
        # The console token may be carried either inside a 'path' query
        # parameter or as a top-level 'token' parameter; corrupt whichever
        # form this deployment uses.
        parts = urlparse.urlparse(body['url'])
        qparams = urlparse.parse_qs(parts.query)
        if 'path' in qparams:
            qparams['path'] = urlparse.unquote(qparams['path'][0]).replace(
                'token=', 'token=bad')
        elif 'token' in qparams:
            qparams['token'] = 'bad' + qparams['token'][0]
        new_query = urlparse.urlencode(qparams)
        new_parts = urlparse.ParseResult(parts.scheme, parts.netloc,
                                         parts.path, parts.params, new_query,
                                         parts.fragment)
        url = urlparse.urlunparse(new_parts)
        self._websocket = compute.create_websocket(url)
        # Make sure the novncproxy rejected the connection and closed it
        data = self._websocket.receive_frame()
        self.assertTrue(data is None or not data,
                        "The novnc proxy actually sent us some data, but we "
                        "expected it to close the connection.")
|
openstack/tempest
|
tempest/api/compute/servers/test_novnc.py
|
Python
|
apache-2.0
| 10,583
|
import picamera
import socket
import struct
import time
import io
class ClienteColector(object):
    """
    Streaming client that captures JPEG frames from the Raspberry Pi
    camera and sends them to a collector server over TCP.

    Wire format: every frame is preceded by its size packed as a
    little-endian unsigned 32-bit integer ('<L'); a size of 0 signals
    the end of the stream.
    """

    def __init__(self, resolucion, tiempo_captura, direccion_servidor, framerate):
        """
        :param resolucion: (width, height) tuple, camera resolution in pixels.
        :param tiempo_captura: duration of the stream, in seconds.
        :param direccion_servidor: (server_ip, port) tuple to connect to.
        :param framerate: camera speed in frames per second.
        """
        self.framerate = framerate
        self.resolucion = resolucion
        self.tiempo_captura = tiempo_captura
        # Connect the client socket right away.
        self.conectar_cliente(direccion_servidor)

    def conectar_cliente(self, direccion):
        # Create a TCP client socket and connect it to the given address.
        self.cliente = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.cliente.connect(direccion)
        # Wrap the connection in a writable binary file object.
        # BUG FIX: the original used the bare name `cliente`, which is
        # undefined here (NameError at runtime); the instance attribute
        # `self.cliente` is the socket just created above.
        self.conexion = self.cliente.makefile('wb')

    def iniciar_streaming(self):
        """Capture frames continuously and stream them until the time limit."""
        try:
            with picamera.PiCamera() as camara:
                # Configure camera resolution and framerate
                # (default intent is 10 fps).
                camara.resolution = self.resolucion
                camara.framerate = self.framerate
                # Give the camera 2 seconds to warm up, then record the
                # start time of the streaming session.
                time.sleep(2)
                tiempo_inicial = time.time()
                # In-memory buffer that temporarily holds each capture so
                # its size can be inspected before sending.
                stream = io.BytesIO()
                # capture_continuous() yields forever; captures are stored
                # in `stream` as JPEG.  The video port is used because it
                # is faster than the still-capture port.
                for img in camara.capture_continuous(stream, 'jpeg', use_video_port=True):
                    # Send the capture size as little-endian uint32 and
                    # flush to make sure the header actually goes out.
                    self.conexion.write(struct.pack('<L', stream.tell()))
                    self.conexion.flush()
                    # Rewind the buffer and send the frame body.
                    stream.seek(0)
                    self.conexion.write(stream.read())
                    # Stop once the capture time has been exceeded.
                    if time.time() - tiempo_inicial > self.tiempo_captura:
                        break
                    # Rewind and truncate so the next capture starts from
                    # a clean buffer (read() left the cursor at the end).
                    stream.seek(0)
                    stream.truncate()
                # Tell the server the captures are finished by sending a
                # zero-length header.
                self.conexion.write(struct.pack('<L', 0))
        finally:
            self.conexion.close()
            self.cliente.close()
if __name__ == '__main__':
    # Stream 320x240 JPEG frames at 10 fps for 600 seconds (10 minutes)
    # to the collector server listening on 192.168.0.100:8000.
    colector = ClienteColector((320, 240), 600, ('192.168.0.100', 8000), 10)
    colector.iniciar_streaming()
|
MiguelGarciaAlcala/VehiculoAutonomoRC
|
VA_GECPC/CODIGO_PROYECTO/Algoritmo de Recoleccion/Python/ClienteColector.py
|
Python
|
gpl-3.0
| 3,988
|
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
import paste.urlmap
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
def root_app_factory(loader, global_conf, **local_conf):
    """Build the root WSGI application from the paste URL map.

    When the v1 API is disabled in configuration, the ``/v1`` mapping is
    removed before the urlmap application is constructed.
    """
    if not CONF.enable_v1_api:
        # Raises KeyError if '/v1' is absent, same as the original `del`.
        local_conf.pop('/v1')
    return paste.urlmap.urlmap_factory(loader, global_conf, **local_conf)
|
hahaps/openstack-project-generator
|
template/<project_name>/api/__init__.py
|
Python
|
apache-2.0
| 1,063
|
from mpf.system.modes import Mode
class Rescue(Mode):
    """MPF game mode named 'Rescue'.

    Lifecycle hooks come from the MPF ``Mode`` base class; this mode only
    needs to announce its end so play can return to mission control.
    """
    def mode_init(self):
        # No one-time setup is required for this mode.
        pass
    def mode_stop(self):
        # Fire the machine-wide event that returns the player to the
        # mission-control mode whenever this mode ends.
        self.machine.events.post('return_to_mission_control')
|
GabeKnuth/STTNG
|
modes/rescue/code/rescue.py
|
Python
|
mit
| 189
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2013 Carlos Cesar Caballero Diaz <ccesar@linuxmail.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gtk
import os
import sys
import imp
import spia.internationalizator as internationalizator
from spia.internationalizator import _
#import gettext #gettext internationalization
# Application identity and translation setup.
APP_NAME = "services-manager"
# Translations live in a "locale" directory next to the program.
LOCALE_DIR = os.path.join(sys.path[0], "locale")
#gettext.install(APP_NAME, LOCALE_DIR) #gettext internacionalization
internationalizator.load_locale_chains(LOCALE_DIR)
# Translated text shown in the About dialog.
ABAUT_COMMENTS = _("Services Manager, Take control of your services from the desktop.\nInpired in the Apache-Switch tool:\nhttp://apache-switch.webuda.com")
class manager:
    """Tray-icon application that controls system services through plugins.

    Plugins are looked up in a system folder (next to the program) and in a
    per-user folder under ~/.services-manager; each plugin is a directory
    whose __init__.py defines a run(manager) entry point that appends items
    to the tray menu.
    """
    def __init__(self):
        self.set_proc_name(APP_NAME)
        #folders
        self.config_folder = os.getenv('HOME')+"/.services-manager"
        self.system_plugin_folder = sys.path[0]+"/plugins"
        self.user_plugin_folder = self.config_folder+"/plugins"
        self.plugin_folder = [self.system_plugin_folder,self.user_plugin_folder]
        #plugins main module name
        self.main_module = "__init__"
        # create folders if dont exsist
        if not os.path.exists(self.config_folder):
            os.makedirs(self.config_folder)
        if not os.path.exists(self.user_plugin_folder):
            os.makedirs(self.user_plugin_folder)
        #image files
        self.image_green = sys.path[0]+"/media/green.png"
        self.image_red = sys.path[0]+"/media/red.png"
        #tray declaration
        self.tray_icon = gtk.status_icon_new_from_stock(gtk.STOCK_INFO)
        self.tray_menu = gtk.Menu()
        self.update_menu()
        self.tray_icon.connect('popup-menu', self.show_menu, self.tray_menu)
        self.tray_icon.set_tooltip("Services Manager")
    def get_spia(self):
        """Locate and import the 'spia' module, returning the module object."""
        (file, filename, data) = imp.find_module("spia")
        spia_module = imp.load_module("spia", file, filename, data)
        return spia_module
    def set_proc_name(self, newname):
        """Set a system name to the python process"""
        from ctypes import cdll, byref, create_string_buffer
        libc = cdll.LoadLibrary('libc.so.6')
        buff = create_string_buffer(len(newname)+1)
        buff.value = newname
        # prctl option 15 is PR_SET_NAME: renames the process in ps/top.
        libc.prctl(15, byref(buff), 0, 0, 0)
    def getPlugins(self):
        """obtain plugins from folders

        Returns a list of dicts {"name": ..., "info": ...} where "info" is
        the imp.find_module() triple for the plugin's __init__ module.
        """
        plugins = []
        for folder in self.plugin_folder:
            possibleplugins = os.listdir(folder)
            for i in possibleplugins:
                location = os.path.join(folder, i)
                # A plugin is a directory that contains __init__.py.
                if not os.path.isdir(location) or not self.main_module + ".py" in os.listdir(location):
                    continue
                info = imp.find_module(self.main_module, [location])
                plugins.append({"name": i, "info": info})
        return plugins
    def loadPlugin(self, plugin):
        """Import a plugin entry returned by getPlugins() and return its module."""
        return imp.load_module(self.main_module, *plugin["info"])
    def update_menu (self, *args):
        """Rebuild the tray menu: plugin items plus Refresh/About/Close."""
        #remove menu entrys
        for i in self.tray_menu.get_children():
            self.tray_menu.remove(i)
        #load plugins
        for i in self.getPlugins():
            #print("Loading plugin " + i["name"])
            plugin = self.loadPlugin(i)
            # Each plugin appends its own items to the menu via run(self).
            plugin.run(self)
        self.tray_menu.append(gtk.SeparatorMenuItem())
        self.menu_refresh = gtk.ImageMenuItem(gtk.STOCK_REFRESH)
        self.menu_refresh.connect("activate", self.update_menu)
        self.tray_menu.append(self.menu_refresh)
        self.menu_about = gtk.ImageMenuItem(gtk.STOCK_ABOUT)
        self.menu_about.connect("activate", self.abaut_dialog)
        self.tray_menu.append(self.menu_about)
        self.tray_menu.append(gtk.SeparatorMenuItem())
        self.menu_quit = gtk.ImageMenuItem(gtk.STOCK_CLOSE)
        self.menu_quit.connect("activate", lambda w: gtk.main_quit())
        self.tray_menu.append(self.menu_quit)
        self.tray_menu.show_all()
    def show_menu(self, status_icon, button, activate_time, menu):
        """Handler for the tray icon's 'popup-menu' signal: pop up the menu."""
        menu.popup(None, None, gtk.status_icon_position_menu, button, activate_time, status_icon)
    def show_icon(self, *args ):
        """Make the tray icon visible.

        Returns False so it can double as a one-shot GLib timeout/idle
        callback.
        """
        self.tray_icon.set_visible(True)
        return False
    def do_response(self, dialog, response):
        """Destroy *dialog* when the user cancels it."""
        if response == gtk.RESPONSE_CANCEL:
            dialog.destroy()
    def notify(self, notification):
        """Show a desktop notification.

        Best-effort by design: silently does nothing when pynotify is
        unavailable or fails to initialize.
        """
        try:
            import pynotify
            if pynotify.init("Services Manager"):
                n = pynotify.Notification(notification)
                #n.set_timeout(10000)
                n.show()
        except:
            pass
    def abaut_dialog(self, *args):
        """Show the Abaut dialog"""
        about = gtk.AboutDialog()
        about.set_name("services-manager")
        about.set_version("0.2")
        about.set_comments(ABAUT_COMMENTS)
        about.set_license("GPL v3")
        about.set_website("https://github.com/cccaballero/services-manager")
        about.set_authors(["Carlos Cesar Caballero Diaz <ccesar@linuxmail.org>"])
        #about.set_logo(self.pixbu_logo)
        about.run()
        about.hide()
        pass
if __name__ == '__main__':
    # Build the tray application and hand control to the GTK main loop.
    manager()
    gtk.main()
|
cccaballero/services-manager
|
services-manager.py
|
Python
|
gpl-3.0
| 5,938
|
from Root.src import application
import unittest
class test11(unittest.TestCase):
    """Verifies that application.foo() returns the expected value."""

    def test_one(self):
        result = application.foo()
        self.assertEqual(result, 1)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
codeboardio/mantra
|
test/test_resources/python-unittest/py_error_one_file_manyErrors/Root/test/test11.py
|
Python
|
mit
| 188
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
"""
Sequential simulation:
Make plot to show difference between observations and predicted values
"""
import SequentialSimulation_results as seqsim
import matplotlib.pyplot as plt
# Observations and central model predictions from the sequential
# simulation results module; both are arrays of shape
# (time steps, observation locations) — assumed from the [:, i] indexing
# in the original script.
obs = seqsim.obs
pred_f = seqsim.pred_f_central

# One figure per observation location comparing the model prediction with
# the observation. Generalized from three hard-coded, copy-pasted figure
# blocks to a loop over however many observation columns are present.
n_locations = obs.shape[1]
for i in range(n_locations):
    plt.figure()
    plt.plot(pred_f[:, i])
    plt.plot(obs[:, i])
    plt.title('Obs%02d' % (i + 1))
    plt.xlabel('time')
    plt.legend(['prediction', 'observation'])

# Residuals (prediction minus observation) for all locations in one figure.
diff = pred_f - obs
plt.figure()
plt.plot(diff)
plt.title('Difference between model and observations')
plt.ylabel('prediction-observation')
plt.xlabel('time')

plt.show()
|
OpenDA-Association/OpenDA
|
model_dflowfm_blackbox/tests/estuary_kalman_FMSuite2019.01/plot_sequential.py
|
Python
|
lgpl-3.0
| 906
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module provides input and output from the CSSR file format.
"""
import re
from monty.io import zopen
from pymatgen.core.lattice import Lattice
from pymatgen.core.structure import Structure
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyuep@gmail.com"
__date__ = "Jan 24, 2012"
class Cssr:
    """
    Basic object for working with Cssr file. Right now, only conversion from
    a Structure to a Cssr file is supported.
    """
    def __init__(self, structure):
        """
        Args:
            structure (Structure/IStructure): A structure to create the Cssr object.

        Raises:
            ValueError: If the structure has partial occupancies, which the
                CSSR format cannot represent.
        """
        if not structure.is_ordered:
            raise ValueError("Cssr file can only be constructed from ordered "
                             "structure")
        self.structure = structure
    def __str__(self):
        """Serialize the structure in CSSR format.

        Layout: line 1 = cell lengths, line 2 = cell angles plus a fixed
        "SPGR = 1 P 1" spacegroup tag, line 3 = site count, line 4 = formula,
        then one line per site with species and fractional coordinates.
        """
        output = ["{:.4f} {:.4f} {:.4f}"
                  .format(*self.structure.lattice.abc),
                  "{:.2f} {:.2f} {:.2f} SPGR = 1 P 1 OPT = 1"
                  .format(*self.structure.lattice.angles),
                  "{} 0".format(len(self.structure)),
                  "0 {}".format(self.structure.formula)]
        for i, site in enumerate(self.structure.sites):
            output.append("{} {} {:.4f} {:.4f} {:.4f}"
                          .format(i + 1, site.specie, site.a, site.b, site.c))
        return "\n".join(output)
    def write_file(self, filename):
        """
        Write out a CSSR file.
        Args:
            filename (str): Filename to write to. zopen transparently
                supports compressed (.gz/.bz2) filenames.
        """
        with zopen(filename, 'wt') as f:
            f.write(str(self) + "\n")
    @staticmethod
    def from_string(string):
        """
        Reads a string representation to a Cssr object.
        Args:
            string (str): A string representation of a CSSR.
        Returns:
            Cssr object.
        """
        lines = string.split("\n")
        toks = lines[0].split()
        lengths = [float(i) for i in toks]
        toks = lines[1].split()
        angles = [float(i) for i in toks[0:3]]
        latt = Lattice.from_parameters(*lengths, *angles)
        sp = []
        coords = []
        # Site lines start at index 4; each looks like
        # "<index> <species> <a> <b> <c>". Non-matching lines are skipped.
        for l in lines[4:]:
            m = re.match(
                r"\d+\s+(\w+)\s+([0-9\-\.]+)\s+([0-9\-\.]+)\s+([0-9\-\.]+)",
                l.strip())
            if m:
                sp.append(m.group(1))
                coords.append([float(m.group(i)) for i in range(2, 5)])
        return Cssr(Structure(latt, sp, coords))
    @staticmethod
    def from_file(filename):
        """
        Reads a CSSR file to a Cssr object.
        Args:
            filename (str): Filename to read from.
        Returns:
            Cssr object.
        """
        with zopen(filename, "rt") as f:
            return Cssr.from_string(f.read())
|
gVallverdu/pymatgen
|
pymatgen/io/cssr.py
|
Python
|
mit
| 3,006
|
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from django.db.models import Q
from .models import Resource
def index(request):
    """Landing page: render the five most recently added resources."""
    newest = Resource.objects.order_by('-id')[:5]
    return render(request, 'curated_resources/index.html',
                  {'latest_resources_list': newest})
def detail(request, resource_slug):
    """Display a single published resource looked up by its slug.

    Raises Http404 (via get_object_or_404) when no published resource
    matches the slug.
    """
    # Removed stray debug statement ("print resource_slug") that wrote the
    # slug to stdout on every request.
    resource = get_object_or_404(Resource, slug=resource_slug, published=True)
    return render(request, 'curated_resources/detail.html', {'resource': resource})
# from .filters import ResourceFilter
# def resource_list(request):
# f = ResourceFilter(request.GET, queryset=Resource.objects.all())
# return render(request, 'curated_resources/filtertemplate.html', {'filter': f})
from django_easyfilters import FilterSet
class ResourceFilterSet(FilterSet):
    """django-easyfilters filter set used by the resource list view.

    ``fields`` are the model fields users can filter on; ``title_fields``
    are the ones whose choice labels are rendered with titles.
    """
    fields = [
        'domains',
        'topics',
        'suitable_for'
    ]
    title_fields = ['domains', 'topics']
def resource_list(request):
    """Filterable list of published resources.

    Combines a free-text search over title/short_title/description with the
    django-easyfilters field filters declared on ResourceFilterSet.
    """
    resources = Resource.objects.filter(published=True).order_by('title')
    # Declarative description of the text-search inputs rendered by the
    # template; the "search_keys" lookups are ORed together.
    search_fields = [
        {
            "field_name": "title",
            "field_label": "Title/description",
            "placeholder": "",
            "search_keys": [
                "title__icontains",
                "short_title__icontains",
                "description__icontains",
            ],
        },
    ]
    for search_field in search_fields:
        field_name = search_field["field_name"]
        if field_name in request.GET:
            query = request.GET[field_name]
            # Echo the query back so the template can refill the input.
            search_field["value"] = query
            q_object = Q()
            for search_key in search_field["search_keys"]:
                lookup = {search_key: query}
                q_object |= Q(**lookup)
            resources = resources.distinct().filter(q_object)
    # Preserve active easyfilters selections so a search submit does not
    # drop them.
    # NOTE(review): hidden_search_fields is built but never passed to the
    # template context below — confirm whether it should be included.
    hidden_search_fields = []
    for key in ResourceFilterSet.fields:
        if key not in [search_field["field_name"] for search_field in search_fields]:
            for query_value in request.GET.getlist(key):
                # the field_name and query_value populate some <input> elements
                hidden_search_fields.append(
                    {
                        "field_name": key,
                        "value": query_value,
                    })
    resourcesfilter = ResourceFilterSet(resources, request.GET)
    return render(request, "curated_resources/resourcelist.html", {
        'resources': resourcesfilter.qs,
        'resourcesfilter': resourcesfilter,
        "search_fields": search_fields,
    })
|
evildmp/django-curated-resources
|
curated_resources/views.py
|
Python
|
bsd-2-clause
| 2,752
|
'''
Created on 16 Sep 2017
@author: Mathias Bucher
'''
from Tkinter import Frame
from VStyles import rootColor, getFrame, getButtonEntry
from model.ModelEntry import ModelEntry
class VOverview(Frame):
    '''Overview pane that lists entries grouped by key.

    Each group of entries is rendered as one frame row; clicking an entry
    button forwards its name to the "openEntryOverviewAction" callback.
    '''
    name = "Overview"
    def __init__(self, parent, log, actions):
        '''Constructor.

        parent  -- Tk parent widget
        log     -- logger providing .add(level, file, message)
        actions -- dict of named callbacks, may be None
        '''
        Frame.__init__(self, parent)
        self.configure(bg=rootColor)
        self.log = log
        self.actions = actions
        # Maps group key -> the Frame holding that group's entry buttons,
        # so removeAll() can destroy them later.
        self.sortedEntries = dict()
        self.log.add(self.log.Info, __file__, "init" )
    def show(self, entries):
        '''Displays the entries dict, one frame row per key.

        NOTE(review): the original docstring claimed "sorted by their keys",
        but plain dict iteration order is used — confirm intent.
        '''
        currentRow = 0
        for k, v in entries.iteritems():
            self.sortedEntries[k] = getFrame(self)
            for e in v:
                # Bind e.name as a default argument so each button keeps
                # its own name (avoids the late-binding closure pitfall).
                but = getButtonEntry(self.sortedEntries[k], command=lambda n=e.name: self.entryClicked(n))
                but["text"] = e.name
            self.sortedEntries[k].grid(row=currentRow)
            currentRow += 1
    def entryClicked(self, entryName):
        '''This method is called when any showed entry is clicked'''
        self.log.add(self.log.Info, __file__, entryName + " clicked")
        # Identity check instead of "!= None"; dispatch only when the
        # callback is actually registered.
        if self.actions is not None and "openEntryOverviewAction" in self.actions:
            self.actions["openEntryOverviewAction"](entryName)
    def removeAll(self):
        '''Removes all showed entries'''
        for group in self.sortedEntries.itervalues():
            group.destroy()
        self.sortedEntries = dict()
|
maeddlae/keepit
|
src/view/VOverview.py
|
Python
|
mit
| 1,608
|
# -*- coding: utf-8 -*-
# Flask License
#
# Copyright © 2010 by the Pallets team.
#
# Some rights reserved.
# Redistribution and use in source and binary forms of the software as well as
# documentation, with or without modification, are permitted provided that the
# following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this list of conditions
# and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this list of conditions
# and the following disclaimer in the documentation and/or other materials provided with the distribution.
# Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE AND DOCUMENTATION IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE AND DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# http://flask.pocoo.org/snippets/62/
from urllib.parse import urlparse, urljoin
from flask import request, url_for, redirect
def is_safe_url(target):
    """Return True if *target* resolves to an http(s) URL on our own host.

    Guards redirects against open-redirect abuse: the candidate is resolved
    relative to the current request's host URL and accepted only when its
    scheme is http/https and its network location matches ours.
    """
    host = urlparse(request.host_url)
    candidate = urlparse(urljoin(request.host_url, target))
    same_host = host.netloc == candidate.netloc
    return candidate.scheme in ('http', 'https') and same_host
def get_redirect_target():
    """Return the first safe redirect target for the current request.

    Checks the ``next`` request value first, then the referrer; returns
    None implicitly when neither is present and safe.
    """
    candidates = (request.values.get('next'), request.referrer)
    for target in candidates:
        if target and is_safe_url(target):
            return target
def redirect_back(endpoint, **values):
    """Redirect to the form's ``next`` target, falling back to *endpoint*.

    The fallback is used when ``next`` is empty or fails the same-host
    safety check.
    """
    target = request.form['next']
    unsafe = not target or not is_safe_url(target)
    if unsafe:
        target = url_for(endpoint, **values)
    return redirect(target)
|
janeczku/calibre-web
|
cps/redirect.py
|
Python
|
gpl-3.0
| 2,375
|
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Martin Luessi <mluessi@nmr.mgh.harvard.edu>
# Eric Larson <larson.eric.d@gmail.com>
# Denis Egnemann <denis.engemann@gmail.com>
# Stefan Appelhoff <stefan.appelhoff@mailbox.org>
# Adam Li <adam2392@gmail.com>
# Daniel McCloy <dan@mccloy.info>
#
# License: BSD Style.
from collections import OrderedDict
import os
import os.path as op
import sys
import zipfile
import tempfile
import numpy as np
from .config import _hcp_mmp_license_text, MNE_DATASETS
from ..label import read_labels_from_annot, Label, write_labels_to_annot
from ..utils import (get_config, set_config, logger,
verbose, get_subjects_dir, _pl, _safe_input)
from ..utils.docs import docdict
from ..utils.check import _soft_import
from ..externals.doccer import docformat
DEPRECATION_MESSAGE_TEMPLATE = "use mne.datasets.has_dataset('{}') instead."
_HAS_DATA_DOCSTRING_TEMPLATE = """\
Check for presence of {} dataset on user's system.
Returns
-------
presence : bool
Whether the dataset was found.
"""
_data_path_doc = """Get path to local copy of {name} dataset.
Parameters
----------
path : None | str
Location of where to look for the {name} dataset.
If None, the environment variable or config parameter
``{conf}`` is used. If it doesn't exist, the
"~/mne_data" directory is used. If the {name} dataset
is not found under the given path, the data
will be automatically downloaded to the specified folder.
force_update : bool
Force update of the {name} dataset even if a local copy exists.
Default is False.
update_path : bool | None
If True (default), set the ``{conf}`` in mne-python
config to the given path. If None, the user is prompted.
download : bool
If False and the {name} dataset has not been downloaded yet,
it will not be downloaded and the path will be returned as
'' (empty string). This is mostly used for debugging purposes
and can be safely ignored by most users.
%(verbose)s
Returns
-------
path : str
Path to {name} dataset directory.
"""
_data_path_doc_accept = _data_path_doc.split('%(verbose)s')
_data_path_doc_accept[-1] = '%(verbose)s' + _data_path_doc_accept[-1]
_data_path_doc_accept.insert(1, ' %(accept)s')
_data_path_doc_accept = ''.join(_data_path_doc_accept)
_data_path_doc = docformat(_data_path_doc, docdict)
_data_path_doc_accept = docformat(_data_path_doc_accept, docdict)
_version_doc = """Get version of the local {name} dataset.
Returns
-------
version : str | None
Version of the {name} local dataset, or None if the dataset
does not exist locally.
"""
def _dataset_version(path, name):
"""Get the version of the dataset."""
ver_fname = op.join(path, 'version.txt')
if op.exists(ver_fname):
with open(ver_fname, 'r') as fid:
version = fid.readline().strip() # version is on first line
else:
# Sample dataset versioning was introduced after 0.3
# SPM dataset was introduced with 0.7
versions = dict(sample='0.7', spm='0.3')
version = versions.get(name, '0.0')
return version
def _get_path(path, key, name):
    """Get a dataset path.

    Resolution order: an explicit ``path`` argument, then the dataset's own
    config key, then the generic ``MNE_DATA`` config value, and finally the
    default ``~/mne_data`` directory, which is created on demand.

    Parameters
    ----------
    path : str | None
        Explicit location; returned unchanged when given.
    key : str | None
        Dataset-specific configuration key to consult.
    name : str
        Dataset name, used only for log messages.

    Returns
    -------
    path : str
        The resolved dataset directory.
    """
    # 1. Input
    if path is not None:
        if not isinstance(path, str):
            raise ValueError('path must be a string or None')
        return path
    # 2. get_config(key) — unless key is None or "" (special get_config values)
    # 3. get_config('MNE_DATA')
    path = get_config(key or 'MNE_DATA', get_config('MNE_DATA'))
    if path is not None:
        if not op.exists(path):
            msg = (f"Download location {path} as specified by MNE_DATA does "
                   f"not exist. Either create this directory manually and try "
                   f"again, or set MNE_DATA to an existing directory.")
            raise FileNotFoundError(msg)
        return path
    # 4. ~/mne_data (but use a fake home during testing so we don't
    # unnecessarily create ~/mne_data)
    logger.info('Using default location ~/mne_data for %s...' % name)
    path = op.join(os.getenv('_MNE_FAKE_HOME_DIR',
                             op.expanduser("~")), 'mne_data')
    if not op.exists(path):
        logger.info('Creating ~/mne_data')
        try:
            os.mkdir(path)
        except OSError:
            raise OSError("User does not have write permissions "
                          "at '%s', try giving the path as an "
                          "argument to data_path() where user has "
                          "write permissions, for ex:data_path"
                          "('/home/xyz/me2/')" % (path))
    return path
def _do_path_update(path, update_path, key, name):
    """Update path.

    Persist *path* under config *key* — optionally prompting the user
    first — and return the absolute path.
    """
    path = op.abspath(path)
    # Skip the prompt/write entirely when the config already holds this path.
    identical = get_config(key, '', use_env=False) == path
    if not identical:
        if update_path is None:
            update_path = True
            # '--update-dataset-path' is the non-interactive opt-in used by
            # scripts/CI; otherwise ask the user.
            if '--update-dataset-path' in sys.argv:
                answer = 'y'
            else:
                msg = ('Do you want to set the path:\n    %s\nas the default '
                       '%s dataset path in the mne-python config [y]/n? '
                       % (path, name))
                answer = _safe_input(msg, alt='pass update_path=True')
            if answer.lower() == 'n':
                update_path = False
        if update_path:
            set_config(key, path, set_env=False)
    return path
def _download_mne_dataset(name, processor, path, force_update,
                          update_path, download, accept=False):
    """Aux function for downloading internal MNE datasets.

    Resolves the download path for *name*, wraps the requested unpacking
    *processor* and delegates the actual transfer to fetch_dataset().
    """
    from mne.datasets._fetch import fetch_dataset
    # import pooch library for handling the dataset downloading
    pooch = _soft_import('pooch', 'dataset downloading', strict=True)
    dataset_params = MNE_DATASETS[name]
    dataset_params['dataset_name'] = name
    config_key = MNE_DATASETS[name]['config_key']
    folder_name = MNE_DATASETS[name]['folder_name']
    # get download path for specific dataset
    path = _get_path(path=path, key=config_key, name=name)
    # instantiate processor that unzips file
    if processor == 'nested_untar':
        processor_ = pooch.Untar(extract_dir=op.join(path, folder_name))
    elif processor == 'nested_unzip':
        processor_ = pooch.Unzip(extract_dir=op.join(path, folder_name))
    else:
        # Anything else is assumed to be a ready-made pooch processor
        # (or None) and is passed through untouched.
        processor_ = processor
    # handle case of multiple sub-datasets with different urls
    if name == 'visual_92_categories':
        dataset_params = []
        for name in ['visual_92_categories_1', 'visual_92_categories_2']:
            this_dataset = MNE_DATASETS[name]
            this_dataset['dataset_name'] = name
            dataset_params.append(this_dataset)
    return fetch_dataset(dataset_params=dataset_params, processor=processor_,
                         path=path, force_update=force_update,
                         update_path=update_path, download=download,
                         accept=accept)
def _get_version(name):
    """Return the locally installed version of dataset *name*, or None."""
    from mne.datasets._fetch import fetch_dataset
    # Nothing to report when the dataset is not on disk.
    if not has_dataset(name):
        return None
    params = MNE_DATASETS[name]
    params['dataset_name'] = name
    # Resolve where this dataset lives, then ask the fetcher for the
    # version half of its (path, version) result.
    path = _get_path(path=None, key=params['config_key'], name=name)
    return fetch_dataset(params, path=path, return_version=True)[1]
def has_dataset(name):
    """Check for presence of a dataset.
    Parameters
    ----------
    name : str | dict
        The dataset to check. Strings refer to one of the supported datasets
        listed :ref:`here <datasets>`. A :class:`dict` can be used to check for
        user-defined datasets (see the Notes section of :func:`fetch_dataset`),
        and must contain keys ``dataset_name``, ``archive_name``, ``url``,
        ``folder_name``, ``hash``.
    Returns
    -------
    has : bool
        True if the dataset is present.
    """
    from mne.datasets._fetch import fetch_dataset
    if isinstance(name, dict):
        # User-defined dataset: the dict itself carries all parameters.
        dataset_name = name['dataset_name']
        dataset_params = name
    else:
        # 'spm_face' is stored under the historical key 'spm'.
        dataset_name = 'spm' if name == 'spm_face' else name
        dataset_params = MNE_DATASETS[dataset_name]
        dataset_params['dataset_name'] = dataset_name
    config_key = dataset_params['config_key']
    # get download path for specific dataset
    path = _get_path(path=None, key=config_key, name=dataset_name)
    # download=False means fetch_dataset only resolves where the data would
    # live; no network access happens here.
    dp = fetch_dataset(dataset_params, path=path, download=False,
                       check_version=False)
    if dataset_name.startswith('bst_'):
        check = dataset_name
    else:
        check = MNE_DATASETS[dataset_name]['folder_name']
    # Present when the resolved path ends with the expected folder name.
    return dp.endswith(check)
@verbose
def _download_all_example_data(verbose=True):
    """Download all datasets used in examples and tutorials."""
    # This function is designed primarily to be used by CircleCI, to:
    #
    # 1. Streamline data downloading
    # 2. Make CircleCI fail early (rather than later) if some necessary data
    # cannot be retrieved.
    # 3. Avoid download statuses and timing biases in rendered examples.
    #
    # verbose=True by default so we get nice status messages.
    # Consider adding datasets from here to CircleCI for PR-auto-build
    from . import (sample, testing, misc, spm_face, somato, brainstorm,
                   eegbci, multimodal, opm, hf_sef, mtrf, fieldtrip_cmc,
                   kiloword, phantom_4dbti, sleep_physionet, limo,
                   fnirs_motor, refmeg_noise, fetch_infant_template,
                   fetch_fsaverage, ssvep, erp_core, epilepsy_ecog,
                   fetch_phantom)
    # Each data_path() call downloads its dataset when missing.
    sample_path = sample.data_path()
    testing.data_path()
    misc.data_path()
    spm_face.data_path()
    somato.data_path()
    hf_sef.data_path()
    multimodal.data_path()
    fnirs_motor.data_path()
    opm.data_path()
    mtrf.data_path()
    fieldtrip_cmc.data_path()
    kiloword.data_path()
    phantom_4dbti.data_path()
    refmeg_noise.data_path()
    ssvep.data_path()
    epilepsy_ecog.data_path()
    # The Brainstorm datasets require accepting their license terms.
    brainstorm.bst_raw.data_path(accept=True)
    brainstorm.bst_auditory.data_path(accept=True)
    brainstorm.bst_resting.data_path(accept=True)
    phantom_path = brainstorm.bst_phantom_elekta.data_path(accept=True)
    fetch_phantom('otaniemi', subjects_dir=phantom_path)
    brainstorm.bst_phantom_ctf.data_path(accept=True)
    eegbci.load_data(1, [6, 10, 14], update_path=True)
    for subj in range(4):
        eegbci.load_data(subj + 1, runs=[3], update_path=True)
    sleep_physionet.age.fetch_data(subjects=[0, 1], recording=[1])
    # If the user has SUBJECTS_DIR, respect it, if not, set it to the EEG one
    # (probably on CircleCI, or otherwise advanced user)
    fetch_fsaverage(None)
    fetch_infant_template('6mo')
    fetch_hcp_mmp_parcellation(
        subjects_dir=sample_path + '/subjects', accept=True)
    limo.load_data(subject=1, update_path=True)
    erp_core.data_path()
@verbose
def fetch_aparc_sub_parcellation(subjects_dir=None, verbose=None):
    """Fetch the modified subdivided aparc parcellation.
    This will download and install the subdivided aparc parcellation
    :footcite:'KhanEtAl2018' files for
    FreeSurfer's fsaverage to the specified directory.
    Parameters
    ----------
    subjects_dir : str | None
        The subjects directory to use. The file will be placed in
        ``subjects_dir + '/fsaverage/label'``.
    %(verbose)s
    References
    ----------
    .. footbibliography::
    """
    # import pooch library for handling the dataset downloading
    pooch = _soft_import('pooch', 'dataset downloading', strict=True)
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    destination = op.join(subjects_dir, 'fsaverage', 'label')
    urls = dict(lh='https://osf.io/p92yb/download',
                rh='https://osf.io/4kxny/download')
    hashes = dict(lh='9e4d8d6b90242b7e4b0145353436ef77',
                  rh='dd6464db8e7762d969fc1d8087cd211b')
    # Download each hemisphere's annot file only when it is missing;
    # pooch verifies the MD5 checksum of anything it downloads.
    for hemi in ('lh', 'rh'):
        fname = f'{hemi}.aparc_sub.annot'
        fpath = op.join(destination, fname)
        if not op.isfile(fpath):
            pooch.retrieve(
                url=urls[hemi],
                known_hash=f"md5:{hashes[hemi]}",
                path=destination,
                fname=fname
            )
@verbose
def fetch_hcp_mmp_parcellation(subjects_dir=None, combine=True, *,
                               accept=False, verbose=None):
    """Fetch the HCP-MMP parcellation.
    This will download and install the HCP-MMP parcellation
    :footcite:`GlasserEtAl2016` files for FreeSurfer's fsaverage
    :footcite:`Mills2016` to the specified directory.
    Parameters
    ----------
    subjects_dir : str | None
        The subjects directory to use. The file will be placed in
        ``subjects_dir + '/fsaverage/label'``.
    combine : bool
        If True, also produce the combined/reduced set of 23 labels per
        hemisphere as ``HCPMMP1_combined.annot``
        :footcite:`GlasserEtAl2016supp`.
    %(accept)s
    %(verbose)s
    Notes
    -----
    Use of this parcellation is subject to terms of use on the
    `HCP-MMP webpage <https://balsa.wustl.edu/WN56>`_.
    References
    ----------
    .. footbibliography::
    """
    # import pooch library for handling the dataset downloading
    pooch = _soft_import('pooch', 'dataset downloading', strict=True)
    subjects_dir = get_subjects_dir(subjects_dir, raise_error=True)
    destination = op.join(subjects_dir, 'fsaverage', 'label')
    fnames = [op.join(destination, '%s.HCPMMP1.annot' % hemi)
              for hemi in ('lh', 'rh')]
    urls = dict(lh='https://ndownloader.figshare.com/files/5528816',
                rh='https://ndownloader.figshare.com/files/5528819')
    hashes = dict(lh='46a102b59b2fb1bb4bd62d51bf02e975',
                  rh='75e96b331940227bbcb07c1c791c2463')
    # The license must be accepted (interactively, via the accept kwarg, or
    # via the command-line flag) before anything is downloaded.
    if not all(op.isfile(fname) for fname in fnames):
        if accept or '--accept-hcpmmp-license' in sys.argv:
            answer = 'y'
        else:
            answer = _safe_input('%s\nAgree (y/[n])? ' % _hcp_mmp_license_text)
        if answer.lower() != 'y':
            raise RuntimeError('You must agree to the license to use this '
                               'dataset')
        for hemi, fpath in zip(('lh', 'rh'), fnames):
            if not op.isfile(fpath):
                fname = op.basename(fpath)
                pooch.retrieve(
                    url=urls[hemi],
                    known_hash=f"md5:{hashes[hemi]}",
                    path=destination,
                    fname=fname
                )
    if combine:
        fnames = [op.join(destination, '%s.HCPMMP1_combined.annot' % hemi)
                  for hemi in ('lh', 'rh')]
        if all(op.isfile(fname) for fname in fnames):
            return
        # otherwise, let's make them
        logger.info('Creating combined labels')
        # Mapping from each combined-label name to the fine-grained HCPMMP1
        # label names it merges; insertion order is preserved deliberately.
        groups = OrderedDict([
            ('Primary Visual Cortex (V1)',
             ('V1',)),
            ('Early Visual Cortex',
             ('V2', 'V3', 'V4')),
            ('Dorsal Stream Visual Cortex',
             ('V3A', 'V3B', 'V6', 'V6A', 'V7', 'IPS1')),
            ('Ventral Stream Visual Cortex',
             ('V8', 'VVC', 'PIT', 'FFC', 'VMV1', 'VMV2', 'VMV3')),
            ('MT+ Complex and Neighboring Visual Areas',
             ('V3CD', 'LO1', 'LO2', 'LO3', 'V4t', 'FST', 'MT', 'MST', 'PH')),
            ('Somatosensory and Motor Cortex',
             ('4', '3a', '3b', '1', '2')),
            ('Paracentral Lobular and Mid Cingulate Cortex',
             ('24dd', '24dv', '6mp', '6ma', 'SCEF', '5m', '5L', '5mv',)),
            ('Premotor Cortex',
             ('55b', '6d', '6a', 'FEF', '6v', '6r', 'PEF')),
            ('Posterior Opercular Cortex',
             ('43', 'FOP1', 'OP4', 'OP1', 'OP2-3', 'PFcm')),
            ('Early Auditory Cortex',
             ('A1', 'LBelt', 'MBelt', 'PBelt', 'RI')),
            ('Auditory Association Cortex',
             ('A4', 'A5', 'STSdp', 'STSda', 'STSvp', 'STSva', 'STGa', 'TA2',)),
            ('Insular and Frontal Opercular Cortex',
             ('52', 'PI', 'Ig', 'PoI1', 'PoI2', 'FOP2', 'FOP3',
              'MI', 'AVI', 'AAIC', 'Pir', 'FOP4', 'FOP5')),
            ('Medial Temporal Cortex',
             ('H', 'PreS', 'EC', 'PeEc', 'PHA1', 'PHA2', 'PHA3',)),
            ('Lateral Temporal Cortex',
             ('PHT', 'TE1p', 'TE1m', 'TE1a', 'TE2p', 'TE2a',
              'TGv', 'TGd', 'TF',)),
            ('Temporo-Parieto-Occipital Junction',
             ('TPOJ1', 'TPOJ2', 'TPOJ3', 'STV', 'PSL',)),
            ('Superior Parietal Cortex',
             ('LIPv', 'LIPd', 'VIP', 'AIP', 'MIP',
              '7PC', '7AL', '7Am', '7PL', '7Pm',)),
            ('Inferior Parietal Cortex',
             ('PGp', 'PGs', 'PGi', 'PFm', 'PF', 'PFt', 'PFop',
              'IP0', 'IP1', 'IP2',)),
            ('Posterior Cingulate Cortex',
             ('DVT', 'ProS', 'POS1', 'POS2', 'RSC', 'v23ab', 'd23ab',
              '31pv', '31pd', '31a', '23d', '23c', 'PCV', '7m',)),
            ('Anterior Cingulate and Medial Prefrontal Cortex',
             ('33pr', 'p24pr', 'a24pr', 'p24', 'a24', 'p32pr', 'a32pr', 'd32',
              'p32', 's32', '8BM', '9m', '10v', '10r', '25',)),
            ('Orbital and Polar Frontal Cortex',
             ('47s', '47m', 'a47r', '11l', '13l',
              'a10p', 'p10p', '10pp', '10d', 'OFC', 'pOFC',)),
            ('Inferior Frontal Cortex',
             ('44', '45', 'IFJp', 'IFJa', 'IFSp', 'IFSa', '47l', 'p47r',)),
            ('DorsoLateral Prefrontal Cortex',
             ('8C', '8Av', 'i6-8', 's6-8', 'SFL', '8BL', '9p', '9a', '8Ad',
              'p9-46v', 'a9-46v', '46', '9-46d',)),
            ('???',
             ('???',))])
        assert len(groups) == 23
        labels_out = list()
        for hemi in ('lh', 'rh'):
            labels = read_labels_from_annot('fsaverage', 'HCPMMP1', hemi=hemi,
                                            subjects_dir=subjects_dir,
                                            sort=False)
            label_names = [
                '???' if label.name.startswith('???') else
                label.name.split('_')[1] for label in labels]
            # Track which fine labels have been consumed so the asserts can
            # prove every label lands in exactly one group.
            used = np.zeros(len(labels), bool)
            for key, want in groups.items():
                assert '\t' not in key
                these_labels = [li for li, label_name in enumerate(label_names)
                                if label_name in want]
                assert not used[these_labels].any()
                assert len(these_labels) == len(want)
                used[these_labels] = True
                these_labels = [labels[li] for li in these_labels]
                # take a weighted average to get the color
                # (here color == task activation)
                w = np.array([len(label.vertices) for label in these_labels])
                w = w / float(w.sum())
                color = np.dot(w, [label.color for label in these_labels])
                # Summing Label objects merges their vertex sets.
                these_labels = sum(these_labels,
                                   Label([], subject='fsaverage', hemi=hemi))
                these_labels.name = key
                these_labels.color = color
                labels_out.append(these_labels)
            assert used.all()
        # 23 combined labels per hemisphere.
        assert len(labels_out) == 46
        for hemi, side in (('lh', 'left'), ('rh', 'right')):
            table_name = './%s.fsaverage164.label.gii' % (side,)
            write_labels_to_annot(labels_out, 'fsaverage', 'HCPMMP1_combined',
                                  hemi=hemi, subjects_dir=subjects_dir,
                                  sort=False, table_name=table_name)
def _manifest_check_download(manifest_path, destination, url, hash_):
    """Ensure every file listed in ``manifest_path`` exists under ``destination``.

    Missing files are obtained by downloading the zip archive at ``url``
    (verified against the MD5 ``hash_``) into a temporary directory and
    extracting only the absent entries into ``destination``.
    """
    # pooch is an optional dependency; fail with a helpful message if absent.
    pooch = _soft_import('pooch', 'dataset downloading', strict=True)
    # The manifest holds one expected relative filename per line.
    with open(manifest_path, 'r') as fid:
        names = [line.strip() for line in fid.readlines()]
    manifest_path = op.basename(manifest_path)  # only needed for logging now
    need = [name for name in names
            if not op.isfile(op.join(destination, name))]
    logger.info('%d file%s missing from %s in %s'
                % (len(need), _pl(need), manifest_path, destination))
    if not len(need):
        return
    with tempfile.TemporaryDirectory() as tmp_dir:
        logger.info('Downloading missing files remotely')
        fname_path = op.join(tmp_dir, 'temp.zip')
        pooch.retrieve(
            url=url,
            known_hash=f"md5:{hash_}",
            path=tmp_dir,
            fname=op.basename(fname_path)
        )
        logger.info('Extracting missing file%s' % (_pl(need),))
        with zipfile.ZipFile(fname_path, 'r') as ff:
            # Every non-directory archive member must match the manifest
            # exactly (no extras, nothing absent) before anything is written.
            members = {entry for entry in ff.namelist()
                       if not entry.endswith('/')}
            missing = sorted(members.symmetric_difference(set(names)))
            if len(missing):
                raise RuntimeError('Zip file did not have correct names:'
                                   '\n%s' % ('\n'.join(missing)))
            for name in need:
                ff.extract(name, path=destination)
        logger.info('Successfully extracted %d file%s'
                    % (len(need), _pl(need)))
|
bloyl/mne-python
|
mne/datasets/utils.py
|
Python
|
bsd-3-clause
| 21,557
|
# -*- coding:utf-8 -*-
"""
/***************************************************************************
Python Console for QGIS
-------------------
begin : 2012-09-10
copyright : (C) 2012 by Salvatore Larosa
email : lrssvtml (at) gmail (dot) com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
Some portions of code were taken from https://code.google.com/p/pydee/
"""
import os
from qgis.PyQt.QtCore import Qt, QTimer, QCoreApplication, QSize, QByteArray, QFileInfo, QUrl, QDir
from qgis.PyQt.QtWidgets import QToolBar, QToolButton, QWidget, QSplitter, QTreeWidget, QAction, QFileDialog, QCheckBox, QSizePolicy, QMenu, QGridLayout, QApplication, QShortcut
from qgis.PyQt.QtGui import QDesktopServices, QKeySequence
from qgis.PyQt.QtWidgets import (
QVBoxLayout,
QMessageBox
)
from qgis.utils import iface
from .console_sci import ShellScintilla
from .console_output import ShellOutputScintilla
from .console_editor import EditorTabWidget
from .console_settings import ConsoleOptionsFactory
from qgis.core import Qgis, QgsApplication, QgsSettings
from qgis.gui import (
QgsFilterLineEdit,
QgsHelp,
QgsDockWidget,
QgsGui,
QgsApplicationExitBlockerInterface
)
from functools import partial
import sys
import re
# Singleton console dock instance and the options-page factory for it.
_console = None
_options_factory = ConsoleOptionsFactory()


def show_console():
    """Called from QGIS to open (or toggle) the Python console.

    The dock widget is created lazily on first use; on later calls its
    visibility is toggled.  Returns the singleton PythonConsole.
    """
    global _console
    if _console is not None:
        # Already created: flip visibility and refocus when shown.
        _console.setUserVisible(not _console.isUserVisible())
        # set focus to the console so the user can start typing
        if _console.isUserVisible():
            _console.activate()
        return _console
    parent = iface.mainWindow() if iface else None
    _console = PythonConsole(parent)
    if iface:
        _console.visibilityChanged.connect(
            iface.actionShowPythonDialog().setChecked)
    _console.show()  # force show even if it was restored as hidden
    # set focus to the console so the user can start typing;
    # defer the set focus event so it works also whether the console not
    # visible yet
    QTimer.singleShot(0, _console.activate)
    return _console
# Last value produced by the interactive shell; stored here so the console
# can read it back after each command (see console_displayhook below).
_console_output = None


# hook for python console so all output will be redirected
# and then shown in console
def console_displayhook(obj):
    """Remember *obj* (the shell's last result) instead of printing it."""
    global _console_output
    _console_output = obj
def init_options_widget():
    """Called from QGIS to add the console options widget.

    Registers the module-level ConsoleOptionsFactory with the
    application interface so a "Python Console" page appears in the
    QGIS options dialog.
    """
    global _options_factory
    _options_factory.setTitle(QCoreApplication.translate("PythonConsole", "Python Console"))
    iface.registerOptionsWidgetFactory(_options_factory)
class ConsoleExitBlocker(QgsApplicationExitBlockerInterface):
    """Vetoes application exit on behalf of the console.

    Registered by PythonConsoleWidget so QGIS asks the console (which
    may hold unsaved editor tabs) before shutting down.
    """

    def __init__(self, console):
        super().__init__()
        self.console = console  # the PythonConsoleWidget to consult on exit

    def allowExit(self):
        # Delegate the decision to the console's own allowExit().
        return self.console.allowExit()
class PythonConsole(QgsDockWidget):
    """Dock widget that hosts the console UI inside the QGIS main window."""

    def __init__(self, parent=None):
        super().__init__(parent)
        self.setObjectName("PythonConsole")
        self.setWindowTitle(QCoreApplication.translate("PythonConsole", "Python Console"))
        # self.setAllowedAreas(Qt.BottomDockWidgetArea)

        # The actual UI lives in PythonConsoleWidget; this dock is a thin
        # shell around it that forwards focus and settings changes.
        self.console = PythonConsoleWidget(self)
        QgsGui.instance().optionsChanged.connect(self.console.updateSettings)
        self.setWidget(self.console)
        self.setFocusProxy(self.console)

        # try to restore position from stored main window state
        if iface and not iface.mainWindow().restoreDockWidget(self):
            iface.mainWindow().addDockWidget(Qt.BottomDockWidgetArea, self)

        # closeEvent is not always called for this widget -- so we also trigger a settings
        # save on application exit
        QgsApplication.instance().aboutToQuit.connect(self.console.saveSettingsConsole)

    def activate(self):
        """Raise the dock and give it keyboard focus."""
        self.activateWindow()
        self.raise_()
        QgsDockWidget.setFocus(self)

    def closeEvent(self, event):
        # Persist splitter layout and shell history before closing.
        self.console.saveSettingsConsole()
        QWidget.closeEvent(self, event)
class PythonConsoleWidget(QWidget):
    """The console UI proper: interactive shell, output pane, tabbed
    script editor, object inspector, find bar and their toolbars.

    Normally embedded in the PythonConsole dock, but can also be shown
    stand-alone (see the ``__main__`` block at the bottom of this file).
    """

    def __init__(self, parent=None):
        QWidget.__init__(self, parent)
        self.setWindowTitle(QCoreApplication.translate("PythonConsole", "Python Console"))

        self.settings = QgsSettings()

        # Core widgets: the input shell, its read-only output pane, and
        # the tabbed script editor.
        self.shell = ShellScintilla(self)
        self.setFocusProxy(self.shell)
        self.shellOut = ShellOutputScintilla(self)
        self.tabEditorWidget = EditorTabWidget(self)

        # ------------ UI -------------------------------

        # Horizontal splitter: console (left) | editor + inspector (right).
        self.splitterEditor = QSplitter(self)
        self.splitterEditor.setOrientation(Qt.Horizontal)
        self.splitterEditor.setHandleWidth(6)
        self.splitterEditor.setChildrenCollapsible(True)

        # Container so the console toolbar can be inserted above the output.
        self.shellOutWidget = QWidget(self)
        self.shellOutWidget.setLayout(QVBoxLayout())
        self.shellOutWidget.layout().setContentsMargins(0, 0, 0, 0)
        self.shellOutWidget.layout().addWidget(self.shellOut)

        # Vertical splitter: output pane above, input shell below.
        self.splitter = QSplitter(self.splitterEditor)
        self.splitter.setOrientation(Qt.Vertical)
        self.splitter.setHandleWidth(3)
        self.splitter.setChildrenCollapsible(False)
        self.splitter.addWidget(self.shellOutWidget)
        self.splitter.addWidget(self.shell)

        # self.splitterEditor.addWidget(self.tabEditorWidget)

        # Splitter between the editor and the Object Inspector tree.
        self.splitterObj = QSplitter(self.splitterEditor)
        self.splitterObj.setHandleWidth(3)
        self.splitterObj.setOrientation(Qt.Horizontal)
        # self.splitterObj.setSizes([0, 0])
        # self.splitterObj.setStretchFactor(0, 1)

        self.widgetEditor = QWidget(self.splitterObj)
        self.widgetFind = QWidget(self)

        self.listClassMethod = QTreeWidget(self.splitterObj)
        self.listClassMethod.setColumnCount(2)
        objInspLabel = QCoreApplication.translate("PythonConsole", "Object Inspector")
        self.listClassMethod.setHeaderLabels([objInspLabel, ''])
        # Hidden column 1 carries line numbers / markers consumed by
        # onClickGoToLine().
        self.listClassMethod.setColumnHidden(1, True)
        self.listClassMethod.setAlternatingRowColors(True)

        # self.splitterEditor.addWidget(self.widgetEditor)
        # self.splitterObj.addWidget(self.listClassMethod)
        # self.splitterObj.addWidget(self.widgetEditor)

        # Hide side editor on start up
        self.splitterObj.hide()
        self.listClassMethod.hide()
        # Hide search widget on start up
        self.widgetFind.hide()

        icon_size = iface.iconSize(dockedToolbar=True) if iface else QSize(16, 16)

        sizes = self.splitter.sizes()
        self.splitter.setSizes(sizes)

        # ----------------Restore Settings------------------------------------

        self.restoreSettingsConsole()

        # ------------------Toolbar Editor-------------------------------------

        # Action for Open File
        openFileBt = QCoreApplication.translate("PythonConsole", "Open Script…")
        self.openFileButton = QAction(self)
        self.openFileButton.setCheckable(False)
        self.openFileButton.setEnabled(True)
        self.openFileButton.setIcon(QgsApplication.getThemeIcon("mActionScriptOpen.svg"))
        self.openFileButton.setMenuRole(QAction.PreferencesRole)
        self.openFileButton.setIconVisibleInMenu(True)
        self.openFileButton.setToolTip(openFileBt)
        self.openFileButton.setText(openFileBt)

        openExtEditorBt = QCoreApplication.translate("PythonConsole", "Open in External Editor")
        self.openInEditorButton = QAction(self)
        self.openInEditorButton.setCheckable(False)
        self.openInEditorButton.setEnabled(True)
        self.openInEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconShowEditorConsole.svg"))
        self.openInEditorButton.setMenuRole(QAction.PreferencesRole)
        self.openInEditorButton.setIconVisibleInMenu(True)
        self.openInEditorButton.setToolTip(openExtEditorBt)
        self.openInEditorButton.setText(openExtEditorBt)

        # Action for Save File
        saveFileBt = QCoreApplication.translate("PythonConsole", "Save")
        self.saveFileButton = QAction(self)
        self.saveFileButton.setCheckable(False)
        # Disabled until a tab actually has unsaved content.
        self.saveFileButton.setEnabled(False)
        self.saveFileButton.setIcon(QgsApplication.getThemeIcon("mActionFileSave.svg"))
        self.saveFileButton.setMenuRole(QAction.PreferencesRole)
        self.saveFileButton.setIconVisibleInMenu(True)
        self.saveFileButton.setToolTip(saveFileBt)
        self.saveFileButton.setText(saveFileBt)

        # Action for Save File As
        saveAsFileBt = QCoreApplication.translate("PythonConsole", "Save As…")
        self.saveAsFileButton = QAction(self)
        self.saveAsFileButton.setCheckable(False)
        self.saveAsFileButton.setEnabled(True)
        self.saveAsFileButton.setIcon(QgsApplication.getThemeIcon("mActionFileSaveAs.svg"))
        self.saveAsFileButton.setMenuRole(QAction.PreferencesRole)
        self.saveAsFileButton.setIconVisibleInMenu(True)
        self.saveAsFileButton.setToolTip(saveAsFileBt)
        self.saveAsFileButton.setText(saveAsFileBt)

        # Action Cut
        cutEditorBt = QCoreApplication.translate("PythonConsole", "Cut")
        self.cutEditorButton = QAction(self)
        self.cutEditorButton.setCheckable(False)
        self.cutEditorButton.setEnabled(True)
        self.cutEditorButton.setIcon(QgsApplication.getThemeIcon("mActionEditCut.svg"))
        self.cutEditorButton.setMenuRole(QAction.PreferencesRole)
        self.cutEditorButton.setIconVisibleInMenu(True)
        self.cutEditorButton.setToolTip(cutEditorBt)
        self.cutEditorButton.setText(cutEditorBt)

        # Action Copy
        copyEditorBt = QCoreApplication.translate("PythonConsole", "Copy")
        self.copyEditorButton = QAction(self)
        self.copyEditorButton.setCheckable(False)
        self.copyEditorButton.setEnabled(True)
        self.copyEditorButton.setIcon(QgsApplication.getThemeIcon("mActionEditCopy.svg"))
        self.copyEditorButton.setMenuRole(QAction.PreferencesRole)
        self.copyEditorButton.setIconVisibleInMenu(True)
        self.copyEditorButton.setToolTip(copyEditorBt)
        self.copyEditorButton.setText(copyEditorBt)

        # Action Paste
        pasteEditorBt = QCoreApplication.translate("PythonConsole", "Paste")
        self.pasteEditorButton = QAction(self)
        self.pasteEditorButton.setCheckable(False)
        self.pasteEditorButton.setEnabled(True)
        self.pasteEditorButton.setIcon(QgsApplication.getThemeIcon("mActionEditPaste.svg"))
        self.pasteEditorButton.setMenuRole(QAction.PreferencesRole)
        self.pasteEditorButton.setIconVisibleInMenu(True)
        self.pasteEditorButton.setToolTip(pasteEditorBt)
        self.pasteEditorButton.setText(pasteEditorBt)

        # Action Run Script (subprocess)
        runScriptEditorBt = QCoreApplication.translate("PythonConsole", "Run Script")
        self.runScriptEditorButton = QAction(self)
        self.runScriptEditorButton.setCheckable(False)
        self.runScriptEditorButton.setEnabled(True)
        self.runScriptEditorButton.setIcon(QgsApplication.getThemeIcon("mActionStart.svg"))
        self.runScriptEditorButton.setMenuRole(QAction.PreferencesRole)
        self.runScriptEditorButton.setIconVisibleInMenu(True)
        self.runScriptEditorButton.setToolTip(runScriptEditorBt)
        self.runScriptEditorButton.setText(runScriptEditorBt)

        # Action Comment (toggles comments on in the editor selection)
        commentEditorBt = QCoreApplication.translate("PythonConsole", "Comment")
        self.commentEditorButton = QAction(self)
        self.commentEditorButton.setCheckable(False)
        self.commentEditorButton.setEnabled(True)
        self.commentEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconCommentEditorConsole.svg"))
        self.commentEditorButton.setMenuRole(QAction.PreferencesRole)
        self.commentEditorButton.setIconVisibleInMenu(True)
        self.commentEditorButton.setToolTip(commentEditorBt)
        self.commentEditorButton.setText(commentEditorBt)

        # Action Uncomment (removes comments from the editor selection)
        uncommentEditorBt = QCoreApplication.translate("PythonConsole", "Uncomment")
        self.uncommentEditorButton = QAction(self)
        self.uncommentEditorButton.setCheckable(False)
        self.uncommentEditorButton.setEnabled(True)
        self.uncommentEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconUncommentEditorConsole.svg"))
        self.uncommentEditorButton.setMenuRole(QAction.PreferencesRole)
        self.uncommentEditorButton.setIconVisibleInMenu(True)
        self.uncommentEditorButton.setToolTip(uncommentEditorBt)
        self.uncommentEditorButton.setText(uncommentEditorBt)

        # Action for Object browser
        objList = QCoreApplication.translate("PythonConsole", "Object Inspector…")
        self.objectListButton = QAction(self)
        self.objectListButton.setCheckable(True)
        # Only enabled when the user switched the inspector on in the options.
        self.objectListButton.setEnabled(self.settings.value("pythonConsole/enableObjectInsp",
                                                             False, type=bool))
        self.objectListButton.setIcon(QgsApplication.getThemeIcon("console/iconClassBrowserConsole.svg"))
        self.objectListButton.setMenuRole(QAction.PreferencesRole)
        self.objectListButton.setIconVisibleInMenu(True)
        self.objectListButton.setToolTip(objList)
        self.objectListButton.setText(objList)

        # Action for Find text
        findText = QCoreApplication.translate("PythonConsole", "Find Text")
        self.findTextButton = QAction(self)
        self.findTextButton.setCheckable(True)
        self.findTextButton.setEnabled(True)
        self.findTextButton.setIcon(QgsApplication.getThemeIcon("console/iconSearchEditorConsole.svg"))
        self.findTextButton.setMenuRole(QAction.PreferencesRole)
        self.findTextButton.setIconVisibleInMenu(True)
        self.findTextButton.setToolTip(findText)
        self.findTextButton.setText(findText)

        # ----------------Toolbar Console-------------------------------------

        # Action Show Editor
        showEditor = QCoreApplication.translate("PythonConsole", "Show Editor")
        self.showEditorButton = QAction(self)
        self.showEditorButton.setEnabled(True)
        self.showEditorButton.setCheckable(True)
        self.showEditorButton.setIcon(QgsApplication.getThemeIcon("console/iconShowEditorConsole.svg"))
        self.showEditorButton.setMenuRole(QAction.PreferencesRole)
        self.showEditorButton.setIconVisibleInMenu(True)
        self.showEditorButton.setToolTip(showEditor)
        self.showEditorButton.setText(showEditor)

        # Action for Clear button
        clearBt = QCoreApplication.translate("PythonConsole", "Clear Console")
        self.clearButton = QAction(self)
        self.clearButton.setCheckable(False)
        self.clearButton.setEnabled(True)
        self.clearButton.setIcon(QgsApplication.getThemeIcon("console/iconClearConsole.svg"))
        self.clearButton.setMenuRole(QAction.PreferencesRole)
        self.clearButton.setIconVisibleInMenu(True)
        self.clearButton.setToolTip(clearBt)
        self.clearButton.setText(clearBt)

        # Action for settings
        optionsBt = QCoreApplication.translate("PythonConsole", "Options…")
        self.optionsButton = QAction(self)
        self.optionsButton.setCheckable(False)
        self.optionsButton.setEnabled(True)
        self.optionsButton.setIcon(QgsApplication.getThemeIcon("console/iconSettingsConsole.svg"))
        self.optionsButton.setMenuRole(QAction.PreferencesRole)
        self.optionsButton.setIconVisibleInMenu(True)
        self.optionsButton.setToolTip(optionsBt)
        self.optionsButton.setText(optionsBt)

        # Action for Run script
        runBt = QCoreApplication.translate("PythonConsole", "Run Command")
        self.runButton = QAction(self)
        self.runButton.setCheckable(False)
        self.runButton.setEnabled(True)
        self.runButton.setIcon(QgsApplication.getThemeIcon("mActionStart.svg"))
        self.runButton.setMenuRole(QAction.PreferencesRole)
        self.runButton.setIconVisibleInMenu(True)
        self.runButton.setToolTip(runBt)
        self.runButton.setText(runBt)

        # Help button and its drop-down menu (console help / API / cookbook)
        self.helpConsoleAction = QAction(self)
        self.helpConsoleAction.setEnabled(True)
        self.helpConsoleAction.setText(QCoreApplication.translate("PythonConsole", "Python Console Help"))
        self.helpAPIAction = QAction(self)
        self.helpAPIAction.setEnabled(True)
        self.helpAPIAction.setText(QCoreApplication.translate("PythonConsole", "PyQGIS API Documentation"))
        self.helpCookbookAction = QAction(self)
        self.helpCookbookAction.setEnabled(True)
        self.helpCookbookAction.setText(QCoreApplication.translate("PythonConsole", "PyQGIS Cookbook"))

        self.helpMenu = QMenu(self)
        self.helpMenu.addAction(self.helpConsoleAction)
        self.helpMenu.addAction(self.helpAPIAction)
        self.helpMenu.addAction(self.helpCookbookAction)

        helpBt = QCoreApplication.translate("PythonConsole", "Help…")
        self.helpButton = QToolButton(self)
        self.helpButton.setPopupMode(QToolButton.InstantPopup)
        self.helpButton.setEnabled(True)
        self.helpButton.setIcon(QgsApplication.getThemeIcon("console/iconHelpConsole.svg"))
        self.helpButton.setToolTip(helpBt)
        self.helpButton.setMenu(self.helpMenu)

        # Console toolbar (clear / run / show editor / options / help)
        self.toolBar = QToolBar()
        self.toolBar.setEnabled(True)
        self.toolBar.setFocusPolicy(Qt.NoFocus)
        self.toolBar.setContextMenuPolicy(Qt.DefaultContextMenu)
        self.toolBar.setLayoutDirection(Qt.LeftToRight)
        self.toolBar.setIconSize(icon_size)
        self.toolBar.setMovable(False)
        self.toolBar.setFloatable(False)
        self.toolBar.addAction(self.clearButton)
        self.toolBar.addAction(self.runButton)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.showEditorButton)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.optionsButton)
        self.toolBar.addWidget(self.helpButton)

        # Editor toolbar (file / run / clipboard / find / comment / inspector)
        self.toolBarEditor = QToolBar()
        self.toolBarEditor.setEnabled(False)
        self.toolBarEditor.setFocusPolicy(Qt.NoFocus)
        self.toolBarEditor.setContextMenuPolicy(Qt.DefaultContextMenu)
        self.toolBarEditor.setLayoutDirection(Qt.LeftToRight)
        self.toolBarEditor.setIconSize(icon_size)
        self.toolBarEditor.setMovable(False)
        self.toolBarEditor.setFloatable(False)
        self.toolBarEditor.addAction(self.openFileButton)
        self.toolBarEditor.addAction(self.openInEditorButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.saveFileButton)
        self.toolBarEditor.addAction(self.saveAsFileButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.runScriptEditorButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.cutEditorButton)
        self.toolBarEditor.addAction(self.copyEditorButton)
        self.toolBarEditor.addAction(self.pasteEditorButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.findTextButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.commentEditorButton)
        self.toolBarEditor.addAction(self.uncommentEditorButton)
        self.toolBarEditor.addSeparator()
        self.toolBarEditor.addAction(self.objectListButton)

        self.widgetButton = QWidget()
        sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.widgetButton.sizePolicy().hasHeightForWidth())
        self.widgetButton.setSizePolicy(sizePolicy)

        self.widgetButtonEditor = QWidget(self.widgetEditor)
        sizePolicy = QSizePolicy(QSizePolicy.Fixed, QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.widgetButtonEditor.sizePolicy().hasHeightForWidth())
        self.widgetButtonEditor.setSizePolicy(sizePolicy)

        sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.shellOut.sizePolicy().hasHeightForWidth())
        self.shellOut.setSizePolicy(sizePolicy)

        self.shellOut.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        self.shell.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)

        # ------------ Layout -------------------------------

        self.mainLayout = QGridLayout(self)
        self.mainLayout.setMargin(0)
        self.mainLayout.setSpacing(0)
        self.mainLayout.addWidget(self.widgetButton, 0, 0, 1, 1)
        self.mainLayout.addWidget(self.splitterEditor, 0, 1, 1, 1)

        # Put the console toolbar above the output pane.
        self.shellOutWidget.layout().insertWidget(0, self.toolBar)

        self.layoutEditor = QGridLayout(self.widgetEditor)
        self.layoutEditor.setMargin(0)
        self.layoutEditor.setSpacing(0)
        self.layoutEditor.addWidget(self.toolBarEditor, 0, 1, 1, 1)
        self.layoutEditor.addWidget(self.widgetButtonEditor, 1, 0, 2, 1)
        self.layoutEditor.addWidget(self.tabEditorWidget, 1, 1, 1, 1)
        self.layoutEditor.addWidget(self.widgetFind, 2, 1, 1, 1)

        # Layout for the find widget
        self.layoutFind = QGridLayout(self.widgetFind)
        self.layoutFind.setContentsMargins(0, 0, 0, 0)
        self.lineEditFind = QgsFilterLineEdit()
        self.lineEditFind.setShowSearchIcon(True)
        placeHolderTxt = QCoreApplication.translate("PythonConsole", "Enter text to find…")
        self.lineEditFind.setPlaceholderText(placeHolderTxt)
        self.toolBarFindText = QToolBar()
        self.toolBarFindText.setIconSize(icon_size)
        self.findNextButton = QAction(self)
        # Next/prev stay disabled until there is text to search for
        # (see _textFindChanged).
        self.findNextButton.setEnabled(False)
        toolTipfindNext = QCoreApplication.translate("PythonConsole", "Find Next")
        self.findNextButton.setToolTip(toolTipfindNext)
        self.findNextButton.setIcon(QgsApplication.getThemeIcon("console/iconSearchNextEditorConsole.svg"))
        self.findPrevButton = QAction(self)
        self.findPrevButton.setEnabled(False)
        toolTipfindPrev = QCoreApplication.translate("PythonConsole", "Find Previous")
        self.findPrevButton.setToolTip(toolTipfindPrev)
        self.findPrevButton.setIcon(QgsApplication.getThemeIcon("console/iconSearchPrevEditorConsole.svg"))
        self.caseSensitive = QCheckBox()
        caseSensTr = QCoreApplication.translate("PythonConsole", "Case Sensitive")
        self.caseSensitive.setText(caseSensTr)
        self.wholeWord = QCheckBox()
        wholeWordTr = QCoreApplication.translate("PythonConsole", "Whole Word")
        self.wholeWord.setText(wholeWordTr)
        self.wrapAround = QCheckBox()
        self.wrapAround.setChecked(True)
        wrapAroundTr = QCoreApplication.translate("PythonConsole", "Wrap Around")
        self.wrapAround.setText(wrapAroundTr)
        self.toolBarFindText.addWidget(self.lineEditFind)
        self.toolBarFindText.addAction(self.findPrevButton)
        self.toolBarFindText.addAction(self.findNextButton)
        self.toolBarFindText.addWidget(self.caseSensitive)
        self.toolBarFindText.addWidget(self.wholeWord)
        self.toolBarFindText.addWidget(self.wrapAround)
        self.layoutFind.addWidget(self.toolBarFindText, 0, 1, 1, 1)

        # ------------ Add first Tab in Editor -------------------------------

        # self.tabEditorWidget.newTabEditor(tabName='first', filename=None)

        # ------------ Signal -------------------------------

        self.findTextButton.triggered.connect(self._toggleFind)
        self.objectListButton.toggled.connect(self.toggleObjectListWidget)
        self.commentEditorButton.triggered.connect(self.commentCode)
        self.uncommentEditorButton.triggered.connect(self.uncommentCode)
        self.runScriptEditorButton.triggered.connect(self.runScriptEditor)
        self.cutEditorButton.triggered.connect(self.cutEditor)
        self.copyEditorButton.triggered.connect(self.copyEditor)
        self.pasteEditorButton.triggered.connect(self.pasteEditor)
        self.showEditorButton.toggled.connect(self.toggleEditor)
        self.clearButton.triggered.connect(self.shellOut.clearConsole)
        self.optionsButton.triggered.connect(self.openSettings)
        self.runButton.triggered.connect(self.shell.entered)
        self.openFileButton.triggered.connect(self.openScriptFile)
        self.openInEditorButton.triggered.connect(self.openScriptFileExtEditor)
        self.saveFileButton.triggered.connect(self.saveScriptFile)
        self.saveAsFileButton.triggered.connect(self.saveAsScriptFile)
        self.helpConsoleAction.triggered.connect(self.openHelpConsole)
        self.helpAPIAction.triggered.connect(self.openHelpAPI)
        self.helpCookbookAction.triggered.connect(self.openHelpCookbook)
        self.listClassMethod.itemClicked.connect(self.onClickGoToLine)
        self.lineEditFind.returnPressed.connect(self._findNext)
        self.findNextButton.triggered.connect(self._findNext)
        self.findPrevButton.triggered.connect(self._findPrev)
        self.lineEditFind.textChanged.connect(self._textFindChanged)

        # Editor-scoped keyboard shortcuts for the find bar.
        self.findScut = QShortcut(QKeySequence.Find, self.widgetEditor)
        self.findScut.setContext(Qt.WidgetWithChildrenShortcut)
        self.findScut.activated.connect(self._openFind)

        self.findNextScut = QShortcut(QKeySequence.FindNext, self.widgetEditor)
        self.findNextScut.setContext(Qt.WidgetWithChildrenShortcut)
        self.findNextScut.activated.connect(self._findNext)

        self.findPreviousScut = QShortcut(QKeySequence.FindPrevious, self.widgetEditor)
        self.findPreviousScut.setContext(Qt.WidgetWithChildrenShortcut)
        self.findPreviousScut.activated.connect(self._findPrev)

        # Escape on editor hides the find bar
        self.findScut = QShortcut(Qt.Key_Escape, self.widgetEditor)
        self.findScut.setContext(Qt.WidgetWithChildrenShortcut)
        self.findScut.activated.connect(self._closeFind)

        if iface is not None:
            # Let QGIS ask us (allowExit) before quitting with unsaved tabs.
            self.exit_blocker = ConsoleExitBlocker(self)
            iface.registerApplicationExitBlocker(self.exit_blocker)
def allowExit(self):
    """Return True if QGIS may exit; prompt to save modified editor tabs.

    Tabs are visited from last to first because they are removed as we
    go, so backwards iteration keeps the remaining indices valid.  For
    each modified tab the user may Save, Discard or Cancel; choosing
    Cancel (or a save that fails) vetoes the exit.
    """
    tab_count = self.tabEditorWidget.count()
    for i in range(tab_count):
        # iterate backwards through tabs, as we may be closing some as we go
        tab_index = tab_count - i - 1
        tab_widget = self.tabEditorWidget.widget(tab_index)
        if tab_widget.newEditor.isModified():
            ret = QMessageBox.question(self, self.tr("Save {}").format(self.tabEditorWidget.tabText(tab_index)),
                                       self.tr("There are unsaved changes in this script. Do you want to keep those?"),
                                       QMessageBox.Save | QMessageBox.Cancel | QMessageBox.Discard, QMessageBox.Cancel)
            if ret == QMessageBox.Save:
                tab_widget.save()
                if tab_widget.newEditor.isModified():
                    # save failed, treat as cancel
                    return False
            elif ret == QMessageBox.Discard:
                pass
            else:
                # Cancel (or dialog dismissed): abort the whole exit.
                return False
        self.tabEditorWidget.removeTab(tab_index)
    return True
def _toggleFind(self):
    # Toggle the find bar of the active editor tab.
    self.tabEditorWidget.currentWidget().newEditor.toggleFindWidget()

def _openFind(self):
    # Show the find bar (Ctrl+F / QKeySequence.Find shortcut).
    self.tabEditorWidget.currentWidget().newEditor.openFindWidget()

def _closeFind(self):
    # Hide the find bar (Escape shortcut).
    self.tabEditorWidget.currentWidget().newEditor.closeFindWidget()

def _findNext(self):
    # Search forward for the current find text.
    self.tabEditorWidget.currentWidget().newEditor.findText(True)

def _findPrev(self):
    # Search backward for the current find text.
    self.tabEditorWidget.currentWidget().newEditor.findText(False)

def _textFindChanged(self):
    """Keep next/prev actions in sync with the find field and search as-you-type."""
    if self.lineEditFind.text():
        self.findNextButton.setEnabled(True)
        self.findPrevButton.setEnabled(True)
        # Incremental search: jump to the first match without a status message.
        self.tabEditorWidget.currentWidget().newEditor.findText(True, showMessage=False, findFirst=True)
    else:
        # Empty field: clear any "not found" styling and disable navigation.
        self.lineEditFind.setStyleSheet('')
        self.findNextButton.setEnabled(False)
        self.findPrevButton.setEnabled(False)
def onClickGoToLine(self, item, column):
    """Jump to the source line an Object Inspector entry points at.

    Hidden column 1 of *item* holds either the literal marker
    'syntaxError' or a line number; column 0 holds the object name,
    optionally followed by extra text after a space.
    """
    editor = self.tabEditorWidget.currentWidget().newEditor
    marker = item.text(1)
    if marker == 'syntaxError':
        # Entry flags a syntax error: re-check, and save when clean.
        if editor.syntaxCheck() and not editor.isReadOnly():
            self.tabEditorWidget.currentWidget().save()
        return
    line_no = int(marker)
    # Keep only the object name: everything before the first space.
    target_name = str(item.text(0)).split(' ', 1)[0]
    editor.goToLine(str.encode(target_name), line_no)
def toggleEditor(self, checked):
    """Show or hide the script-editor pane (Show Editor toolbar toggle).

    Parameters
    ----------
    checked : bool
        New state of the toggle action; True shows the editor splitter.
    """
    self.splitterObj.show() if checked else self.splitterObj.hide()
    # BUG FIX: the guard was inverted (`if not self.tabEditorWidget:`),
    # which dereferenced the tab widget precisely when it was missing or
    # falsy and would raise AttributeError on None.  Only touch the tab
    # widget when it actually exists.
    if self.tabEditorWidget:
        self.tabEditorWidget.enableToolBarEditor(checked)
        self.tabEditorWidget.restoreTabsOrAddNew()
def toggleObjectListWidget(self, checked):
    # Show/hide the Object Inspector tree.
    self.listClassMethod.show() if checked else self.listClassMethod.hide()

def pasteEditor(self):
    # Paste the clipboard into the active editor tab.
    self.tabEditorWidget.currentWidget().newEditor.paste()

def cutEditor(self):
    # Cut the selection of the active editor tab to the clipboard.
    self.tabEditorWidget.currentWidget().newEditor.cut()

def copyEditor(self):
    # Copy the selection of the active editor tab to the clipboard.
    self.tabEditorWidget.currentWidget().newEditor.copy()

def runScriptEditor(self):
    # Execute the whole script of the active editor tab.
    self.tabEditorWidget.currentWidget().newEditor.runScriptCode()

def commentCode(self):
    # Comment out the selected lines in the active editor tab.
    self.tabEditorWidget.currentWidget().newEditor.commentEditorCode(True)

def uncommentCode(self):
    # Remove comment markers from the selected lines in the active tab.
    self.tabEditorWidget.currentWidget().newEditor.commentEditorCode(False)
def openScriptFileExtEditor(self):
    """Open the current tab's script in an external editor.

    Prefers the program named by the EDITOR environment variable; when
    that variable is unset, falls back to the desktop's default handler
    for the file.
    """
    import subprocess
    script_path = self.tabEditorWidget.currentWidget().path
    try:
        editor = os.environ['EDITOR']
    except KeyError:
        # No EDITOR configured: let the OS pick an application.
        QDesktopServices.openUrl(QUrl.fromLocalFile(script_path))
    else:
        subprocess.Popen([editor, script_path])
def openScriptFile(self):
    """Prompt for one or more ``.py`` files and open each in an editor tab.

    Files that are already open are focused instead of re-opened; the
    directory of the last opened file is remembered for the next dialog.
    """
    lastDirPath = self.settings.value("pythonConsole/lastDirPath", QDir.homePath())
    openFileTr = QCoreApplication.translate("PythonConsole", "Open File")
    fileList, selected_filter = QFileDialog.getOpenFileNames(
        self, openFileTr, lastDirPath, "Script file (*.py)")
    if fileList:
        for pyFile in fileList:
            for i in range(self.tabEditorWidget.count()):
                tabWidget = self.tabEditorWidget.widget(i)
                if tabWidget.path == pyFile:
                    # Already open: just switch to its tab.
                    self.tabEditorWidget.setCurrentWidget(tabWidget)
                    break
            else:
                # for/else: no existing tab matched, so open a new one.
                tabName = QFileInfo(pyFile).fileName()
                self.tabEditorWidget.newTabEditor(tabName, pyFile)

            lastDirPath = QFileInfo(pyFile).path()
            # BUG FIX: the full file path used to be stored under the
            # "lastDirPath" key while the computed directory was never
            # used; persist the directory so the next dialog starts there.
            self.settings.setValue("pythonConsole/lastDirPath", lastDirPath)
            self.updateTabListScript(pyFile, action='append')
def saveScriptFile(self):
    """Save the active tab's script; report I/O failures in the message bar."""
    tabWidget = self.tabEditorWidget.currentWidget()
    try:
        tabWidget.save()
    except (IOError, OSError) as error:
        # Surface the failure (level 2) in the editor's message bar
        # instead of raising into the Qt event loop.
        msgText = QCoreApplication.translate('PythonConsole',
                                             'The file <b>{0}</b> could not be saved. Error: {1}').format(tabWidget.path,
                                                                                                          error.strerror)
        self.callWidgetMessageBarEditor(msgText, 2, False)
def saveAsScriptFile(self, index=None):
    """Save the tab at *index* (default: the current tab) under a new name.

    For a never-saved tab a filename derived from the tab title is
    proposed in the last-used directory.  If the user picks a name but
    the save fails, the tab's previous path is restored; on success the
    old path is removed from the persisted script list.
    """
    tabWidget = self.tabEditorWidget.currentWidget()
    if not index:
        # NOTE(review): `not index` also remaps index 0 to the current
        # tab; harmless today since tabWidget above is always the current
        # widget, but confirm before relying on an explicit index=0.
        index = self.tabEditorWidget.currentIndex()
    if not tabWidget.path:
        # Tab was never saved: propose "<tab title>.py" in the last dir.
        fileName = self.tabEditorWidget.tabText(index) + '.py'
        folder = self.settings.value("pythonConsole/lastDirPath", QDir.homePath())
        pathFileName = os.path.join(folder, fileName)
        fileNone = True
    else:
        pathFileName = tabWidget.path
        fileNone = False
    saveAsFileTr = QCoreApplication.translate("PythonConsole", "Save File As")
    filename, filter = QFileDialog.getSaveFileName(self,
                                                   saveAsFileTr,
                                                   pathFileName, "Script file (*.py)")
    if filename:
        try:
            tabWidget.save(filename)
        except (IOError, OSError) as error:
            msgText = QCoreApplication.translate('PythonConsole',
                                                 'The file <b>{0}</b> could not be saved. Error: {1}').format(tabWidget.path,
                                                                                                              error.strerror)
            self.callWidgetMessageBarEditor(msgText, 2, False)
            # Save failed: restore the tab's previous path and bail out.
            if fileNone:
                tabWidget.path = None
            else:
                tabWidget.path = pathFileName
            return

        if not fileNone:
            # The script now lives under a new name; drop the old entry.
            self.updateTabListScript(pathFileName, action='remove')
def openHelpConsole(self):
    """Open the QGIS user-manual page about the Python console."""
    QgsHelp.openHelp("plugins/python_console.html")

def openHelpAPI(self):
    # Link to the PyQGIS API docs matching this QGIS major.minor version.
    m = re.search(r'^([0-9]+)\.([0-9]+)\.', Qgis.QGIS_VERSION)
    if m:
        QDesktopServices.openUrl(QUrl('https://qgis.org/pyqgis/{}.{}/'.format(m.group(1), m.group(2))))

def openHelpCookbook(self):
    # Link to the PyQGIS cookbook matching this QGIS major.minor version.
    m = re.search(r'^([0-9]+)\.([0-9]+)\.', Qgis.QGIS_VERSION)
    if m:
        QDesktopServices.openUrl(QUrl('https://docs.qgis.org/{}.{}/en/docs/pyqgis_developer_cookbook/index.html'.format(m.group(1), m.group(2))))

def openSettings(self):
    # Jump straight to the console page of the QGIS options dialog.
    iface.showOptionsDialog(iface.mainWindow(), currentPage='consoleOptions')
def updateSettings(self):
    """Re-apply user settings to the shell, the output pane and the editor."""
    self.shell.refreshSettingsShell()
    self.shellOut.refreshSettingsOutput()
    self.tabEditorWidget.refreshSettingsEditor()

def callWidgetMessageBar(self, text):
    # Show *text* in the console output's message bar.
    self.shellOut.widgetMessageBar(iface, text)

def callWidgetMessageBarEditor(self, text, level, timed):
    # Show *text* in the editor's message bar with the given severity/timeout.
    self.tabEditorWidget.widgetMessageBar(iface, text, level, timed)
def updateTabListScript(self, script, action=None):
    """Maintain the persisted list of scripts open in editor tabs.

    ``action='append'`` records *script* (without duplicates),
    ``action='remove'`` drops it, and any other value resets the list.
    The result is written to the "pythonConsole/tabScripts" setting.
    """
    if action == 'remove':
        self.tabListScript.remove(script)
    elif action == 'append':
        # Fall back to a fresh list when the stored value is empty/None.
        current = self.tabListScript or []
        if script not in current:
            current.append(script)
        self.tabListScript = current
    else:
        # No recognised action: start over with an empty list.
        self.tabListScript = []
    self.settings.setValue("pythonConsole/tabScripts",
                           self.tabListScript)
def saveSettingsConsole(self):
    """Persist splitter layout and write the shell's history file."""
    self.settings.setValue("pythonConsole/splitterConsole", self.splitter.saveState())
    self.settings.setValue("pythonConsole/splitterObj", self.splitterObj.saveState())
    self.settings.setValue("pythonConsole/splitterEditor", self.splitterEditor.saveState())
    self.shell.writeHistoryFile(True)

def restoreSettingsConsole(self):
    """Restore splitter layout and the remembered editor-tab scripts."""
    # Restore the list of scripts that were open in editor tabs.
    storedTabScripts = self.settings.value("pythonConsole/tabScripts", [])
    self.tabListScript = storedTabScripts
    self.splitter.restoreState(self.settings.value("pythonConsole/splitterConsole", QByteArray()))
    self.splitterEditor.restoreState(self.settings.value("pythonConsole/splitterEditor", QByteArray()))
    self.splitterObj.restoreState(self.settings.value("pythonConsole/splitterObj", QByteArray()))
if __name__ == '__main__':
    # Stand-alone smoke test: run the console widget outside of QGIS.
    a = QApplication(sys.argv)
    console = PythonConsoleWidget()
    console.show()
    a.exec_()
|
ghtmtt/QGIS
|
python/console/console.py
|
Python
|
gpl-2.0
| 36,846
|
import csv
import heapq
# csvfile1 = file('V2.0resize.csv','rb')
# csvfile2 = file('V2.0motion20_30_len_pred.csv','rb')
# csvfile3 = file('V2.0motion40_30_len_pred.csv','rb')
# csvfile4 = file('V2.0motion60_30_len_pred.csv','rb')
# Interleave four per-frame prediction CSVs into one output file: each
# output group holds the first two columns of one row from every input,
# in input order.
#
# The original used the removed Python-2-only ``file()`` builtin and the
# ``print`` statement, and leaked every handle except the first one; the
# ``with`` block closes all five files even on error.
input_paths = ['V2.0resize.csv',
               'V2.0motion20_15_pred.csv',
               'V2.0motion20_30_pred.csv',
               'V2.0motion20_45_pred.csv']
with open(input_paths[0]) as f1, open(input_paths[1]) as f2, \
     open(input_paths[2]) as f3, open(input_paths[3]) as f4, \
     open('V2.0multi_data_theta_pred.csv', 'w') as fileinfo:
    readers = [csv.reader(f) for f in (f1, f2, f3, f4)]
    # zip stops at the shortest file, matching the original behaviour.
    for rows in zip(*readers):
        record = '\n'.join(row[0] + ',' + row[1] for row in rows)
        print(record)
        fileinfo.write(record + '\n')
|
DaniellaAngel/MachineLearning
|
motion/merge_csv.py
|
Python
|
apache-2.0
| 975
|
#!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the --help flag of Google C++ Testing Framework.
SYNOPSIS
gtest_help_test.py --build_dir=BUILD/DIR
# where BUILD/DIR contains the built gtest_help_test_ file.
gtest_help_test.py
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'

PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_help_test_')
FLAG_PREFIX = '--gtest_'
CATCH_EXCEPTIONS_FLAG = FLAG_PREFIX + 'catch_exceptions'
DEATH_TEST_STYLE_FLAG = FLAG_PREFIX + 'death_test_style'
UNKNOWN_FLAG = FLAG_PREFIX + 'unknown_flag_for_testing'
LIST_TESTS_FLAG = FLAG_PREFIX + 'list_tests'
# Malformed spellings of --gtest_list_tests that should still trigger the
# help message rather than run the tests.
INCORRECT_FLAG_VARIANTS = [re.sub('^--', '-', LIST_TESTS_FLAG),
                           re.sub('^--', '/', LIST_TESTS_FLAG),
                           re.sub('_', '-', LIST_TESTS_FLAG)]

INTERNAL_FLAG_FOR_TESTING = FLAG_PREFIX + 'internal_flag_for_testing'

# NOTE: runs the test binary once at import time to detect whether death
# tests are compiled in.
SUPPORTS_DEATH_TESTS = "DeathTest" in gtest_test_utils.Subprocess(
    [PROGRAM_PATH, LIST_TESTS_FLAG]).output

# The help message must match this regex.
HELP_REGEX = re.compile(
    FLAG_PREFIX + r'list_tests.*' +
    FLAG_PREFIX + r'filter=.*' +
    FLAG_PREFIX + r'also_run_disabled_tests.*' +
    FLAG_PREFIX + r'repeat=.*' +
    FLAG_PREFIX + r'shuffle.*' +
    FLAG_PREFIX + r'random_seed=.*' +
    FLAG_PREFIX + r'color=.*' +
    FLAG_PREFIX + r'print_time.*' +
    FLAG_PREFIX + r'output=.*' +
    FLAG_PREFIX + r'break_on_failure.*' +
    FLAG_PREFIX + r'throw_on_failure.*',
    re.DOTALL)
def RunWithFlag(flag):
    """Runs gtest_help_test_ with the given flag.

    Args:
      flag: the command-line flag to pass to gtest_help_test_, or None.

    Returns:
      the exit code and the text output as a tuple.
    """
    command = [PROGRAM_PATH] if flag is None else [PROGRAM_PATH, flag]
    child = gtest_test_utils.Subprocess(command)
    return child.exit_code, child.output
class GTestHelpTest(gtest_test_utils.TestCase):
    """Tests the --help flag and its equivalent forms."""

    def TestHelpFlag(self, flag):
        """Verifies correct behavior when a help flag is specified.

        The right message must be printed and the tests must be
        skipped when the given flag is specified.

        Args:
          flag: A flag to pass to the binary or None.
        """
        exit_code, output = RunWithFlag(flag)
        # assertEqual/assertTrue replace the deprecated aliases
        # assertEquals/assert_, which were removed in Python 3.12.
        self.assertEqual(0, exit_code)
        self.assertTrue(HELP_REGEX.search(output), output)
        # --gtest_catch_exceptions is only advertised on Windows.
        if IS_WINDOWS:
            self.assertTrue(CATCH_EXCEPTIONS_FLAG in output, output)
        else:
            self.assertTrue(CATCH_EXCEPTIONS_FLAG not in output, output)
        # Death-test flags are advertised only when death tests are built in.
        if SUPPORTS_DEATH_TESTS and not IS_WINDOWS:
            self.assertTrue(DEATH_TEST_STYLE_FLAG in output, output)
        else:
            self.assertTrue(DEATH_TEST_STYLE_FLAG not in output, output)

    def TestNonHelpFlag(self, flag):
        """Verifies correct behavior when no help flag is specified.

        Verifies that when no help flag is specified, the tests are run
        and the help message is not printed.

        Args:
          flag: A flag to pass to the binary or None.
        """
        exit_code, output = RunWithFlag(flag)
        self.assertTrue(exit_code != 0)
        self.assertTrue(not HELP_REGEX.search(output), output)

    def testPrintsHelpWithFullFlag(self):
        self.TestHelpFlag('--help')

    def testPrintsHelpWithShortFlag(self):
        self.TestHelpFlag('-h')

    def testPrintsHelpWithQuestionFlag(self):
        self.TestHelpFlag('-?')

    def testPrintsHelpWithWindowsStyleQuestionFlag(self):
        self.TestHelpFlag('/?')

    def testPrintsHelpWithUnrecognizedGoogleTestFlag(self):
        self.TestHelpFlag(UNKNOWN_FLAG)

    def testPrintsHelpWithIncorrectFlagStyle(self):
        for incorrect_flag in INCORRECT_FLAG_VARIANTS:
            self.TestHelpFlag(incorrect_flag)

    def testRunsTestsWithoutHelpFlag(self):
        """Verifies that when no help flag is specified, the tests are run
        and the help message is not printed."""
        self.TestNonHelpFlag(None)

    def testRunsTestsWithGtestInternalFlag(self):
        """Verifies that the tests are run and no help message is printed when
        a flag starting with Google Test prefix and 'internal_' is supplied."""
        self.TestNonHelpFlag(INTERNAL_FLAG_FOR_TESTING)
if __name__ == '__main__':
    # gtest_test_utils.Main wraps unittest.main with gtest-specific setup.
    gtest_test_utils.Main()
|
sfiera/googletest
|
test/gtest_help_test.py
|
Python
|
bsd-3-clause
| 5,754
|
# -*- coding: utf-8 -*-
""" standard """
from random import randint
import re
""" custom """
from examples.working_init import *
from threatconnect.Config.ResourceType import ResourceType
#
# CHANGE FOR YOUR TESTING ENVIRONMENT
# - These incidents must be created before running this script
#
owner = 'Example Community'  # org or community
lu_id = 34  # incident id for loop update
mu_id = 35  # incident id for manual update
# dl_id = 999999  # threat id to delete
adversary_id = 5  # adversary resource id to associate with incident
victim_id = 1  # victim resource id to associate with incident
ip_address = '10.20.30.40'  # IP address indicator to associate with the incident
rn = randint(1, 1000)  # random number used to make test names unique
def main():
    """Demonstrate loop-update, add, manual-update and delete commits for incidents."""
    # (Optional) SET THREAT CONNECT LOG (TCL) LEVEL
    tc.set_tcl_file('log/tc.log', 'debug')
    tc.set_tcl_console_level('critical')

    # (Required) Instantiate a Resource Object
    resources = tc.incidents()

    #
    # (Optional) retrieve results from API and update selected resource in loop
    #

    # filters can be set to limit search results
    try:
        filter1 = resources.add_filter()
        filter1.add_owner(owner)  # filter on owner
    except AttributeError as e:
        print('Error: {0!s}'.format(e))
        sys.exit(1)

    try:
        resources.retrieve()
    except RuntimeError as e:
        print('Error: {0!s}'.format(e))
        sys.exit(1)

    for res in resources:
        # a particular resource can be matched by ID, Name or any other supported attribute
        if res.id == lu_id:
            #
            # once a resource is matched any metadata on that resource can be updated
            #
            res.set_name('LU Incident #{0:d}'.format(rn))

            # additional properties can be updated
            res.set_event_date('2015-03-{0:d}T00:00:00Z'.format(randint(1, 30)))

            #
            # working with indicator associations
            #

            # existing indicator associations can be retrieved and iterated through
            for association in res.indicator_associations:
                # add delete flag to all indicator association that have a confidence under 10
                if association.confidence < 10:
                    res.disassociate_indicator(association.resource_type, association.indicator)

            # indicator associations can be added to a resource by providing the resource type and value
            res.associate_indicator(ResourceType.ADDRESSES, ip_address)

            #
            # working with group associations
            #

            # existing group associations can be retrieved and iterated through
            for association in res.group_associations:
                # add delete flag to all group association that match LU
                if re.findall('LU', association.name):
                    res.disassociate_group(association.resource_type, association.id)

            # group associations can be added to a resource by providing the resource type and id
            res.associate_group(ResourceType.ADVERSARIES, adversary_id)

            #
            # working with victim associations
            #

            # existing victim associations can be retrieved and iterated through
            for association in res.victim_associations:
                # add delete flag to all victim association that match LU
                if re.findall('LU', association.name):
                    res.disassociate_victim(association.id)

            # victim associations can be added to a resource by providing the resource id
            res.associate_victim(victim_id)

            #
            # working with attributes
            #

            # existing attributes can be loaded into the resource and iterated through
            res.load_attributes()
            for attribute in res.attributes:
                # add delete flag to all attributes that have 'test' in the value.
                if re.findall('test', attribute.value):
                    res.delete_attribute(attribute.id)
                # add update flag to all attributes that have 'update' in the value.
                if re.findall('update', attribute.value):
                    res.update_attribute(attribute.id, 'updated attribute #{0:d}'.format(rn))

            # attributes can be added to a resource by providing the attribute type and value
            res.add_attribute('Description', 'test attribute #{0:d}'.format(rn))

            #
            # working with tags
            #

            # existing tags can be loaded into the resource and iterated through
            res.load_tags()
            for tag in res.tags:
                # add delete flag to all tags that have 'DELETE' in the name.
                if re.findall('DELETE', tag.name):
                    res.delete_tag(tag.name)

            # tags can be added to a resource by providing the tags value
            res.add_tag('DELETE #{0:d}'.format(rn))

            # (Required) commit this resource
            try:
                print('Updating resource {0!s}.'.format(res.name))
                res.commit()
            except RuntimeError as e:
                print('Error: {0!s}'.format(e))
                sys.exit(1)

        #
        # (Optional) delete resource if required
        #

        # delete to any resource that has 'DELETE' in the name.
        elif re.findall('DELETE', res.name):
            try:
                print('Deleting resource {0!s}.'.format(res.name))
                res.delete()  # this action is equivalent to commit
            except RuntimeError as e:
                print('Error: {0!s}'.format(e))
                sys.exit(1)

    #
    # (Optional) ADD RESOURCE EXAMPLE
    #

    # new resources can be added with the resource add method
    resource = resources.add('DELETE #{0:d}'.format(rn), owner)

    # additional properties can be added
    resource.set_event_date('2015-03-{0:d}T00:00:00Z'.format(randint(1, 30)))

    # attributes can be added to the new resource
    resource.add_attribute('Description', 'Delete Example #{0:d}'.format(rn))

    # tags can be added to the new resource
    resource.add_tag('TAG #{0:d}'.format(rn))

    # the security label can be set on the new resource
    resource.set_security_label('TLP Green')

    # commit this resource and add attributes, tags and security labels
    try:
        print('Adding resource {0!s}.'.format(resource.name))
        resource.commit()
    except RuntimeError as e:
        print('Error: {0!s}'.format(e))
        sys.exit(1)

    #
    # (Optional) UPDATE RESOURCE EXAMPLE
    #

    # existing resources can also be updated with the resource add method
    resource = resources.add('MU Incident #{0:d}'.format(rn), owner)  # this will overwrite exising resource name
    resource.set_id(mu_id)  # set the id to the existing resource

    # additional properties can be updated
    resource.set_event_date('2015-03-{0:d}T00:00:00Z'.format(randint(1, 30)))

    # existing attributes can be loaded for modification or deletion
    resource.load_attributes()
    for attribute in resource.attributes:
        if attribute.type == 'Description':
            resource.delete_attribute(attribute.id)

    # attributes can be added to the existing resource
    resource.add_attribute('Description', 'Manual Update Example #{0:d}'.format(rn))

    # existing tags can be loaded for modification or deletion
    resource.load_tags()
    for tag in resource.tags:
        resource.delete_tag(tag.name)

    # tags can be added to the existing resource
    resource.add_tag('TAG #{0:d}'.format(rn))

    # commit this resource and add attributes, tags and security labels
    try:
        print('Updating resource {0!s}.'.format(resource.name))
        resource.commit()
    except RuntimeError as e:
        print('Error: {0!s}'.format(e))
        sys.exit(1)

    #
    # (Optional) DELETE RESOURCE EXAMPLE
    #

    # resources can be deleted with the resource add method
    # resource = resources.add(''.format(rn), owner)  # a valid resource name is not required
    # resource.set_id(dl_id)
    #
    # # delete this resource
    # try:
    #     resource.delete()
    # except RuntimeError as e:
    #     print(e)

    # (Optional) DISPLAY A COMMIT REPORT
    print(tc.report.stats)

    # display any failed api calls
    for fail in tc.report.failures:
        print(fail)
if __name__ == "__main__":
    main()
    sys.exit()
|
percipient/threatconnect-python
|
examples/commit/incidents_commit.py
|
Python
|
apache-2.0
| 8,526
|
# SPDX-License-Identifier: MIT
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2020 Tobias Gruetzmacher
# Copyright (C) 2019-2020 Daniel Ring
from re import compile, escape, IGNORECASE
from ..scraper import _BasicScraper, _ParserScraper
from ..util import tagre
from ..helpers import bounceStarter
from .common import _ComicControlScraper, _WPNavi, _WPNaviIn, _WPWebcomic
class WapsiSquare(_WPNaviIn):
    """Scraper for the Wapsi Square webcomic."""
    url = 'http://wapsisquare.com/'
    firstStripUrl = url + 'comic/09092001/'

    def shouldSkipUrl(self, url, data):
        """Skip pages without images."""
        return data.xpath('//iframe')  # videos
class WastedTalent(_ParserScraper):
    """Scraper for the Wasted Talent webcomic."""
    url = 'http://www.wastedtalent.ca/'
    stripUrl = url + 'comic/%s'
    firstStripUrl = stripUrl % 'anime-crack'
    imageSearch = '//div[d:class("comic_content")]/img'
    prevSearch = '//li[d:class("previous")]/a'
    multipleImagesPerStrip = True
class WebcomicName(_ParserScraper):
    """Scraper for the webcomic name comic (hosted on Tumblr)."""
    url = 'https://webcomicname.com/'
    imageSearch = '//figure[d:class("tmblr-full")]//img'
    prevSearch = '//a[d:class("next")]'
    multipleImagesPerStrip = True
class Weregeek(_ParserScraper):
    """Scraper for the Weregeek webcomic."""
    url = 'http://www.weregeek.com/'
    stripUrl = url + '%s/'
    firstStripUrl = stripUrl % '2006/11/27'
    imageSearch = '//div[@id="comic"]/img'
    prevSearch = '//a[./img[@alt="Previous"]]'
    help = 'Index format: yyyy/mm/dd'
class WereIWolf(_ParserScraper):
stripUrl = 'https://wolfwares.ca/comics/Were I wolf/strip2.php?name=%s&start=%s'
url = stripUrl % ('4 Black and White - part 3', 'latest')
firstStripUrl = stripUrl % ('1 Sirens', '0')
imageSearch = '//img[contains(@src, "ROW")]'
prevSearch = '//a[./img[contains(@src, "previous")]]'
multipleImagesPerStrip = True
endOfLife = True
chapters = ('1 Sirens',
'2 Black and White',
'3 Black and White - Princess and Knight',
'4 Black and White - part 3')
def namer(self, imageUrl, pageUrl):
# Prepend chapter number to image filename
for chapter in self.chapters:
if chapter in pageUrl:
chapterNum = chapter[0]
return chapterNum + '_' + imageUrl.rsplit('/', 1)[-1]
def getPrevUrl(self, url, data):
# Fix missing navigation links between chapters
if url == self.stripUrl % (self.chapters[3], '0'):
return self.stripUrl % (self.chapters[2], 'latest')
if url == self.stripUrl % (self.chapters[2], '0'):
return self.stripUrl % (self.chapters[1], 'latest')
if url == self.stripUrl % (self.chapters[1], '0'):
return self.stripUrl % (self.chapters[0], 'latest')
return super(WereIWolf, self).getPrevUrl(url, data)
def getIndexStripUrl(self, index):
# Get comic strip URL from index
index = index.split('-')
return self.stripUrl % (index[0], index[1])
class WhiteNoise(_WPWebcomic):
    """Scraper for the White Noise webcomic."""
    url = 'http://whitenoisecomic.com/'
    stripUrl = url + 'comic/%s/'
    firstStripUrl = stripUrl % 'book-one'
    imageSearch = '//div[@id="comic"]//img'
class WhiteNoiseLee(_ComicControlScraper):
    """Scraper for Lee's White Noise webcomic."""
    url = 'http://www.white-noise-comic.com/'
    stripUrl = url + 'comic/%s'
    firstStripUrl = stripUrl % '1-0'
    starter = bounceStarter

    def namer(self, imageUrl, pageUrl):
        """Name images after the page slug, keeping the image extension."""
        return pageUrl.rsplit('/', 1)[-1] + '.' + imageUrl.rsplit('.', 1)[-1]
class Whomp(_ComicControlScraper):
    """Scraper for the Whomp! webcomic."""
    url = 'http://www.whompcomic.com/'
    firstStripUrl = url + 'comic/06152010'
    textSearch = '//img[@id="cc-comic"]/@title'
class WhyTheLongFace(_BasicScraper):
    """Scraper for the Why The Long Face webcomic."""
    baseUrl = 'http://www.absurdnotions.org/'
    rurl = escape(baseUrl)
    url = baseUrl + 'wtlf200709.html'
    stripUrl = baseUrl + 'wtlf%s.html'
    firstStripUrl = stripUrl % '200306'
    # Site markup uses inconsistent case, hence IGNORECASE.
    imageSearch = compile(r'<img src="(%swtlf.+?|lf\d+.\w{1,4})"' % rurl,
                          IGNORECASE)
    multipleImagesPerStrip = True
    prevSearch = compile(r'HREF="(.+?)"><IMG SRC="nprev.gif" ')
    help = 'Index format: yyyymm'
class Wigu(_ParserScraper):
    """Scraper for the finished Wigu webcomic."""
    stripUrl = 'http://www.wigucomics.com/adventures/index.php?comic=%s'
    url = stripUrl % '-1'
    firstStripUrl = stripUrl % '1'
    imageSearch = '//div[@id="comic"]//img[contains(@src, "/comics/")]'
    prevSearch = '//a[@alt="go back"]'
    endOfLife = True
    help = 'Index format: n'
class WildeLife(_ComicControlScraper):
    """Scraper for the Wilde Life webcomic."""
    url = 'http://www.wildelifecomic.com/'
    stripUrl = url + 'comic/%s'
    firstStripUrl = stripUrl % '1'
class WintersLight(_ParserScraper):
    """Scraper for the Winter's Light webcomic."""
    url = 'https://winterslight.anaria.net/'
    stripUrl = url + 'comic/%s'
    firstStripUrl = stripUrl % 'winterslight00'
    imageSearch = '//img[contains(@src, "comic/pages/")]'
    prevSearch = '//a[contains(text(), "Previous")]'
class Wonderella(_BasicScraper):
    """Scraper for The Non-Adventures of Wonderella."""
    url = 'http://nonadventures.com/'
    rurl = escape(url)
    stripUrl = url + '%s/'
    firstStripUrl = stripUrl % '2006/09/09/the-torment-of-a-thousand-yesterdays'
    imageSearch = compile(tagre("div", "id", r"comic", quote=r'["\']') +
                          r"\s*" +
                          tagre("img", "src", r'(%scomics/[^"]+)' % rurl))
    prevSearch = compile(tagre("a", "href", r'(%s\d+/\d+/\d+/[^"]+)' % rurl,
                               after="prev"))
    help = 'Index format: yyyy/mm/dd/name'
class Wondermark(_BasicScraper):
    """Scraper for the Wondermark webcomic."""
    url = 'http://wondermark.com/'
    stripUrl = url + '%s/'
    firstStripUrl = stripUrl % '001'
    imageSearch = compile(r'<img src="(http://wondermark.com/c/.+?)"')
    prevSearch = compile(r'<a href="(.+?)" rel="prev">')
    help = 'Index format: nnn'
class WorldOfMrToast(_BasicScraper):
    """Scraper for the finished The World of Mr. Toast webcomic."""
    baseUrl = 'http://www.theimaginaryworld.com/'
    url = baseUrl + 'mrTcomicA.html'
    imageSearch = compile(tagre("img", "src", r'(comic[^"]+)'))
    # list the archive links since there is no prev/next navigation
    prevurls = (
        url,
        baseUrl + 'mrTcomicW02.html',
        baseUrl + 'mrTcomicW01.html',
        baseUrl + 'mrGcomic03.html',
        baseUrl + 'mrGcomic02.html',
        baseUrl + 'mrGcomic01.html',
        baseUrl + 'mrTcomicT05.html',
        baseUrl + 'mrTcomicT04.html',
        baseUrl + 'mrTcomicT03.html',
        baseUrl + 'mrTcomicT02.html',
        baseUrl + 'mrTcomicT01.html',
        baseUrl + 'mrTcomicIW3.html',
        baseUrl + 'mrTcomicIW2.html',
        baseUrl + 'mrTcomicIW1.html',
    )
    firstStripUrl = prevurls[-1]
    multipleImagesPerStrip = True
    endOfLife = True

    def getPrevUrl(self, url, data):
        """Walk the hard-coded archive list instead of page navigation."""
        # NOTE(review): .index() raises ValueError for a URL not in
        # prevurls — presumably every reachable page is listed; verify.
        idx = self.prevurls.index(url)
        try:
            return self.prevurls[idx + 1]
        except IndexError:
            return None
class WormWorldSaga(_BasicScraper):
    """Scraper for the Worm World Saga webcomic (chapter-based navigation)."""
    url = 'http://www.wormworldsaga.com/'
    stripUrl = url + 'chapters/%s/index.php'
    firstStripUrl = stripUrl % 'chapter01/EN'
    imageSearch = (
        compile(tagre("img", "src", r'(images/CH\d+_\d+\.[^"]+)')),
        compile(tagre("img", "src", r'(panels/CH\d+_[^"]+)')),
    )
    latestChapter = 5
    multipleImagesPerStrip = True

    def starter(self):
        """Start at the latest chapter page for this scraper's language."""
        return '%schapters/chapter%02d/%s/index.php' % (
            self.url, self.latestChapter, self.lang.upper())

    def getPrevUrl(self, url, data):
        """Find previous URL.

        Bug fix: the original hard-coded chapter04->03->02->01 links, so
        chapter05 (the latest per ``latestChapter``) had no previous URL
        at all.  Derive the previous chapter numerically instead, which
        also keeps working for future chapters.
        """
        match = compile(r'chapter(\d+)').search(url)
        if match:
            num = int(match.group(1))
            if num > 1:
                return url.replace('chapter%02d' % num,
                                   'chapter%02d' % (num - 1))
        return None
class WormWorldSagaFrench(WormWorldSaga):
    """French translation of the Worm World Saga."""
    lang = 'fr'


class WormWorldSagaGerman(WormWorldSaga):
    """German translation of the Worm World Saga."""
    lang = 'de'


class WormWorldSagaSpanish(WormWorldSaga):
    """Spanish translation of the Worm World Saga."""
    lang = 'es'
class Wrongside(_WPNavi):
    """Scraper for the Wrongside webcomic."""
    url = 'http://www.ayzewi.com/comic/'
    stripUrl = url + '?comic=%s'
    firstStripUrl = stripUrl % 'intro-2'
|
webcomics/dosage
|
dosagelib/plugins/w.py
|
Python
|
mit
| 7,981
|
#from http://stackoverflow.com/questions/20076030/lack-of-speedup-and-erroneous-results-with-openmp-and-cython/20183767#20183767
#pythran export summation(float32[][], float32[], float32[][])
#runas import numpy as np ; N=30 ; pos = np.arange(N*3., dtype=np.float32).reshape((N,3)) ; w = np.ones(N, dtype=np.float32) ; p = np.arange(N*3., dtype=np.float32).reshape((N,3)) ; summation(pos, w, p)
#bench import numpy as np ; N=300 ; pos = np.arange(1, N*3. + 1, dtype=np.float32).reshape((N,3)) ; w = np.ones(N, dtype=np.float32) ; p = np.arange(N*3., dtype=np.float32).reshape((N,3)) ; summation(pos, w, p)
import numpy as np
def summation(pos, weights, points):
    """Accumulate inverse-distance-weighted sums of *pos* for every point.

    Args:
        pos: (M, 3) array of source positions.
        weights: (M,) array of per-source weights.
        points: (N, 3) array of evaluation points.

    Returns:
        Tuple ``(sum_array, sum_array3d)`` where ``sum_array[i]`` is
        ``sum_j weights[j] / |points[i] - pos[j]|`` and ``sum_array3d[i]``
        is ``sum_j weights[j] * (points[i] - pos[j])``.

    Note: no point may coincide with a source position (division by zero).
    """
    n_points = len(points)
    n_weights = len(weights)
    sum_array3d = np.zeros((n_points, 3))

    def compute(i):
        # Hoist the point coordinates out of the inner loop.
        pxi = points[i, 0]
        pyi = points[i, 1]
        pzi = points[i, 2]
        total = 0.0
        # range() replaces the Python-2-only xrange(); the original's
        # unused xj/yj/zj locals are dropped.
        for j in range(n_weights):
            weight_j = weights[j]
            dx = pxi - pos[j, 0]
            dy = pyi - pos[j, 1]
            dz = pzi - pos[j, 2]
            dr = 1.0 / np.sqrt(dx * dx + dy * dy + dz * dz)
            total += weight_j * dr
            sum_array3d[i, 0] += weight_j * dx
            sum_array3d[i, 1] += weight_j * dy
            sum_array3d[i, 2] += weight_j * dz
        return total

    sum_array = np.array([compute(i) for i in range(n_points)])
    return sum_array, sum_array3d
|
artas360/pythran
|
pythran/tests/cases/sumarray3d.py
|
Python
|
bsd-3-clause
| 1,367
|
import codecs
import os
import sys
from distutils.util import convert_path
from fnmatch import fnmatchcase
from setuptools import setup, find_packages
def read(fname):
    """Return the text contents of *fname*, resolved relative to this file.

    Bug fix: the original never closed the handle returned by
    codecs.open(); use a context manager so it is always released.
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    with codecs.open(path) as handle:
        return handle.read()
# Provided as an attribute, so you can append to these instead
# of replicating them:
standard_exclude = ["*.py", "*.pyc", "*$py.class", "*~", ".*", "*.bak"]
standard_exclude_directories = [
    ".*", "CVS", "_darcs", "./build", "./dist", "EGG-INFO", "*.egg-info"
]
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
# Note: you may want to copy this into your setup.py file verbatim, as
# you can't import this from another package, when you don't know if
# that package is installed yet.
def find_package_data(
    where=".",
    package="",
    exclude=standard_exclude,
    exclude_directories=standard_exclude_directories,
    only_in_packages=True,
    show_ignored=False
):
    """
    Return a dictionary suitable for use in ``package_data``
    in a distutils ``setup.py`` file.

    The dictionary looks like::

        {"package": [files]}

    Where ``files`` is a list of all the files in that package that
    don"t match anything in ``exclude``.

    If ``only_in_packages`` is true, then top-level directories that
    are not packages won"t be included (but directories under packages
    will).

    Directories matching any pattern in ``exclude_directories`` will
    be ignored; by default directories with leading ``.``, ``CVS``,
    and ``_darcs`` will be ignored.

    If ``show_ignored`` is true, then all the files that aren"t
    included in package data are shown on stderr (for debugging
    purposes).

    Note patterns use wildcards, or can be exact paths (including
    leading ``./``), and all searching is case-insensitive.
    """
    out = {}
    # Breadth-first walk; each entry is (dir, path-prefix, dotted package,
    # whether files here require an enclosing package).
    stack = [(convert_path(where), "", package, only_in_packages)]
    while stack:
        where, prefix, package, only_in_packages = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where, name)
            if os.path.isdir(fn):
                bad_name = False
                for pattern in exclude_directories:
                    if fnmatchcase(name, pattern) or fn.lower() == pattern.lower():
                        bad_name = True
                        if show_ignored:
                            # sys.stderr.write replaces the Python-2-only
                            # ``print >> sys.stderr`` statement and works
                            # on both Python 2 and 3.
                            sys.stderr.write(
                                "Directory %s ignored by pattern %s\n"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                if os.path.isfile(os.path.join(fn, "__init__.py")) and not prefix:
                    # A package directory: descend with a fresh prefix.
                    if not package:
                        new_package = name
                    else:
                        new_package = package + "." + name
                    stack.append((fn, "", new_package, False))
                else:
                    # A plain data directory inside the current package.
                    stack.append((fn, prefix + name + "/", package, only_in_packages))
            elif package or not only_in_packages:
                # is a file
                bad_name = False
                for pattern in exclude:
                    if fnmatchcase(name, pattern) or fn.lower() == pattern.lower():
                        bad_name = True
                        if show_ignored:
                            sys.stderr.write(
                                "File %s ignored by pattern %s\n"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                out.setdefault(package, []).append(prefix + name)
    return out
# Packaging metadata for the django-coinbase distribution.
PACKAGE = "coinbase"
NAME = "django-coinbase"
DESCRIPTION = "a Django app for receiving payment notifications from Coinbase"
AUTHOR = "Patrick Altman"
AUTHOR_EMAIL = "paltman@eldarion.com"
URL = "https://github.com/eldarion/django-coinbase"
# Import the package itself to read its __version__ attribute.
VERSION = __import__(PACKAGE).__version__

setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    long_description=read("README.rst"),
    author=AUTHOR,
    author_email=AUTHOR_EMAIL,
    license="BSD",
    url=URL,
    packages=find_packages(exclude=["tests.*", "tests"]),
    package_data=find_package_data(PACKAGE, only_in_packages=False),
    classifiers=[
        "Development Status :: 3 - Alpha",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Framework :: Django",
    ],
    install_requires=[
        "django-jsonfield>=0.8",
        "django>=1.4",
        "requests>=1.2.0"
    ],
    zip_safe=False,
)
|
eldarion/django-coinbase
|
setup.py
|
Python
|
bsd-3-clause
| 4,854
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Test function from the generation of figures."""
import math
import gsd.hoomd
from hypothesis import given
from hypothesis.strategies import floats
from statdyn.analysis.order import compute_voronoi_neighs
from statdyn.figures import colour
from statdyn.figures.configuration import plot, snapshot2data
@given(floats(min_value=-math.pi, max_value=math.pi))
def test_colour_orientation(orientation):
    """Ensure hex values being returned by colour_orientation."""
    # int(..., 16) raises ValueError if the string (sans leading '#')
    # is not valid hexadecimal, failing the test.
    int(colour.colour_orientation(orientation)[1:], 16)
def test_plot():
    """Plot the first frame of a sample trajectory without raising."""
    with gsd.hoomd.open('test/data/trajectory-13.50-3.00.gsd') as trj:
        plot(trj[0], repeat=True, offset=True)
def test_snapshot2data():
    """Convert the first frame of a sample trajectory without raising."""
    with gsd.hoomd.open('test/data/trajectory-13.50-3.00.gsd') as trj:
        snapshot2data(trj[0])
def test_order():
    """Plot a frame coloured by Voronoi neighbour count without raising."""
    with gsd.hoomd.open('test/data/trajectory-13.50-3.00.gsd') as trj:
        order_list = compute_voronoi_neighs(trj[0].configuration.box,
                                            trj[0].particles.position)
        plot(trj[0], repeat=True, offset=True, order_list=order_list)
|
malramsay64/MD-Molecules-Hoomd
|
test/figures_test.py
|
Python
|
mit
| 1,269
|
#
# This is a clone of libdrm/tests/modetest/modetest.c
#
# DRM based mode setting test program
# Copyright 2008 Tungsten Graphics
# Jakob Bornecrantz <jakob@tungstengraphics.com>
# Copyright 2008 Intel Corporation
# Jesse Barnes <jesse.barnes@intel.com>
import os, sys
import argparse
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "../"))) # use pydrm from parent dir
from pydrm import Drm
from pydrm.property import DrmPropertyEnum, DrmPropertyBitmask
from pydrm.format import DrmFormat
from utils import *
# for Python 2
try:
    # On Python 2 alias input() to raw_input(); on Python 3 raw_input
    # does not exist, so the NameError leaves the builtin input() as-is.
    input = raw_input
except NameError:
    pass
def fail(msg):
    """Write *msg* to stderr and terminate the process with exit status 1.

    Bug fix: use sys.exit() instead of the site-module builtin exit(),
    which is meant for interactive use and is absent under ``python -S``.
    """
    sys.stderr.write(msg)
    sys.exit(1)
def connector_status_name(status):
    """Map a DRM connector status code to its display name."""
    names = {1: "connected", 2: "disconnected"}
    return names.get(status, "unknown")
# Bit-position names for drmModeModeInfo.type, LSB first.
mode_type_names = [
    "builtin",
    "clock_c",
    "crtc_c",
    "preferred",
    "default",
    "userdef",
    "driver"]


def mode_type_str(type_):
    """Return a comma-separated list of the mode-type bits set in *type_*."""
    set_names = [name for bit, name in enumerate(mode_type_names)
                 if type_ & (1 << bit)]
    return ", ".join(set_names)
# Bit-position names for drmModeModeInfo.flags, LSB first.
mode_flag_names = [
    "phsync",
    "nhsync",
    "pvsync",
    "nvsync",
    "interlace",
    "dblscan",
    "csync",
    "pcsync",
    "ncsync",
    "hskew",
    "bcast",
    "pixmux",
    "dblclk",
    "clkdiv2"]


def mode_flag_str(flags):
    """Return a comma-separated list of the mode flags set in *flags*."""
    set_names = [name for bit, name in enumerate(mode_flag_names)
                 if flags & (1 << bit)]
    return ", ".join(set_names)
def dump_mode(mode):
    """Format one display mode as a single summary line (newline-terminated)."""
    s = "%s %d %d %d %d %d %d %d %d %d %d" % (
        mode.name,
        mode.vrefresh,
        mode.hdisplay,
        mode.hsync_start,
        mode.hsync_end,
        mode.htotal,
        mode.vdisplay,
        mode.vsync_start,
        mode.vsync_end,
        mode.vtotal,
        mode.clock)
    s += " flags: "
    s += mode_flag_str(mode.flags)
    s += "; type: "
    s += mode_type_str(mode.type)
    s += "\n"
    return s
def dump_prop(prop):
    """Format one DRM property, mirroring modetest.c's dump_prop().

    The commented-out C snippets are kept as a porting reference for the
    property types not yet handled by pydrm.
    """
    s = "\t%d" % prop.id
    s += " %s:\n" % prop.name
    s += "\t\tflags: "
    if prop.immutable:
        s += "immutable "
    s += "%s\n" % prop.type_name
    # if (drm_property_type_is(prop, DRM_MODE_PROP_SIGNED_RANGE)) {
    #     printf("\t\tvalues:");
    #     for (i = 0; i < prop->count_values; i++)
    #         printf(" %"PRId64, U642I64(prop->values[i]));
    #     printf("\n");
    # }
    #
    # if (drm_property_type_is(prop, DRM_MODE_PROP_RANGE)) {
    #     printf("\t\tvalues:");
    #     for (i = 0; i < prop->count_values; i++)
    #         printf(" %"PRIu64, prop->values[i]);
    #     printf("\n");
    # }
    #
    # if (drm_property_type_is(prop, DRM_MODE_PROP_ENUM)) {
    #     printf("\t\tenums:");
    #     for (i = 0; i < prop->count_enums; i++)
    #         printf(" %s=%llu", prop->enums[i].name,
    #                prop->enums[i].value);
    #     printf("\n");
    if isinstance(prop, DrmPropertyEnum):
        s += "\t\tenums:"
        for val, name in prop.enum.items():
            s += " %s=%s" % (name, val)
        s += "\n"
    # } else if (drm_property_type_is(prop, DRM_MODE_PROP_BITMASK)) {
    #     printf("\t\tvalues:");
    #     for (i = 0; i < prop->count_enums; i++)
    #         printf(" %s=0x%llx", prop->enums[i].name,
    #                (1LL << prop->enums[i].value));
    #     printf("\n");
    elif isinstance(prop, DrmPropertyBitmask):
        s += "\t\tvalues:"
        for val, name in prop.enum.items():
            s += " %s=0x%x" % (name, 1 << val)
        s += "\n"
    # } else {
    #     assert(prop->count_enums == 0);
    # }
    #
    # if (drm_property_type_is(prop, DRM_MODE_PROP_BLOB)) {
    #     printf("\t\tblobs:\n");
    #     for (i = 0; i < prop->count_blobs; i++)
    #         dump_blob(dev, prop->blob_ids[i]);
    #     printf("\n");
    # } else {
    #     assert(prop->count_blobs == 0);
    # }
    s += "\t\tvalue:"
    # if (drm_property_type_is(prop, DRM_MODE_PROP_BLOB))
    #     dump_blob(dev, value);
    # else if (drm_property_type_is(prop, DRM_MODE_PROP_SIGNED_RANGE))
    #     printf(" %"PRId64"\n", value);
    # else
    #     printf(" %"PRIu64"\n", value);
    s += " %s\n" % prop.value
    return s
def dump_props(props):
    """Format a property collection; return "" when it is empty."""
    if not props.props:
        return ""
    return " props:\n" + "".join(dump_prop(p) for p in props.props)
def dump_encoders(drm):
    """Format the encoder table (id, crtc, type, possible crtcs/clones)."""
    s = "Encoders:\n"
    s += "id\tcrtc\ttype\tpossible crtcs\tpossible clones\n"
    for encoder in drm.encoders:
        # NOTE(review): the inner comprehension variable shadows the loop's
        # ``encoder``; harmless here since all earlier %-args are evaluated
        # first and the loop rebinds it each iteration.
        s += "%d\t%d\t%s\t%-15s\t%-15s\n" % (
            encoder.id,
            encoder.crtc.id if encoder.crtc else 0,
            encoder.type_name,
            ', '.join([str(crtc.id) for crtc in encoder.possible_crtcs]),
            ', '.join([str(encoder.id) for encoder in encoder.possible_clones]))
    return s
def dump_connectors(drm):
    """Format the connector table, including each connector's modes and props."""
    s = "Connectors:\n"
    s += "id\tencoder\tstatus\t\tname\t\tsize (mm)\tmodes\tencoders\n"
    for connector in drm.connectors:
        s += "%d\t%d\t%s\t%-15s\t%dx%d\t\t%d\t" % (
            connector.id,
            connector.encoder.id if connector.encoder else 0,
            connector_status_name(connector.status),
            connector.name,
            connector.mm_width, connector.mm_height,
            len(connector.modes))
        # Comma-separated list of all encoder ids usable by this connector.
        for j in range(len(connector.encoders)):
            if j > 0:
                s += ", "
            s += "%d" % connector.encoders[j].id
        s += "\n"
        if len(connector.modes):
            s += " modes:\n"
            s += "\tname refresh (Hz) hdisp hss hse htot vdisp vss vse vtot)\n"
            for mode in connector.modes:
                s += "\t%s" % dump_mode(mode)
        s += dump_props(connector.props)
    return s
def dump_crtcs(drm):
    """Format the CRTC table with each CRTC's current mode and props."""
    s = "CRTCs:\n"
    s += "id\tfb\tpos\tsize\n"
    for crtc in drm.crtcs:
        s += "%d\t%d\t(%d,%d)\t(%dx%d)\n" % (
            crtc.id,
            crtc.fb.id if crtc.fb else 0,
            crtc.x, crtc.y,
            crtc.width, crtc.height)
        s += dump_mode(crtc.mode) if crtc.mode else ""
        s += dump_props(crtc.props)
    return s
def dump_planes(drm):
    """Return a table of the DRM device's planes, their formats and props."""
    s = "Planes:\n"
    s += "id\tcrtc\tfb\tCRTC x,y\tx,y\tgamma size\tpossible crtcs\n"
    for plane in drm.planes:
        crtc_id = plane.crtc.id if plane.crtc else 0
        fb_id = plane.fb.id if plane.fb else 0
        possible = ', '.join(str(crtc.id) for crtc in plane.possible_crtcs)
        # FIXME: what are these?
        # plane.crtc_x, plane.crtc_y, plane.x, plane.y,
        s += "%d\t%d\t%d\t%d,%d\t\t%d,%d\t%-8d\t%s\n" % (
            plane.id, crtc_id, fb_id,
            999, 999, 999, 999,
            plane.gamma_size, possible)
        # NOTE: a plane with no format list also skips the property dump —
        # preserved from the original control flow.
        if not plane.formats:
            continue
        s += "  formats:"
        for fmt in plane.formats:
            s += " %s" % fmt.name
        s += "\n"
        s += dump_props(plane.props)
    return s
def dump_framebuffers(drm):
    """Return a table of the DRM device's frame buffers."""
    rows = ["%u\t(%ux%u)\t%u\n" % (fb.id, fb.width, fb.height, fb.pitch)
            for fb in drm.framebuffers]
    return "Frame buffers:\nid\tsize\t\tpitch\n" + "".join(rows)
# Command-line interface, modelled on libdrm's C modetest utility.
parser = argparse.ArgumentParser(epilog="Default is to dump all info on the first available device")
parser.add_argument("-c", help="list connectors", action="store_true")
parser.add_argument("-e", help="list encoders", action="store_true")
parser.add_argument("-f", help="list framebuffers", action="store_true")
parser.add_argument("-p", help="list CRTCs and planes (pipes)", action="store_true")
test = parser.add_argument_group('Test options')
# Reference material carried over from the C modetest source:
# fprintf(stderr, "\n Test options:\n\n");
# fprintf(stderr, "\t-P <crtc_id>:<w>x<h>[+<x>+<y>][*<scale>][@<format>]\tset a plane\n");
# struct pipe_arg {
#     const char **cons;
#     uint32_t *con_ids;
#     unsigned int num_cons;
#     uint32_t crtc_id;
#     char mode_str[64];
#     char format_str[5];
#     unsigned int vrefresh;
#     unsigned int fourcc;
#     drmModeModeInfo *mode;
#     struct crtc *crtc;
#     unsigned int fb_id[2], current_fb_id;
#     struct timeval start;
#
#     int swap_count;
# };
class PipeArgs(object):
    """Holder for one ``-s`` pipe specification (port of C's struct pipe_arg)."""

    def __init__(self):
        # Raw values parsed from the command line.
        self.connector_ids = []
        self.crtcid = None
        self.modestr = ""
        self.formatstr = ""
        self.vrefresh = None
        # Resolved objects, filled in later from the Drm device.
        self.format = None
        self.connectors = []
        self.mode = None
        self.crtcs = []
class SetModeAction(argparse.Action):
    """argparse action for ``-s``: parse a pipe/mode specification.

    Accepted syntax (every part optional):
        [<connector_id>[,<connector_id>...]][@<crtc_id>]:[<mode>][-<vrefresh>][@<format>]

    Each occurrence appends one PipeArgs instance to the option's destination
    list; parts that are absent keep the PipeArgs defaults.

    Raises argparse.ArgumentError for malformed values (extra separators,
    non-integer crtc_id or vrefresh). Non-integer connector ids propagate a
    plain ValueError, as before.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        # Accumulate one PipeArgs per -s occurrence on the dest list.
        pipeargs = getattr(namespace, self.dest)
        if pipeargs is None:
            pipeargs = []
        setattr(namespace, self.dest, pipeargs)
        p = PipeArgs()
        pipeargs.append(p)
        if values is None:
            # Bare -s: all defaults (first connected connector, etc.).
            return
        connstr, delim, modestr = values.partition(':')
        if ':' in modestr:
            raise argparse.ArgumentError(self, 'extra :')
        if connstr:
            connid, delim, crtcid = connstr.partition('@')
            if connid:
                p.connector_ids = [int(i) for i in connid.split(',')]
            if '@' in crtcid:
                raise argparse.ArgumentError(self, 'extra @')
            if crtcid:
                try:
                    p.crtcid = int(crtcid)
                except ValueError:
                    raise argparse.ArgumentError(self, "crtc_id=%s is not an int" % crtcid)
        if modestr:
            mode, delim, format_ = modestr.partition('@')
            if '@' in format_:
                raise argparse.ArgumentError(self, 'extra @')
            mode, delim, vrefresh = mode.partition('-')
            if mode:
                p.modestr = mode
            if vrefresh:
                try:
                    p.vrefresh = int(vrefresh)
                except ValueError:
                    raise argparse.ArgumentError(self, "vrefresh=%s is not an int" % vrefresh)
            if format_:
                p.formatstr = format_
# fprintf(stderr, "\t-s <connector_id>[,<connector_id>][@<crtc_id>]:<mode>[-<vrefresh>][@<format>]\tset a mode\n");
# nargs='?' lets a bare -s fall back to the defaults in PipeArgs.
test.add_argument('-s', action=SetModeAction, nargs='?', help="set mode [<connector_id>,][@<crtc_id>][:[<mode>[-<vrefresh>]][@<format>]]")
test.add_argument("-C", help="test hw cursor", action="store_true")
test.add_argument("-v", help="test vsynced page flipping", action="store_true")
class SetPropertyAction(argparse.Action):
    """argparse action for ``-w``: parse ``<obj_id>:<prop_name>:<value>``.

    Each occurrence appends ``[obj_id, prop_name, value]`` (obj_id as int)
    to ``namespace.props`` (kept for backward compatibility) and also stores
    the same list under the option's own destination.

    Raises argparse.ArgumentError when the value does not have exactly three
    ':'-separated fields or obj_id is not an integer.
    """

    def __call__(self, parser, namespace, values, option_string=None):
        if getattr(namespace, 'props', None) is None:
            namespace.props = []
        try:
            obj_id, prop_name, value = values.split(':')
        except ValueError:
            # Wrong number of ':'-separated fields.
            raise argparse.ArgumentError(self, "could not parse '%s'" % values)
        try:
            obj_id = int(obj_id)
        except ValueError:
            raise argparse.ArgumentError(self, "obj_id=%s is not an int" % obj_id)
        namespace.props.append([obj_id, prop_name, value])
        # Bug fix: also record the list under the option's dest ("w").
        # Previously args.w stayed None even when -w was given, so the
        # "dump everything by default" check never saw it.
        setattr(namespace, self.dest, namespace.props)
test.add_argument('-w', action=SetPropertyAction, help="set property <obj_id>:<prop_name>:<value>")
parser.add_argument("-d", help="drop master after mode set", action="store_true")
parser.add_argument("-M", help="use the given driver", metavar=('module'))
parser.add_argument("-D", help="use the given device", type=int, metavar=('device'))
parser.add_argument("--pattern", help="test pattern (default:smpte)", choices=['smpte', 'mono'], default="smpte")
args = parser.parse_args()
# default dump all
# -w and -s use custom actions; whether they register as "given" here depends
# on the action storing a value under its dest attribute.
if not any([args.c, args.e, args.f, args.p, args.w, args.s, args.C, args.v]):
    args.c = args.e = args.f = args.p = True
class Device(object):
    """Per-device state for the mode-setting test: framebuffer and geometry."""

    def __init__(self, drm):
        self.drm = drm
        # Combined size of all configured pipes, filled in during mode set.
        self.width = self.height = 0
        # Framebuffer, buffer object and cursor, created on demand.
        self.fb = self.bo = self.cursor = None
# Open the DRM device selected on the command line.
# -D takes precedence over -M; with neither, open the default device.
if args.D is not None:  # bug fix: "is not None" so "-D 0" selects device 0
    drm = Drm(args.D)
elif args.M:
    # Probe /dev/dri/card0..card63 for a device whose driver name matches.
    for i in range(64):
        if os.path.exists("/dev/dri/card%d" % i):
            drm = Drm(i)
            if drm.version.name == args.M:
                break
    # NOTE(review): if no driver matches, drm is the last probed device (or
    # unbound when no /dev/dri/card* exists at all) — confirm this fallback
    # is intended.
else:
    drm = Drm()
# Plain dump modes: print the requested tables.
if args.e:
    print(dump_encoders(drm))
if args.c:
    print(dump_connectors(drm))
if args.p:
    print(dump_crtcs(drm))
    print(dump_planes(drm))
if args.f:
    print(dump_framebuffers(drm))
if args.s:
    # Mode-setting test: resolve each -s pipe spec, set the modes, and draw
    # a test pattern into a dumb-buffer framebuffer.
    if not drm.cap.DRM_CAP_DUMB_BUFFER:
        fail("driver doesn't support the dumb buffer API\n")
    dev = Device(drm)
    x = 0
    for pipe in args.s:
        # Resolve requested connector ids to connector objects.
        if pipe.connector_ids:
            for conn_id in pipe.connector_ids:
                conn = [conn for conn in drm.connectors if conn.id == conn_id]
                if not conn:
                    fail("couldn't find connector with id: %s\n" % conn_id)
                pipe.connectors.extend(conn)
        else:
            # default is the first connected connector
            pipe.connectors = drm.find_connectors()[:1]
            if not pipe.connectors:
                fail("couldn't find any connected connector\n")
        pipe.mode = pipe.connectors[0].find_mode(pipe.modestr, pipe.vrefresh)
        if not pipe.mode:
            fail("mode not found: %s %s\n" % (pipe.modestr, pipe.vrefresh if pipe.vrefresh else ""))
        # Resolve the CRTC, either by explicit id or by capability matching.
        if pipe.crtcid:
            pipe.crtcs = [crtc for crtc in drm.crtcs if crtc.id == pipe.crtcid]
        else:
            pipe.crtcs = drm.find_crtcs(*pipe.connectors)
        if not pipe.crtcs:
            # NOTE(review): the conditional binds the whole "%"-expression,
            # so without a crtcid this calls fail("") with an empty message —
            # parenthesize if a message is wanted.
            fail("crtc not found %s\n" % pipe.crtcid if pipe.crtcid else "")
        pipe.crtc = pipe.crtcs[0]
        # Pipes are placed side by side: accumulate width, track max height.
        dev.width += pipe.mode.hdisplay
        if dev.height < pipe.mode.vdisplay:
            dev.height = pipe.mode.vdisplay
        pipe.planes = drm.find_planes(pipe.crtc)
        if not pipe.planes:
            fail("couldn't find plane for crtc %s\n" % pipe.crtc.id)
        pipe.plane = pipe.planes[0]
        # Pixel format: explicit request (validated) or the plane's preference.
        if pipe.formatstr:
            pipe.format = None
            try:
                pipe.format = DrmFormat(pipe.formatstr)
            except NotImplementedError:
                fail("format '%s' is not supported\n" % pipe.formatstr)
            if not pipe.format in pipe.plane.formats:
                fail("format '%s' is not supported by the plane\n" % pipe.formatstr)
        else:
            pipe.format = pipe.plane.preferred_format
        from pydrm.image import DrmImageFramebuffer
        dev.fb = DrmImageFramebuffer(dev.drm, pipe.format, dev.width, dev.height)
        pipe.crtc.set(dev.fb, x, 0, pipe.mode, *pipe.connectors)
        x += pipe.mode.hdisplay
    # Draw the requested pattern into the framebuffer and push it out.
    from PIL import ImageDraw
    draw = ImageDraw.Draw(dev.fb.image)
    if args.pattern == 'smpte':
        draw_smpte_pattern(draw)
    elif args.pattern == 'mono':
        draw_mono_pattern(draw)
    dev.fb.flush()
    input("Press enter to stop")
    # Debug leftovers, deliberately disabled.
    if 0:
        dev.fb.remove()
        print(dump_framebuffers(drm))
        print(pipe.crtc.inspect(True))
        pipe.crtc.fetch()
        print(pipe.crtc.inspect(True))
|
notro/pydrm
|
utils/modetest.py
|
Python
|
mit
| 15,613
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import logging
import unittest
import uuid
from unittest import mock
import pytest
from azure.cosmos.cosmos_client import CosmosClient
from airflow.exceptions import AirflowException
from airflow.models import Connection
from airflow.providers.microsoft.azure.hooks.cosmos import AzureCosmosDBHook
from airflow.utils import db
class TestAzureCosmosDbHook(unittest.TestCase):
    """Tests for AzureCosmosDBHook with the Cosmos client mocked out.

    Every test patches the ``CosmosClient`` name in the hook's own module
    (``airflow.providers.microsoft.azure.hooks.cosmos``) so no real Azure
    connection is attempted.
    """

    # Set up an environment to test with
    def setUp(self):
        """Register an Azure Cosmos connection with default db/collection."""
        # set up some test variables
        self.test_end_point = 'https://test_endpoint:443'
        self.test_master_key = 'magic_test_key'
        self.test_database_name = 'test_database_name'
        self.test_collection_name = 'test_collection_name'
        self.test_database_default = 'test_database_default'
        self.test_collection_default = 'test_collection_default'
        db.merge_conn(
            Connection(
                conn_id='azure_cosmos_test_key_id',
                conn_type='azure_cosmos',
                login=self.test_end_point,
                password=self.test_master_key,
                extra=json.dumps(
                    {
                        'database_name': self.test_database_default,
                        'collection_name': self.test_collection_default,
                    }
                ),
            )
        )

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient', autospec=True)
    def test_client(self, mock_cosmos):
        """get_conn() lazily creates and returns a CosmosClient."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        assert hook._conn is None
        assert isinstance(hook.get_conn(), CosmosClient)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_create_database(self, mock_cosmos):
        """create_database() calls CreateDatabase with the given name."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        hook.create_database(self.test_database_name)
        expected_calls = [mock.call().CreateDatabase({'id': self.test_database_name})]
        mock_cosmos.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        mock_cosmos.assert_has_calls(expected_calls)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_create_database_exception(self, mock_cosmos):
        """create_database(None) raises AirflowException."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        with pytest.raises(AirflowException):
            hook.create_database(None)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_create_container_exception(self, mock_cosmos):
        """create_collection(None) raises AirflowException."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        with pytest.raises(AirflowException):
            hook.create_collection(None)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_create_container(self, mock_cosmos):
        """create_collection() targets the explicitly named database."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        hook.create_collection(self.test_collection_name, self.test_database_name)
        expected_calls = [
            mock.call().CreateContainer('dbs/test_database_name', {'id': self.test_collection_name})
        ]
        mock_cosmos.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        mock_cosmos.assert_has_calls(expected_calls)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_create_container_default(self, mock_cosmos):
        """create_collection() falls back to the connection's default db."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        hook.create_collection(self.test_collection_name)
        expected_calls = [
            mock.call().CreateContainer('dbs/test_database_default', {'id': self.test_collection_name})
        ]
        mock_cosmos.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        mock_cosmos.assert_has_calls(expected_calls)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_upsert_document_default(self, mock_cosmos):
        """upsert_document() uses the default db/collection from the conn."""
        test_id = str(uuid.uuid4())
        mock_cosmos.return_value.CreateItem.return_value = {'id': test_id}
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        returned_item = hook.upsert_document({'id': test_id})
        expected_calls = [
            mock.call().CreateItem(
                'dbs/' + self.test_database_default + '/colls/' + self.test_collection_default,
                {'id': test_id},
            )
        ]
        mock_cosmos.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        mock_cosmos.assert_has_calls(expected_calls)
        logging.getLogger().info(returned_item)
        assert returned_item['id'] == test_id

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_upsert_document(self, mock_cosmos):
        """upsert_document() injects the document id and honours overrides."""
        test_id = str(uuid.uuid4())
        mock_cosmos.return_value.CreateItem.return_value = {'id': test_id}
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        returned_item = hook.upsert_document(
            {'data1': 'somedata'},
            database_name=self.test_database_name,
            collection_name=self.test_collection_name,
            document_id=test_id,
        )
        expected_calls = [
            mock.call().CreateItem(
                'dbs/' + self.test_database_name + '/colls/' + self.test_collection_name,
                {'data1': 'somedata', 'id': test_id},
            )
        ]
        mock_cosmos.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        mock_cosmos.assert_has_calls(expected_calls)
        logging.getLogger().info(returned_item)
        assert returned_item['id'] == test_id

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_insert_documents(self, mock_cosmos):
        """insert_documents() creates one item per input document."""
        test_id1 = str(uuid.uuid4())
        test_id2 = str(uuid.uuid4())
        test_id3 = str(uuid.uuid4())
        documents = [
            {'id': test_id1, 'data': 'data1'},
            {'id': test_id2, 'data': 'data2'},
            {'id': test_id3, 'data': 'data3'},
        ]
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        returned_item = hook.insert_documents(documents)
        expected_calls = [
            mock.call().CreateItem(
                'dbs/' + self.test_database_default + '/colls/' + self.test_collection_default,
                {'data': 'data1', 'id': test_id1},
            ),
            mock.call().CreateItem(
                'dbs/' + self.test_database_default + '/colls/' + self.test_collection_default,
                {'data': 'data2', 'id': test_id2},
            ),
            mock.call().CreateItem(
                'dbs/' + self.test_database_default + '/colls/' + self.test_collection_default,
                {'data': 'data3', 'id': test_id3},
            ),
        ]
        logging.getLogger().info(returned_item)
        mock_cosmos.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        mock_cosmos.assert_has_calls(expected_calls, any_order=True)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_delete_database(self, mock_cosmos):
        """delete_database() calls DeleteDatabase with the db link."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        hook.delete_database(self.test_database_name)
        expected_calls = [mock.call().DeleteDatabase('dbs/test_database_name')]
        mock_cosmos.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        mock_cosmos.assert_has_calls(expected_calls)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_delete_database_exception(self, mock_cosmos):
        """delete_database(None) raises AirflowException."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        with pytest.raises(AirflowException):
            hook.delete_database(None)

    # Consistency fix: every other test patches the CosmosClient name in the
    # provider module; this one previously patched
    # 'azure.cosmos.cosmos_client.CosmosClient' instead. Use the same target
    # so the mock reliably replaces the client reference the hook uses.
    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_delete_container_exception(self, mock_cosmos):
        """delete_collection(None) raises AirflowException."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        with pytest.raises(AirflowException):
            hook.delete_collection(None)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_delete_container(self, mock_cosmos):
        """delete_collection() targets the explicitly named database."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        hook.delete_collection(self.test_collection_name, self.test_database_name)
        expected_calls = [mock.call().DeleteContainer('dbs/test_database_name/colls/test_collection_name')]
        mock_cosmos.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        mock_cosmos.assert_has_calls(expected_calls)

    @mock.patch('airflow.providers.microsoft.azure.hooks.cosmos.CosmosClient')
    def test_delete_container_default(self, mock_cosmos):
        """delete_collection() falls back to the connection's default db."""
        hook = AzureCosmosDBHook(azure_cosmos_conn_id='azure_cosmos_test_key_id')
        hook.delete_collection(self.test_collection_name)
        expected_calls = [mock.call().DeleteContainer('dbs/test_database_default/colls/test_collection_name')]
        mock_cosmos.assert_any_call(self.test_end_point, {'masterKey': self.test_master_key})
        mock_cosmos.assert_has_calls(expected_calls)
|
Acehaidrey/incubator-airflow
|
tests/providers/microsoft/azure/hooks/test_azure_cosmos.py
|
Python
|
apache-2.0
| 10,134
|
"""Tests for vumi.transports.irc.irc."""
from StringIO import StringIO
from twisted.internet.defer import (inlineCallbacks, returnValue,
DeferredQueue, Deferred)
from twisted.internet.protocol import FileWrapper
from vumi.tests.utils import LogCatcher
from vumi.transports.failures import FailureMessage, TemporaryFailure
from vumi.transports.irc.irc import IrcMessage, VumiBotProtocol
from vumi.transports.irc import IrcTransport
from vumi.transports.tests.helpers import TransportHelper
from vumi.tests.helpers import VumiTestCase
class TestIrcMessage(VumiTestCase):
    """Unit tests for the IrcMessage value object."""

    SENDER = 'user!userfoo@example.com'

    def mk_msg(self, command='PRIVMSG', recipient='#bar', content='hello?',
               nickname=None):
        """Build an IrcMessage from the standard test sender."""
        if nickname is not None:
            return IrcMessage(self.SENDER, command, recipient, content,
                              nickname)
        return IrcMessage(self.SENDER, command, recipient, content)

    def test_message(self):
        msg = self.mk_msg()
        self.assertEqual(msg.sender, 'user')
        self.assertEqual(msg.command, 'PRIVMSG')
        self.assertEqual(msg.recipient, '#bar')
        self.assertEqual(msg.content, 'hello?')

    def test_action(self):
        self.assertEqual(self.mk_msg(command='ACTION').command, 'ACTION')

    def test_channel(self):
        self.assertEqual(self.mk_msg().channel(), '#bar')
        direct = self.mk_msg(recipient='user2!user2@example.com')
        self.assertEqual(direct.channel(), None)

    def test_nick(self):
        msg = self.mk_msg(nickname='nicktest')
        self.assertEqual(msg.nickname, 'nicktest')

    def test_addressed_to(self):
        msg = self.mk_msg(recipient='otheruser!userfoo@example.com',
                          nickname='nicktest')
        self.assertFalse(msg.addressed_to('user'))
        self.assertTrue(msg.addressed_to('otheruser'))

    def test_equality(self):
        self.assertTrue(self.mk_msg() == self.mk_msg())

    def test_inequality(self):
        self.assertFalse(self.mk_msg() == object())

    def test_canonicalize_recipient(self):
        canonical = IrcMessage.canonicalize_recipient
        self.assertEqual(canonical("user!userfoo@example.com"), "user")
        self.assertEqual(canonical("#channel"), "#channel")
        self.assertEqual(canonical("userfoo"), "userfoo")
class TestVumiBotProtocol(VumiTestCase):
    """Unit tests for VumiBotProtocol using an in-memory transport."""
    # Nickname and channel the bot connects with.
    nick = "testnick"
    channel = "#test1"
    def setUp(self):
        # Wire the protocol to a StringIO-backed transport so everything it
        # writes can be inspected; this test case doubles as the publisher.
        self.f = StringIO()
        self.t = FileWrapper(self.f)
        self.vb = VumiBotProtocol(self.nick, [self.channel], self)
        self.vb.makeConnection(self.t)
        self.recvd_messages = []
    def handle_inbound_irc_message(self, irc_msg):
        # Called back by the protocol; record what it publishes.
        self.recvd_messages.append(irc_msg)
    def check(self, lines):
        """Assert the protocol wrote the connect preamble plus *lines*."""
        connect_lines = [
            "NICK %s" % self.nick,
            # foo and bar are twisted's mis-implementation of RFC 2812
            # Compare http://tools.ietf.org/html/rfc2812#section-3.1.3
            # and http://twistedmatrix.com/trac/browser/tags/releases/
            # twisted-11.0.0/twisted/words/protocols/irc.py#L1552
            "USER %s foo bar :None" % self.nick,
        ]
        expected_lines = connect_lines + lines
        self.assertEqual(self.f.getvalue().splitlines(), expected_lines)
    def test_publish_message(self):
        msg = IrcMessage('user!userfoo@example.com', 'PRIVMSG', '#bar',
                         'hello?')
        self.vb.publish_message(msg)
        self.check([])
        [recvd_msg] = self.recvd_messages
        self.assertEqual(recvd_msg, msg)
    def test_consume_message_privmsg(self):
        self.vb.consume_message(IrcMessage('user!userfoo@example.com',
                                           'PRIVMSG', '#bar', 'hello?'))
        self.check(["PRIVMSG #bar :hello?"])
    def test_consume_message_action(self):
        # ACTION messages go out CTCP-quoted inside a PRIVMSG.
        self.vb.consume_message(IrcMessage('user!userfoo@example.com',
                                           'ACTION', '#bar', 'hello?'))
        self.check(["PRIVMSG #bar :\x01ACTION hello?\x01"])
    def test_connection_made(self):
        # just check that the connect messages made it through
        self.check([])
    def test_connection_lost(self):
        with LogCatcher() as logger:
            self.vb.connectionLost("test loss of connection")
            [logmsg] = logger.messages()
            self.assertEqual(logmsg,
                             'Disconnected (nickname was: %s).' % self.nick)
            self.assertEqual(logger.errors, [])
    def test_signed_on(self):
        self.vb.signedOn()
        self.check(['JOIN %s' % self.channel])
    def test_joined(self):
        with LogCatcher() as logger:
            self.vb.joined(self.channel)
            [logmsg] = logger.messages()
            self.assertEqual(logmsg, 'Joined %r' % self.channel)
    def test_privmsg(self):
        sender, command, recipient, text = (self.nick, 'PRIVMSG', "#zoo",
                                            "Hello zooites")
        self.vb.privmsg(sender, recipient, text)
        [recvd_msg] = self.recvd_messages
        self.assertEqual(recvd_msg,
                         IrcMessage(sender, command, recipient, text,
                                    self.vb.nickname))
    def test_action(self):
        sender, command, recipient, text = (self.nick, 'ACTION', "#zoo",
                                            "waves at zooites")
        self.vb.action(sender, recipient, text)
        [recvd_msg] = self.recvd_messages
        self.assertEqual(recvd_msg,
                         IrcMessage(sender, command, recipient, text,
                                    self.vb.nickname))
    def test_irc_nick(self):
        with LogCatcher() as logger:
            self.vb.irc_NICK("oldnick!host", ["newnick"])
            [logmsg] = logger.messages()
            self.assertEqual(logmsg,
                             "Nick changed from 'oldnick' to 'newnick'")
    def test_alter_collided_nick(self):
        collided_nick = "commonnick"
        new_nick = self.vb.alterCollidedNick(collided_nick)
        self.assertEqual(new_nick, collided_nick + '^')
from twisted.internet.protocol import ServerFactory
from twisted.internet import reactor
from twisted.words.protocols.irc import IRC
class StubbyIrcServerProtocol(IRC):
    """Server-side IRC protocol stub that records every received command."""
    # Advertised server hostname; tests connect over loopback.
    hostname = '127.0.0.1'
    def irc_unknown(self, prefix, command, params):
        """Queue the raw event (all commands are 'unknown' to this stub)."""
        self.factory.events.put((prefix, command, params))
    def connectionLost(self, reason):
        """Chain to IRC, then signal test cleanup via the factory deferred."""
        IRC.connectionLost(self, reason)
        self.factory.finished_d.callback(None)
class StubbyIrcServer(ServerFactory):
    """Factory for StubbyIrcServerProtocol that exposes received events."""
    protocol = StubbyIrcServerProtocol
    def startFactory(self):
        # server: most recent protocol instance; events: received commands;
        # finished_d: fires when the connection closes.
        self.server = None
        self.events = DeferredQueue()
        self.finished_d = Deferred()
    def buildProtocol(self, addr):
        """Build the protocol and keep a handle to it for the tests."""
        self.server = ServerFactory.buildProtocol(self, addr)
        self.server.factory = self
        return self.server
    @inlineCallbacks
    def filter_events(self, command_type):
        """Wait for and return the next queued event matching *command_type*."""
        while True:
            ev = yield self.events.get()
            if ev[1] == command_type:
                returnValue(ev)
class TestIrcTransport(VumiTestCase):
    """Integration tests running IrcTransport against a stub IRC server."""
    # Nickname the transport registers on connect.
    nick = 'vumibottest'
    @inlineCallbacks
    def setUp(self):
        # Start a local stub IRC server on an ephemeral port and bring up
        # the transport pointed at it.
        self.irc_server = StubbyIrcServer()
        self.add_cleanup(lambda: self.irc_server.finished_d)
        self.tx_helper = self.add_helper(TransportHelper(IrcTransport))
        self.irc_connector = yield reactor.listenTCP(
            0, self.irc_server, interface='127.0.0.1')
        self.add_cleanup(self.irc_connector.stopListening)
        addr = self.irc_connector.getHost()
        self.server_addr = "%s:%s" % (addr.host, addr.port)
        self.transport = yield self.tx_helper.get_transport({
            'network': addr.host,
            'port': addr.port,
            'channels': [],
            'nickname': self.nick,
        })
        # wait for transport to connect
        yield self.irc_server.filter_events("NICK")
    def dispatch_outbound_irc(self, *args, **kw):
        """Dispatch an outbound message, optionally tagging an irc_command."""
        helper_metadata = kw.setdefault('helper_metadata', {'irc': {}})
        irc_command = kw.pop('irc_command', None)
        if irc_command is not None:
            helper_metadata['irc']['irc_command'] = irc_command
        return self.tx_helper.make_dispatch_outbound(*args, **kw)
    def assert_inbound_message(self, msg, to_addr, from_addr, channel, content,
                               addressed_to_transport, irc_command):
        """Assert addressing, content and IRC helper metadata of *msg*."""
        self.assertEqual(msg['transport_name'], self.tx_helper.transport_name)
        self.assertEqual(msg['to_addr'], to_addr)
        self.assertEqual(msg['from_addr'], from_addr)
        self.assertEqual(msg['group'], channel)
        self.assertEqual(msg['content'], content)
        self.assertEqual(msg['helper_metadata'], {
            'irc': {
                'transport_nickname': self.nick,
                'addressed_to_transport': addressed_to_transport,
                'irc_server': self.server_addr,
                'irc_channel': channel,
                'irc_command': irc_command,
            }
        })
        self.assertEqual(msg['transport_metadata'], {
            'irc_channel': channel,
        })
    def assert_ack_for(self, msg, ack):
        """Assert *ack* acknowledges *msg*, ignoring volatile payload keys."""
        to_payload = lambda m: dict(
            (k, v) for k, v in m.payload.iteritems()
            if k not in ('event_id', 'timestamp', 'transport_type'))
        self.assertEqual(to_payload(self.tx_helper.make_ack(msg)),
                         to_payload(ack))
    def send_irc_message(self, content, recipient, sender="user!ident@host"):
        """Inject a PRIVMSG from the stub server into the transport."""
        self.irc_server.server.privmsg(sender, recipient, content)
    @inlineCallbacks
    def test_handle_inbound_to_channel(self):
        text = "Hello gooites"
        self.send_irc_message(text, "#zoo")
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assert_inbound_message(msg,
                                    to_addr=None,
                                    from_addr="user",
                                    channel="#zoo",
                                    content=text,
                                    addressed_to_transport=False,
                                    irc_command="PRIVMSG")
    @inlineCallbacks
    def test_handle_inbound_to_channel_directed(self):
        # "<nick>: ..." in a channel addresses the transport directly.
        self.send_irc_message("%s: Hi" % (self.nick,), "#zoo")
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assert_inbound_message(msg,
                                    to_addr=self.nick,
                                    from_addr="user",
                                    channel="#zoo",
                                    content="Hi",
                                    addressed_to_transport=True,
                                    irc_command="PRIVMSG")
    @inlineCallbacks
    def test_handle_inbound_to_user(self):
        self.send_irc_message("Hi there", "%s!bot@host" % (self.nick,))
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assert_inbound_message(msg,
                                    to_addr=self.nick,
                                    from_addr="user",
                                    channel=None,
                                    content="Hi there",
                                    addressed_to_transport=True,
                                    irc_command="PRIVMSG")
    @inlineCallbacks
    def test_handle_inbound_channel_notice(self):
        sender, recipient, text = "user!ident@host", "#zoo", "Hello gooites"
        self.irc_server.server.notice(sender, recipient, text)
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['transport_name'], self.tx_helper.transport_name)
        self.assertEqual(msg['to_addr'], None)
        self.assertEqual(msg['from_addr'], "user")
        self.assertEqual(msg['group'], "#zoo")
        self.assertEqual(msg['content'], text)
        self.assertEqual(msg['helper_metadata'], {
            'irc': {
                'transport_nickname': self.nick,
                'addressed_to_transport': False,
                'irc_server': self.server_addr,
                'irc_channel': '#zoo',
                'irc_command': 'NOTICE',
            }
        })
        self.assertEqual(msg['transport_metadata'], {
            'irc_channel': '#zoo',
        })
    @inlineCallbacks
    def test_handle_inbound_user_notice(self):
        sender, recipient, text = "user!ident@host", "bot", "Hello gooites"
        self.irc_server.server.notice(sender, recipient, text)
        [msg] = yield self.tx_helper.wait_for_dispatched_inbound(1)
        self.assertEqual(msg['transport_name'], self.tx_helper.transport_name)
        self.assertEqual(msg['to_addr'], "bot")
        self.assertEqual(msg['from_addr'], "user")
        self.assertEqual(msg['group'], None)
        self.assertEqual(msg['content'], text)
        self.assertEqual(msg['helper_metadata'], {
            'irc': {
                'transport_nickname': self.nick,
                'addressed_to_transport': False,
                'irc_server': self.server_addr,
                'irc_channel': None,
                'irc_command': 'NOTICE',
            }
        })
        self.assertEqual(msg['transport_metadata'], {
            'irc_channel': None,
        })
    @inlineCallbacks
    def test_handle_outbound_message_while_disconnected(self):
        # Sending with no connection must produce a temporary failure.
        yield self.irc_connector.stopListening()
        self.transport.factory.vumibot.connectionLost("testing disconnect")
        expected_error = "IrcTransport not connected."
        yield self.dispatch_outbound_irc("outbound")
        [error] = self.tx_helper.get_dispatched_failures()
        self.assertTrue(error['reason'].strip().endswith(expected_error))
        [error] = self.flushLoggedErrors(TemporaryFailure)
        failure = error.value
        self.assertEqual(failure.failure_code, FailureMessage.FC_TEMPORARY)
        self.assertEqual(str(failure), expected_error)
    @inlineCallbacks
    def test_handle_outbound_to_channel_old(self):
        # Legacy addressing: channel name passed as to_addr.
        msg = yield self.dispatch_outbound_irc(
            "hello world", to_addr="#vumitest")
        event = yield self.irc_server.filter_events('PRIVMSG')
        self.assertEqual(event, ('', 'PRIVMSG',
                                 ['#vumitest', 'hello world']))
        [smsg] = self.tx_helper.get_dispatched_events()
        self.assert_ack_for(msg, smsg)
    @inlineCallbacks
    def test_handle_outbound_to_channel(self):
        msg = yield self.dispatch_outbound_irc(
            "hello world", to_addr=None, group="#vumitest")
        event = yield self.irc_server.filter_events('PRIVMSG')
        self.assertEqual(event, ('', 'PRIVMSG',
                                 ['#vumitest', 'hello world']))
        [smsg] = self.tx_helper.get_dispatched_events()
        self.assert_ack_for(msg, smsg)
    @inlineCallbacks
    def test_handle_outbound_to_channel_directed(self):
        # A to_addr plus a group prefixes the content with "user: ".
        msg = yield self.dispatch_outbound_irc(
            "hello world", to_addr="user", group="#vumitest")
        event = yield self.irc_server.filter_events('PRIVMSG')
        self.assertEqual(event, ('', 'PRIVMSG',
                                 ['#vumitest', 'user: hello world']))
        [smsg] = self.tx_helper.get_dispatched_events()
        self.assert_ack_for(msg, smsg)
    @inlineCallbacks
    def test_handle_outbound_to_user(self):
        msg = yield self.dispatch_outbound_irc(
            "hello world", to_addr="user", group=None)
        event = yield self.irc_server.filter_events('PRIVMSG')
        self.assertEqual(event, ('', 'PRIVMSG',
                                 ['user', 'hello world']))
        [smsg] = self.tx_helper.get_dispatched_events()
        self.assert_ack_for(msg, smsg)
    @inlineCallbacks
    def test_handle_outbound_action_to_channel(self):
        # ACTION commands are sent CTCP-quoted inside a PRIVMSG.
        msg = yield self.dispatch_outbound_irc(
            "waves", to_addr=None, group="#vumitest", irc_command="ACTION")
        event = yield self.irc_server.filter_events('PRIVMSG')
        self.assertEqual(event, ('', 'PRIVMSG',
                                 ['#vumitest', '\x01ACTION waves\x01']))
        [smsg] = self.tx_helper.get_dispatched_events()
        self.assert_ack_for(msg, smsg)
    @inlineCallbacks
    def test_handle_outbound_action_to_channel_directed(self):
        msg = yield self.dispatch_outbound_irc(
            "waves", to_addr="user", group="#vumitest", irc_command='ACTION')
        event = yield self.irc_server.filter_events('PRIVMSG')
        self.assertEqual(event, ('', 'PRIVMSG',
                                 ['#vumitest', '\x01ACTION waves\x01']))
        [smsg] = self.tx_helper.get_dispatched_events()
        self.assert_ack_for(msg, smsg)
    @inlineCallbacks
    def test_handle_outbound_action_to_user(self):
        msg = yield self.dispatch_outbound_irc(
            "waves", to_addr="user", group=None, irc_command='ACTION')
        event = yield self.irc_server.filter_events('PRIVMSG')
        self.assertEqual(event, ('', 'PRIVMSG',
                                 ['user', '\x01ACTION waves\x01']))
        [smsg] = self.tx_helper.get_dispatched_events()
        self.assert_ack_for(msg, smsg)
|
TouK/vumi
|
vumi/transports/irc/tests/test_irc.py
|
Python
|
bsd-3-clause
| 17,711
|
# encoding: utf-8
# The contents of this file are subject to the Mozilla Public License
# Version 2.0 (the "License"); you may not use this file except in
# compliance with the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# OS2Webscanner was developed by Magenta in collaboration with OS2 the
# Danish community of open source municipalities (http://www.os2web.dk/).
#
# The code is currently governed by OS2 the Danish community of open
# source municipalities ( http://www.os2web.dk/ )
"""Contains Django views."""
import csv
from urllib.parse import unquote
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponse
from .views import LoginRequiredMixin, RestrictedListView, \
DeleteView, UpdateView
from ..models.match_model import Match
from ..models.referrerurl_model import ReferrerUrl
from ..models.scans.scan_model import Scan
from ..models.statistic_model import Statistic
from ..models.url_model import Url
from ..models.userprofile_model import UserProfile
class ReportList(RestrictedListView):
    """Displays list of scanners."""

    model = Scan
    template_name = 'os2webscanner/reports.html'
    paginate_by = 15

    def get_queryset(self):
        """Restrict to the organization of the logged-in user."""
        user = self.request.user
        if user.is_superuser:
            reports = self.model.objects.all()
        else:
            reports = self._reports_for_user(user)
        # Only visible scans, newest first.
        return reports.filter(is_visible=True).order_by('-start_time')

    def _reports_for_user(self, user):
        """Scans a non-superuser may see, keyed off their profile."""
        try:
            profile = user.profile
        except UserProfile.DoesNotExist:
            # Users without a profile only see scans with no organization.
            return self.model.objects.filter(scanner__organization=None)
        # TODO: Filter by group here if relevant.
        if profile.is_group_admin or not profile.organization.do_use_groups:
            return self.model.objects.filter(
                scanner__organization=profile.organization
            )
        return self.model.objects.filter(
            scanner__group__in=profile.groups.all()
        )
# Reports stuff
class ReportDetails(UpdateView, LoginRequiredMixin):
    """Display a detailed report summary.

    Renders a single Scan with its matches, broken links, referrers and
    statistics. Subclasses (CSV/log views) reuse get_context_data().
    """
    model = Scan
    template_name = 'os2webscanner/report.html'
    context_object_name = "scan"
    # When True, the template shows the complete match list instead of the
    # first 100 (context['full_report'] below).
    full = False
    fields = '__all__'
    def get_queryset(self):
        """Get the queryset for the view.
        If the user is not a superuser the queryset will be limited by the
        user's organization.
        """
        queryset = super().get_queryset()
        if not self.request.user.is_superuser:
            try:
                user_profile = self.request.user.profile
                organization = user_profile.organization
            except UserProfile.DoesNotExist:
                # Users without a profile only see scans with no organization.
                organization = None
            queryset = queryset.filter(scanner__organization=organization)
        return queryset
    def get_context_data(self, **kwargs):
        """Add the scan's matches to the report context data."""
        this_scan = self.get_object()
        context = super().get_context_data(**kwargs)
        # Matches ordered most-sensitive first.
        all_matches = Match.objects.filter(
            scan=this_scan
        ).order_by('-sensitivity', 'url', 'matched_rule', 'matched_data')
        # URLs with a recorded status code are the broken links.
        broken_urls = Url.objects.filter(
            scan=this_scan
        ).exclude(status_code__isnull=True).order_by('url')
        referrer_urls = ReferrerUrl.objects.filter(scan=this_scan)
        context['full_report'] = self.full
        # The HTML view only shows the first 100 of each list.
        context['broken_urls'] = broken_urls[:100]
        context['no_of_broken_links'] = broken_urls.count()
        context['referrer_urls'] = referrer_urls
        context['matches'] = all_matches[:100]
        context['all_matches'] = all_matches
        context['no_of_matches'] = all_matches.count() + broken_urls.count()
        context['failed_conversions'] = (
            this_scan.get_number_of_failed_conversions()
        )
        try:
            stats = Statistic.objects.get(scan=this_scan)
            context['files_scraped_count'] = stats.files_scraped_count
            context['files_is_dir_count'] = stats.files_is_dir_count
            context['files_skipped_count'] = stats.files_skipped_count
        except ObjectDoesNotExist:
            # No statistics recorded for this scan; the template tolerates
            # the missing keys.
            pass
        if hasattr(this_scan.scanner, 'filescanner'):
            # Patch all of the context's match model objects to have paths and
            # not encoded URLs. (This should be fine, since we don't save
            # them, and it keeps this complexity out of the browser and
            # template: the database genuinely shouldn't have URLs here, so
            # let's pretend that it doesn't...)
            for k in ['matches', 'all_matches']:
                for m in context[k]:
                    path = unquote(m.url.url)
                    # While we're at it, if we have an alias for whichever
                    # domain this path came from, then convert the path into a
                    # Windows-style path
                    for domain in this_scan.domains.exclude(
                            filedomain__alias__isnull=True).exclude(
                            filedomain__alias__exact=''):
                        url_with_schema = "file://" + domain.url
                        if path.startswith(url_with_schema):
                            everything_else = \
                                path[len(url_with_schema):].strip('/')
                            # Windows appears, in my limited testing, to
                            # support forward slashes in paths nowadays
                            m.url.url = "file://{0}:/{1}".format(
                                domain.filedomain.alias, everything_else)
                            break
                    else:
                        # for/else: no aliased domain matched, so keep the
                        # plain unquoted path.
                        m.url.url = path
        return context
class ReportDelete(DeleteView, LoginRequiredMixin):
    """View for deleting a report."""
    model = Scan
    success_url = '/reports/'
    def get_queryset(self):
        """Limit non-superusers to scans from their own organization."""
        queryset = super().get_queryset()
        if self.request.user.is_superuser:
            return queryset
        try:
            organization = self.request.user.profile.organization
        except UserProfile.DoesNotExist:
            # No profile: only scans without an organization may be deleted.
            organization = None
        return queryset.filter(scanner__organization=organization)
class ScanReportLog(ReportDetails):
    """Display ordinary log file for debugging purposes."""
    def render_to_response(self, context, **response_kwargs):
        """Stream the scan's plain-text log file as a download."""
        scan = self.get_object()
        filename = "scan{0}_log.txt".format(scan.id)
        response = HttpResponse(content_type="text/plain")
        response['Content-Disposition'] = \
            'attachment; filename={0}'.format(filename)
        with open(scan.scan_log_file, "r") as log:
            response.write(log.read())
        return response
class CSVReportDetails(ReportDetails):
    """Display full report in CSV format."""
    def render_to_response(self, context, **response_kwargs):
        """Generate a CSV file and return it as the http response.

        Fix: the previous implementation encoded every field to UTF-8
        bytes before handing it to csv.writer. Under Python 3 (this file
        uses urllib.parse and argument-less super()) that makes the
        writer emit the b'...' repr of each field into the CSV. The
        writer must be given str values; HttpResponse handles encoding.
        """
        scan = self.get_object()
        response = HttpResponse(content_type='text/csv')
        report_file = '{0}{1}.csv'.format(
            scan.scanner.organization.name.replace(' ', '_'),
            scan.id)
        response[
            'Content-Disposition'
        ] = 'attachment; filename={0}'.format(report_file)
        writer = csv.writer(response)
        all_matches = context['all_matches']
        # Summary header and summary row (column names are Danish).
        writer.writerow(['Starttidspunkt', 'Sluttidspunkt', 'Status',
                         'Totalt antal matches', 'Total antal broken links'])
        writer.writerow(
            [str(scan.start_time),
             str(scan.end_time), scan.get_status_display(),
             str(context['no_of_matches']),
             str(context['no_of_broken_links'])]
        )
        if all_matches:
            # Match section: one row per match, newlines flattened so
            # each match stays on a single CSV row.
            writer.writerow(['URL', 'Regel', 'Match', 'Følsomhed'])
            for match in all_matches:
                writer.writerow(
                    [match.url.url,
                     match.get_matched_rule_display(),
                     match.matched_data.replace('\n', '').replace('\r', ' '),
                     match.get_sensitivity_display()]
                )
        broken_urls = context['broken_urls']
        if broken_urls:
            # Broken link section: one row per (referrer, url) pair.
            writer.writerow(['Referrers', 'URL', 'Status'])
            for url in broken_urls:
                for referrer in url.referrers.all():
                    writer.writerow(
                        [referrer.url,
                         url.url,
                         url.status_message]
                    )
        return response
|
os2webscanner/os2webscanner
|
django-os2webscanner/os2webscanner/views/report_views.py
|
Python
|
mpl-2.0
| 9,704
|
#!/usr/bin/python
#
# bravialib - Will Cooke - Whizzy Labs - @8none1
# http://www.whizzy.org
# Copyright Will Cooke 2016. Released under the GPL.
#
#
# My attempt to talk to the Sony Bravia web API.
#
# This is designed to be used by a long running process
# So there is a potentially slow start-up time but then it should be quick enough
# at the expense of some memory usage
#
# The TV will give you access based on the device_id and nickname once you are authorised I think.
# The TV will need to be already switched on for this to work.
#
#
# Thanks:
# https://github.com/aparraga/braviarc/
# https://docs.google.com/viewer?a=v&pid=sites&srcid=ZGlhbC1tdWx0aXNjcmVlbi5vcmd8ZGlhbHxneDoyNzlmNzY3YWJlMmY1MjZl
#
# Some useful resources:
# A tidied up packet capture I did from the iphone app: http://paste.ubuntu.com/23417464/plain/
#
#
# TODO:
# Move logging out of prints and in to logging
#
import requests
from requests.auth import HTTPBasicAuth
import json
from xml.dom import minidom
import socket
import struct
import time
class MockResponse(object):
    """Minimal stand-in for a requests.Response.

    Used when an HTTP request could not be made (timeout / connection
    error) but callers still expect an object with a .status_code.
    """
    def __init__(self, status_code):
        self.status_code = status_code
    def __repr__(self):
        # Mirrors the "<Response [200]>" style of real requests responses
        # to aid debugging.
        return "<MockResponse [%s]>" % self.status_code
class Bravia(object):
    """Client for a Sony Bravia TV's web API.

    Python 2 code (print statements, dict.has_key/iteritems). Designed
    for a long-running process: connect() pre-fetches lookup tables
    (remote codes, apps, inputs, DVB-T channels) so later calls are fast.
    Authentication is either cookie-based (pairing flow) or via a
    pre-shared key set on x_auth_psk.
    """
    def __init__(self, hostname = None, ip_addr = None, mac_addr = None):
        # No network traffic happens here (beyond an optional DNS lookup);
        # call connect() to actually talk to the TV.
        self.ip_addr = ip_addr
        self.hostname = hostname
        self.mac_addr = mac_addr # You don't *have* to specify the MAC address as once we are paired via IP we can find
        # it from the TV but it will only be stored for this session. If the TV is off and you are running this script
        # from cold - you will need the MAC to wake the TV up.
        if self.ip_addr is None and self.hostname is not None:
            self.ip_addr = self._lookup_ip_from_hostname(self.hostname)
        self.device_id = "WebInterface:001"
        self.nickname = "IoT Remote Controller Interface"
        # NOTE(review): if both hostname lookup fails (False) or neither
        # hostname nor ip_addr were given (None), this concatenation will
        # raise TypeError -- confirm callers always supply a reachable host.
        self.endpoint = 'http://'+self.ip_addr
        self.cookies = None
        self.x_auth_psk = None # If you're using PSK instead of cookies you need to set this.
        self.DIAL_cookie = {}
        self.packet_id = 1
        self.device_friendly_name = ""
        self._JSON_HEADER = {'content-type':'application/json', 'connection':'close'}
        self._TIMEOUT = 10
        self.remote_controller_code_lookup = {}
        self.app_lookup = {}
        self.input_map = {}
        self.dvbt_channels = {}
        self.paired = False
    def _debug_request(self, r):
        # Pass a Requests response in here to see what happened
        print "\n\n\n"
        print "------- What was sent out ---------"
        print r.request.headers
        print r.request.body
        print "---------What came back -----------"
        print r.status_code
        print r.headers
        print r.text
        print "-----------------------------------"
        print "\n\n\n"
    def _lookup_ip_from_hostname(self, hostname):
        # Resolve a hostname to an IPv4 address, or False on failure.
        ipaddr = socket.gethostbyname(hostname)
        # NOTE(review): 'is not' compares object identity, not string
        # equality, so a resolved '127.0.0.1' may still pass this test --
        # should probably be `ipaddr != '127.0.0.1'`; confirm intent.
        if ipaddr is not '127.0.0.1':
            return ipaddr
        else:
            # IP lookup failed
            return False
    def _build_json_payload(self,method, params = [], version="1.0"):
        # Build the standard JSON-RPC-ish envelope the TV expects.
        # NOTE(review): mutable default `params=[]` -- harmless here because
        # it is only read, but worth tidying.
        return {"id":self.packet_id, "method":method, "params":params,
                "version":version}
    def is_available(self):
        # Try to find out if the TV is actually on or not. Pinging the TV would require
        # this script to run as root, so not doing that. This function return True or
        # False depending on if the box is on or not.
        payload = self._build_json_payload("getPowerStatus")
        try:
            # Using a shorter timeout here so we can return more quickly
            r = self.do_POST(url="/sony/system", payload = payload, timeout=2)
            data = r.json()
            if data.has_key('result'):
                if data['result'][0]['status'] == "standby":
                    # TV is in standby mode, and so not on.
                    return False
                elif data['result'][0]['status'] == "active":
                    # TV really is on
                    return True
                else:
                    # Assume it's not on.
                    print "Uncaught result"
                    return False
            if data.has_key('error'):
                if 404 in data['error']:
                    # TV is probably booting at this point - so not available yet
                    return False
                elif 403 in data['error']:
                    # A 403 Forbidden is acceptable here, because it means the TV is responding to requests
                    return True
                else:
                    print "Uncaught error"
                    return False
            return True
        except requests.exceptions.ConnectTimeout:
            print "No response, TV is probably off"
            return False
        except requests.exceptions.ConnectionError:
            print "TV is certainly off."
            return False
        except requests.exceptions.ReadTimeout:
            print "TV is on but not accepting commands yet"
            return False
        except ValueError:
            print "Didn't get back JSON as expected"
            # This might lead to false negatives - need to check
            return False
    def do_GET(self, url=None, headers=None, auth=None, cookies=None, timeout=None):
        # Thin wrapper around requests.get that fills in the endpoint,
        # stored cookies, PSK header and default timeout.
        if url is None: return False
        if url[0:4] != "http": url=self.endpoint+url
        if cookies is None and self.cookies is not None: cookies=self.cookies
        # NOTE(review): if x_auth_psk is set but headers is None this line
        # raises TypeError (None is not subscriptable) -- confirm PSK users
        # always pass a headers dict.
        if self.x_auth_psk is not None: headers['X-Auth-PSK']=self.x_auth_psk
        if timeout is None: timeout = self._TIMEOUT
        if headers is None:
            r = requests.get(url, cookies=cookies, auth=auth, timeout=timeout)
        else:
            r = requests.get(url, headers=headers, cookies=cookies, auth=auth, timeout=timeout)
        return r
    def do_POST(self, url=None, payload=None, headers=None, auth=None, cookies=None, timeout=None):
        # Thin wrapper around requests.post; dict payloads are serialised
        # to JSON and packet_id is bumped per request.
        if url is None: return False
        if type(payload) is dict: payload = json.dumps(payload)
        if headers is None: headers = self._JSON_HEADER # If you don't want any extra headers pass in ""
        if cookies is None and self.cookies is not None: cookies=self.cookies
        if self.x_auth_psk is not None: headers['X-Auth-PSK']=self.x_auth_psk
        if timeout is None: timeout = self._TIMEOUT
        if url[0:4] != "http": url = self.endpoint+url # if you want to pass just the path you can, otherwise pass a full url and it will be used
        self.packet_id += 1 # From packet captures, this increments on each request, so its a good idea to use this method all the time
        if auth is not None:
            # NOTE(review): this passes self.auth (set only by complete_pair)
            # rather than the `auth` argument tested above -- AttributeError
            # if auth is supplied before pairing; probably meant auth=auth.
            r = requests.post(url, data=payload, headers=headers, cookies=cookies, auth=self.auth, timeout=timeout)
        else:
            r = requests.post(url, data=payload, headers=headers, cookies=cookies, timeout=timeout)
        print r
        return r
    def connect(self):
        # TODO:  What if the TV is off and we can't connect?
        #
        # From looking at packet captures what seems to happen is:
        #  1. Try and connect to the accessControl interface with the "pinRegistration" part in the payload
        #  2. If you get back a 200 *and* the return data looks OK then you have already authorised
        #  3. If #2 is a 200 you get back an auth token.  I think that this token will expire, so we might need to
        #     re-connect later on - given that this script will be running for a long time.  Hopefully you won't
        #     need to get a new PIN number ever.
        #  4. If #2 was a 401 then you need to authorise, and then you do that by sending the PIN on screen as
        #     a base64 encoded BasicAuth using a blank username (e.g. "<username>:<password" -> ":1234")
        #     If that works, you should get a cookie back.
        #  5. Use the cookie in all subsequent requests.  Note there is an issue with this.  The cookie is for
        #     path "/sony/" *but* the Apps are run from a path "/DIAL/sony/" so I try and fix this by adding a
        #     second cookie with that path and the same auth data.
        if self.x_auth_psk is None: # We have not specified a PSK therefore we have to use Cookies
            payload = self._build_json_payload("actRegister",
                [{"clientid":self.device_id,"nickname":self.nickname},
                [{"value":"no","function":"WOL"},
                {"value":"no","function":"pinRegistration"}]])
            try:
                r = self.do_POST(url='/sony/accessControl', payload=payload)
            except requests.exceptions.ConnectTimeout:
                print "No response, TV is probably off"
                return None, False
            except requests.exceptions.ConnectionError:
                print "TV is certainly off."
                return None, False
            if r.status_code == 200:
                # Rather handily, the TV returns a 200 if the TV is in stand-by but not really on :)
                try:
                    if "error" in r.json(): #.keys():
                        if "not power-on" in r.json()['error']:
                            # TV isn't powered up
                            r = self.wakeonlan()
                            print "TV not on! Have sent wakeonlan, probably try again in a mo."
                            # TODO: make this less crap
                            return None,False
                except:
                    raise
                # If we get here then We are already paired so get the new token
                self.paired = True
                self.cookies = r.cookies
                # Also add the /DIAL/ path cookie
                # Looks like requests doesn't handle two cookies with the same name ('auth') in one jar
                # so going to have a dict for the DIAL cookie and pass around as needed. :/
                a = r.headers['Set-Cookie'].split(';') # copy the cookie data headers
                for each in a:
                    if len(each) > 0:
                        b = each.split('=')
                        self.DIAL_cookie[b[0].strip()] = b[1]
            elif r.status_code == 401:
                print "We are not paired!"
                return r,False
            elif r.status_code == 404:
                # Most likely the TV hasn't booted yet
                print("TV probably hasn't booted yet")
                return r,False
            else: return None,False
        else: # We are using a PSK
            self.paired = True
            self.cookies = None
            self.DIAL_cookie = None
            r = None
        # Populate some data now automatically.
        print "Getting DMR info..."
        self.get_dmr()
        print "Getting sysem info..."
        self.get_system_info()
        print "Populating remote control codes..."
        self.populate_controller_lookup()
        print "Enumerating TV inputs..."
        self.get_input_map()
        print "Populating apps list..."
        self.populate_apps_lookup()
        print "Populating channel list..."
        self.get_channel_list()
        print "Matching HD channels..."
        self.create_HD_chan_lookups() # You might not want to do this if you don't use Freeview in the UK
        print "Done initialising TV data."
        return r,True
    def start_pair(self):
        # This should prompt the TV to display the pairing screen
        payload = self._build_json_payload("actRegister",
            [{"clientid":self.device_id,"nickname":self.nickname},
            [{"value":"no","function":"WOL"}]])
        r = self.do_POST(url='/sony/accessControl', payload=payload)
        if r.status_code == 200:
            print "Probably already paired"
            return r,True
        if r.status_code == 401:
            # 401 here is the expected "now enter the PIN" state.
            return r,False
        else:
            return None,False
    def complete_pair(self, pin):
        # The user should have a PIN on the screen now, pass it in here to complete the pairing process
        payload = self._build_json_payload("actRegister",
            [{"clientid":self.device_id, "nickname":self.nickname},
            [{"value":"no", "function":"WOL"}]])
        self.auth = HTTPBasicAuth('',pin) # Going to keep this in the object, just in case we need it again later
        r = self.do_POST(url='/sony/accessControl', payload=payload, auth=self.auth)
        if r.status_code == 200:
            print("have paired")
            self.paired = True
            # let's call connect again to get the cookies all set up properly
            a,b = self.connect()
            if b is True:
                return r,True
            else: return r,False
        else:
            return None,False
    def get_system_info(self):
        # Fetch and cache the TV's system information (including its MAC).
        payload = self._build_json_payload("getSystemInformation")
        r = self.do_POST(url="/sony/system", payload=payload)
        if r.status_code == 200:
            self.system_info = r.json()['result'][0]
            # Learn the MAC address from the TV if one wasn't supplied.
            if self.mac_addr == None: self.mac_addr = self.system_info['macAddr']
            return self.system_info
        else:
            return False
    def get_input_map(self):
        # Build title -> {label, uri} map of the TV's external inputs.
        payload = self._build_json_payload("getCurrentExternalInputsStatus")
        r = self.do_POST(url="/sony/avContent", payload=payload)
        if r.status_code == 200:
            for each in r.json()['result'][0]:
                self.input_map[each['title']] = {'label':each['label'], 'uri':each['uri']}
            return True
        else:
            return False
    def get_input_uri_from_label(self, label):
        # Reverse lookup: user-assigned label -> input URI, or None.
        for each in self.input_map:
            if self.input_map[each]['label'] == label:
                return self.input_map[each]['uri']
        print "Didnt match the input name."
        return None
    def set_external_input(self, uri):
        # Switch the TV to the given input URI (see get_input_map).
        payload = self._build_json_payload("setPlayContent", [{"uri":uri}])
        r = self.do_POST(url="/sony/avContent", payload=payload)
        if r.status_code == 200:
            if "error" in r.json():
                # Something didnt work. The JSON will tell you what.
                return False
            else:
                return True
        else:
            return False
    def get_dmr(self):
        # Fetch the UPnP device description (dmr.xml) to get the friendly
        # name and the base set of IRCC remote-control codes.
        r = self.do_GET('http://'+self.ip_addr+':52323/dmr.xml')
        self.dmr_data = minidom.parseString(r.text)
        # XML. FFS. :(
        self.device_friendly_name = self.dmr_data.getElementsByTagName('friendlyName')[0].childNodes[0].data
        a = self.dmr_data.getElementsByTagNameNS('urn:schemas-sony-com:av','X_IRCCCode')
        for each in a:
            name = each.getAttribute("command")
            value = each.firstChild.nodeValue
            self.remote_controller_code_lookup[name.lower()] = value
        # Not much more interesting stuff here really, but see: https://aydbe.com/assets/uploads/2014/11/json.txt
        # and https://github.com/bunk3r/braviapy
        # Maybe /sony/system/setLEDIndicatorStatus would be fun?
        #"setLEDIndicatorStatus" -> {"mode":"string","status":"bool"}
        # Maybe mode is a hex colour? and bool is on/off?
    def populate_controller_lookup(self):
        # Merge the JSON API's remote-control codes into the lookup table.
        payload = self._build_json_payload("getRemoteControllerInfo")
        r = self.do_POST(url='/sony/system', payload=payload)
        if r.status_code == 200:
            for each in r.json()['result'][1]:
                self.remote_controller_code_lookup[each['name'].lower()] = each['value']
            return True
        else:
            return False
    def do_remote_control(self,action):
        # Pass in the action name, such as:
        # "PowerOff" "Mute" "Pause" "Play"
        # You can probably guess what these would be, but if not:
        # print <self>.remote_controller_code_lookup
        action = action.lower()
        if action in self.remote_controller_code_lookup: #.keys():
            ircc_code = self.remote_controller_code_lookup[action]
        else: return False
        header = {'SOAPACTION': '"urn:schemas-sony-com:service:IRCC:1#X_SendIRCC"'}
        url = "/sony/IRCC"
        body = '<?xml version="1.0"?>' # Look at all this crap just to send a remote control code...
        body += '<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/" s:encodingStyle="http://schemas.xmlsoap.org/soap/encoding/">'
        body += '<s:Body>'
        body += '<u:X_SendIRCC xmlns:u="urn:schemas-sony-com:service:IRCC:1">'
        body += '<IRCCCode>' + ircc_code + '</IRCCCode>'
        body += '</u:X_SendIRCC>'
        body += '</s:Body>'
        body += '</s:Envelope>'
        try:
            r = self.do_POST(url=url, payload=body, headers=header)
        except requests.exceptions.ConnectTimeout:
            print("Connect timeout error")
            r = MockResponse(200)
        except requests.exceptions.ConnectionError:
            print("Connect error")
            r = MockResponse(200)
        if r.status_code == 200:
            return True
        else:
            return False
    def populate_apps_lookup(self):
        # Interesting note: If you don't do this (presumably just calling the
        # URL is enough) then apps won't actually launch and you will get a 404
        # error back from the TV. Once you've called this it starts working.
        self.app_lookup={}
        r = self.do_GET(url="/DIAL/sony/applist", cookies=self.DIAL_cookie)
        if r.status_code == 200:
            app_xml_data = minidom.parseString(r.text.encode('utf-8'))
            for each in app_xml_data.getElementsByTagName('app'):
                appid = each.getElementsByTagName('id')[0].firstChild.data
                appname = each.getElementsByTagName('name')[0].firstChild.data
                # Some apps have no icon; tolerate the missing element.
                try: iconurl = each.getElementsByTagName('icon_url')[0].firstChild.data
                except: iconurl = None
                self.app_lookup[appname] = {'id':appid, 'iconurl':iconurl}
            return True
        else:
            return False
    def load_app(self, app_name):
        # Pass in the name of the app, the most useful ones on my telly are:
        # "Amazon Instant Video" , "Netflix", "BBC iPlayer", "Demand 5"
        if self.app_lookup == {}: self.populate_apps_lookup() # This must happen before apps will launch
        try:
            app_id = self.app_lookup[app_name]['id']
        except KeyError:
            return False
        print "Trying to load app:", app_id
        headers = {'Connection':'close'}
        r = self.do_POST(url="/DIAL/apps/"+app_id, headers=headers,
            cookies=self.DIAL_cookie)
        print r.status_code
        print r.headers
        print r
        # DIAL returns 201 Created when the app was launched.
        if r.status_code == 201:
            return True
        else:
            return False
    def get_app_status(self):
        # Return the raw application status list as decoded JSON.
        payload = self._build_json_payload("getApplicationStatusList")
        r = self.do_POST(url="/sony/appControl", payload=payload)
        return r.json()
    def get_channel_list(self):
        # This only supports dvbt for now...
        # First, we find out how many channels there are
        payload = self._build_json_payload("getContentCount",
            [{"target":"all", "source":"tv:dvbt"}], version="1.1")
        r = self.do_POST(url="/sony/avContent", payload=payload)
        chan_count = int(r.json()['result'][0]['count'])
        # It seems to only return the channels in lumps of 50, and some of those returned are blank?
        chunk_size = 50
        loops = int(chan_count / chunk_size) + (chan_count % chunk_size > 0) # Sneaky round up trick, the mod > 0 evaluates to int 1
        chunk = 0
        for x in range(loops):
            payload = self._build_json_payload("getContentList",
                [{"stIdx":chunk, "source":"tv:dvbt", "cnt":chunk_size,
                "target":"all" }], version="1.2")
            r = self.do_POST(url="/sony/avContent", payload=payload)
            a = r.json()['result'][0]
            for each in a:
                if each['title'] == "": continue # We get back some blank entries, so just ignore them
                if self.dvbt_channels.has_key(each['title']):
                    # Channel has already been added, we only want to keep the one with the lowest chan_num.
                    # The TV seems to return channel data for channels it can't actually receive (e.g. out of
                    # area local BBC channels). Trying to tune to these gives an error.
                    if int(each['dispNum']) > int(self.dvbt_channels[each['title']]['chan_num']):
                        # This is probably not a "real" channel we care about, so skip it.
                        continue
                    #self.dvbt_channels[each['title']] = {'chan_num':each['dispNum'], 'uri':each['uri']}
                else:
                    self.dvbt_channels[each['title']] = {'chan_num':each['dispNum'], 'uri':each['uri']}
            chunk += chunk_size
    def create_HD_chan_lookups(self):
        # This should probably be in the script that imports this library not in
        # the library itself, but I wanted this feature, so I'm chucking it in
        # here. This probably only works for Freeview in the UK.
        # Use case to demonstrate why this is here: You want to use Alexa to
        # switch the channel. Naturally, you want the HD channel if there is
        # one but you don't want to have to say "BBC ONE HD" because that would
        # be stupid. So you just say "BBC ONE" and the script does the work to
        # find the HD version for you.
        for each in self.dvbt_channels.iteritems():
            hd_version = "%s HD" % each[0] # e.g. "BBC ONE" -> "BBC ONE HD"
            if hd_version in self.dvbt_channels:
                # Extend the schema by adding a "hd_uri" key
                self.dvbt_channels[each[0]]['hd_uri'] = self.dvbt_channels[hd_version]['uri']
    def get_channel_uri(self, title):
        # Channel title -> tuning URI; lazily populates the channel list.
        if self.dvbt_channels == {}: self.get_channel_list()
        try:
            return self.dvbt_channels[title]['uri']
        except KeyError:
            return False
    def wakeonlan(self, mac=None):
        # Thanks: Taken from https://github.com/aparraga/braviarc/blob/master/braviarc/braviarc.py
        # Not using another library for this as it's pretty small...
        if mac is None and self.mac_addr is not None:
            mac = self.mac_addr
        print "Waking MAC: " + mac
        addr_byte = mac.split(':')
        hw_addr = struct.pack('BBBBBB', int(addr_byte[0], 16),
                              int(addr_byte[1], 16),
                              int(addr_byte[2], 16),
                              int(addr_byte[3], 16),
                              int(addr_byte[4], 16),
                              int(addr_byte[5], 16))
        # Magic packet: 6 x 0xFF followed by the MAC repeated 16 times,
        # broadcast over UDP.
        msg = b'\xff' * 6 + hw_addr * 16
        socket_instance = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        socket_instance.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        socket_instance.sendto(msg, ('<broadcast>', 9))
        socket_instance.close()
        return True
    def poweron(self):
        # Convenience function to switch the TV on and block until it's ready
        # to accept commands.
        if self.paired is False:
            print "You can only call this function once paired with the TV"
            return False
        elif self.paired is True:
            ready = False
            if self.is_available() is True:
                # If we're already on, return now.
                return True
            self.wakeonlan()
            # Poll for up to ~100 seconds for the TV to come up.
            for x in range(10):
                if self.is_available() is True:
                    print "TV now available"
                    return True
                else:
                    print "Didn't get a response. Trying again in 10 seconds. (Attempt "+str(x+1)+" of 10)"
                    time.sleep(10)
            if ready is False:
                print "Couldnt connect in a timely manner. Giving up"
                return False
            else:
                return True
    def get_client_ip(self):
        # Determine this machine's outbound IP by opening (not sending on)
        # a UDP socket towards a public address and reading the local end.
        host_ip = [(s.connect(('8.8.8.8', 80)), s.getsockname()[0], s.close()) for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]
        return host_ip
|
8none1/bravialib
|
bravialib.py
|
Python
|
gpl-3.0
| 24,487
|
from PyQt5.QtCore import Qt, QModelIndex, QAbstractListModel, pyqtSignal
from urh.signalprocessing.Participant import Participant
from urh.simulator.SimulatorConfiguration import SimulatorConfiguration
class SimulatorParticipantListModel(QAbstractListModel):
    """List model of the simulator's active participants.

    Each row is a participant rendered as "name (shortname)" with a
    checkbox that toggles whether the participant is simulated.
    """
    # Emitted after a participant's simulate flag was toggled via the view.
    participant_simulate_changed = pyqtSignal(Participant)
    def __init__(self, config: SimulatorConfiguration, parent=None):
        super().__init__(parent)
        self.simulator_config = config
    def update(self):
        """Force attached views to re-read every row."""
        self.beginResetModel()
        self.endResetModel()
    def rowCount(self, parent: QModelIndex = None, *args, **kwargs):
        return len(self.simulator_config.active_participants)
    def data(self, index: QModelIndex, role=Qt.DisplayRole):
        # Fix: validate the index *before* indexing the participant list;
        # the previous code indexed first, which could raise IndexError
        # for an invalid (e.g. out-of-range) index.
        if not index.isValid():
            return None
        participant = self.simulator_config.active_participants[index.row()]
        if role == Qt.DisplayRole:
            return participant.name + " (" + participant.shortname + ")"
        elif role == Qt.CheckStateRole:
            return Qt.Checked if participant.simulate else Qt.Unchecked
        return None
    def setData(self, index: QModelIndex, value, role=None):
        if role == Qt.CheckStateRole:
            participant = self.simulator_config.active_participants[index.row()]
            participant.simulate = value
            self.update()
            self.participant_simulate_changed.emit(participant)
            return True
        # Qt convention: explicitly report unhandled roles as not set
        # (previously fell through and returned None).
        return False
    def flags(self, index: QModelIndex):
        return Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsUserCheckable
|
jopohl/urh
|
src/urh/models/SimulatorParticipantListModel.py
|
Python
|
gpl-3.0
| 1,580
|
import json
import os
import platform
import signal
import subprocess
import sys
from abc import ABCMeta, abstractmethod
import mozinfo
import mozleak
import mozversion
from mozprocess import ProcessHandler
from mozprofile import FirefoxProfile, Preferences
from mozrunner import FirefoxRunner
from mozrunner.utils import test_environment, get_stack_fixer_function
from mozcrash import mozcrash
from .base import (get_free_port,
Browser,
ExecutorBrowser,
require_arg,
cmd_arg,
browser_command)
from ..executors import executor_kwargs as base_executor_kwargs
from ..executors.executormarionette import (MarionetteTestharnessExecutor, # noqa: F401
MarionetteRefTestExecutor, # noqa: F401
MarionettePrintRefTestExecutor, # noqa: F401
MarionetteWdspecExecutor, # noqa: F401
MarionetteCrashtestExecutor) # noqa: F401
from ..process import cast_env
here = os.path.dirname(__file__)
# Product registration table read by wptrunner: maps hook names to the
# functions/classes in this module that implement the "firefox" product.
__wptrunner__ = {"product": "firefox",
                 "check_args": "check_args",
                 "browser": "FirefoxBrowser",
                 "executor": {"crashtest": "MarionetteCrashtestExecutor",
                              "testharness": "MarionetteTestharnessExecutor",
                              "reftest": "MarionetteRefTestExecutor",
                              "print-reftest": "MarionettePrintRefTestExecutor",
                              "wdspec": "MarionetteWdspecExecutor"},
                 "browser_kwargs": "browser_kwargs",
                 "executor_kwargs": "executor_kwargs",
                 "env_extras": "env_extras",
                 "env_options": "env_options",
                 "run_info_extras": "run_info_extras",
                 "update_properties": "update_properties",
                 "timeout_multiplier": "get_timeout_multiplier"}
def get_timeout_multiplier(test_type, run_info_data, **kwargs):
    """Return the timeout multiplier for a test type on this build.

    An explicit --timeout-multiplier always wins; otherwise slower
    builds (debug/asan, ccov, android, win/aarch64) get larger factors.
    """
    explicit = kwargs["timeout_multiplier"]
    if explicit is not None:
        return explicit
    slow_build = run_info_data["debug"] or run_info_data.get("asan")
    if test_type == "reftest":
        return 4 if slow_build else 2
    if slow_build:
        return 4 if run_info_data.get("ccov") else 3
    if run_info_data["os"] == "android":
        return 4
    # https://bugzilla.mozilla.org/show_bug.cgi?id=1538725
    if run_info_data["os"] == "win" and run_info_data["processor"] == "aarch64":
        return 4
    if run_info_data.get("ccov"):
        return 2
    return 1
def check_args(**kwargs):
    """Validate wptrunner CLI arguments: Firefox requires --binary."""
    require_arg(kwargs, "binary")
def browser_kwargs(test_type, run_info_data, config, **kwargs):
    """Collect the keyword arguments used to construct the Firefox browser."""
    # Arguments copied through from the CLI kwargs under the same name.
    forwarded = ("binary", "prefs_root", "extra_prefs", "debug_info",
                 "symbols_path", "stackwalk_binary", "certutil_binary",
                 "stackfix_dir", "binary_args", "stylo_threads",
                 "chaos_mode_flags", "browser_channel", "headless",
                 "preload_browser", "specialpowers_path")
    browser_args = {name: kwargs[name] for name in forwarded}
    # Renamed / derived values.
    browser_args["test_type"] = test_type
    browser_args["ca_certificate_path"] = config.ssl_config["ca_cert_path"]
    browser_args["e10s"] = kwargs["gecko_e10s"]
    browser_args["enable_webrender"] = kwargs["enable_webrender"]
    browser_args["enable_fission"] = kwargs["enable_fission"]
    browser_args["timeout_multiplier"] = get_timeout_multiplier(test_type,
                                                                run_info_data,
                                                                **kwargs)
    # Leak checking only makes sense on debug builds, and may be disabled
    # explicitly on the command line.
    browser_args["leak_check"] = (run_info_data["debug"] and
                                  (kwargs["leak_check"] is not False))
    browser_args["asan"] = run_info_data.get("asan")
    browser_args["config"] = config
    return browser_args
def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
                    **kwargs):
    """Build the keyword arguments for the Marionette executor classes.

    Starts from the generic executor kwargs and layers on Firefox- and
    test-type-specific settings (reftest options, WebDriver capabilities).
    """
    executor_kwargs = base_executor_kwargs(test_type, server_config,
                                           cache_manager, run_info_data,
                                           **kwargs)
    # Reftests keep the browser open between tests; everything else closes it.
    executor_kwargs["close_after_done"] = test_type != "reftest"
    executor_kwargs["timeout_multiplier"] = get_timeout_multiplier(test_type,
                                                                   run_info_data,
                                                                   **kwargs)
    executor_kwargs["e10s"] = run_info_data["e10s"]
    capabilities = {}
    if test_type == "testharness":
        capabilities["pageLoadStrategy"] = "eager"
    if test_type in ("reftest", "print-reftest"):
        executor_kwargs["reftest_internal"] = kwargs["reftest_internal"]
        executor_kwargs["reftest_screenshot"] = kwargs["reftest_screenshot"]
    if test_type == "wdspec":
        # wdspec tests drive the browser through geckodriver, so the
        # binary/args/prefs go into moz:firefoxOptions capabilities.
        options = {}
        if kwargs["binary"]:
            options["binary"] = kwargs["binary"]
        if kwargs["binary_args"]:
            options["args"] = kwargs["binary_args"]
        if kwargs["headless"]:
            if "args" not in options:
                options["args"] = []
            if "--headless" not in options["args"]:
                options["args"].append("--headless")
        # Resolve the test domains inside Firefox rather than via DNS.
        options["prefs"] = {
            "network.dns.localDomains": ",".join(server_config.domains_set)
        }
        for pref, value in kwargs["extra_prefs"]:
            options["prefs"].update({pref: Preferences.cast(value)})
        capabilities["moz:firefoxOptions"] = options
    # Without certutil we cannot install the test CA, so accept bad certs.
    if kwargs["certutil_binary"] is None:
        capabilities["acceptInsecureCerts"] = True
    if capabilities:
        executor_kwargs["capabilities"] = capabilities
    executor_kwargs["debug"] = run_info_data["debug"]
    executor_kwargs["ccov"] = run_info_data.get("ccov", False)
    executor_kwargs["browser_version"] = run_info_data.get("browser_version")
    return executor_kwargs
def env_extras(**kwargs):
    """Firefox needs no extra environment context managers."""
    return list()
def env_options():
    """Server environment options for the Firefox product.

    The server host is 127.0.0.1 because Firefox is configured (through
    the network.dns.localDomains preference set in executor_kwargs) to
    resolve the test domains to localhost itself, without touching the
    real network stack.
    https://github.com/web-platform-tests/wpt/pull/9480
    """
    options = {}
    options["server_host"] = "127.0.0.1"
    options["supports_debugger"] = True
    return options
def run_info_extras(**kwargs):
    """Compute Firefox-specific run-info properties from the CLI kwargs."""
    def _pref_as_bool(name):
        # Scan the --setpref overrides; None when the pref wasn't supplied.
        for key, value in kwargs.get('extra_prefs', []):
            if key == name:
                return value.lower() in ('true', '1')
        return None

    fission_pref = _pref_as_bool("fission.autostart")
    rv = {
        "e10s": kwargs["gecko_e10s"],
        "wasm": kwargs.get("wasm", True),
        "verify": kwargs["verify"],
        "headless": kwargs.get("headless", False) or "MOZ_HEADLESS" in os.environ,
        "sw-e10s": True,
        "fission": (kwargs.get("enable_fission") or
                    (fission_pref if fission_pref is not None else False)),
    }
    # The value of `sw-e10s` defaults to whether the "parent_intercept"
    # implementation is enabled for the current build. This value, however,
    # can be overridden by explicitly setting the pref with the `--setpref`
    # CLI flag, which is checked here.
    sw_override = _pref_as_bool("dom.serviceWorkers.parent_intercept")
    if sw_override is not None:
        rv["sw-e10s"] = sw_override
    rv.update(run_info_browser_version(**kwargs))
    return rv
def run_info_browser_version(**kwargs):
    """Read build/version metadata from the Firefox binary via mozversion.

    Returns an empty dict when the version information cannot be determined.
    """
    try:
        version_info = mozversion.get_version(kwargs["binary"])
    except mozversion.errors.VersionError:
        version_info = None
    if not version_info:
        return {}
    rv = {
        "browser_build_id": version_info.get("application_buildid", None),
        "browser_changeset": version_info.get("application_changeset", None),
    }
    # Only derive the version from the binary when it wasn't supplied
    # explicitly on the command line.
    if "browser_version" not in kwargs:
        rv["browser_version"] = version_info.get("application_version")
    return rv
def update_properties():
    """Properties used to key expectation-metadata updates for Firefox."""
    primary = ["os", "debug", "webrender", "fission", "e10s", "sw-e10s", "processor"]
    dependent = {"os": ["version"], "processor": ["bits"]}
    return primary, dependent
class FirefoxInstanceManager(object):
    """Abstract manager for starting/stopping Firefox instances.

    Subclasses decide the instance lifecycle policy (single vs preloaded);
    this base class owns the shared profile and the mechanics of launching
    one browser process.
    """
    # NOTE(review): Python 2 style metaclass declaration; ineffective on
    # Python 3 (would need `metaclass=ABCMeta` in the class header).
    __metaclass__ = ABCMeta

    def __init__(self, logger, binary, binary_args, profile_creator, debug_info,
                 chaos_mode_flags, headless, enable_webrender, stylo_threads,
                 leak_check, stackfix_dir, symbols_path, asan):
        """Object that manages starting and stopping instances of Firefox."""
        self.logger = logger
        self.binary = binary
        self.binary_args = binary_args
        # Template profile; each started instance gets a clone of it.
        self.base_profile = profile_creator.create()
        self.debug_info = debug_info
        self.chaos_mode_flags = chaos_mode_flags
        self.headless = headless
        self.enable_webrender = enable_webrender
        self.stylo_threads = stylo_threads
        self.leak_check = leak_check
        self.stackfix_dir = stackfix_dir
        self.symbols_path = symbols_path
        self.asan = asan

        # previous: the last stopped BrowserInstance (kept for post-shutdown
        # processing); current: the instance handed out by get(), if any.
        self.previous = None
        self.current = None

    @abstractmethod
    def teardown(self, force=False):
        pass

    @abstractmethod
    def get(self):
        """Get a BrowserInstance for a running Firefox.

        This can only be called once per instance, and between calls stop_current()
        must be called."""
        pass

    def stop_current(self, force=False):
        """Shutdown the current instance of Firefox.

        The BrowserInstance remains available through self.previous, since some
        operations happen after shutdown."""
        if not self.current:
            return

        self.current.stop(force)
        self.previous = self.current
        self.current = None

    def start(self):
        """Start an instance of Firefox, returning a BrowserInstance handle"""
        profile = self.base_profile.clone(self.base_profile.profile)

        # Each instance listens on its own marionette port so several can
        # coexist (e.g. when preloading).
        marionette_port = get_free_port()
        profile.set_preferences({"marionette.port": marionette_port})

        env = test_environment(xrePath=os.path.abspath(os.path.dirname(self.binary)),
                               debugger=self.debug_info is not None,
                               useLSan=True, log=self.logger)

        env["STYLO_THREADS"] = str(self.stylo_threads)
        if self.chaos_mode_flags is not None:
            env["MOZ_CHAOSMODE"] = str(self.chaos_mode_flags)
        if self.headless:
            env["MOZ_HEADLESS"] = "1"
        if self.enable_webrender:
            env["MOZ_WEBRENDER"] = "1"
            env["MOZ_ACCELERATED"] = "1"
        else:
            env["MOZ_WEBRENDER"] = "0"

        args = self.binary_args[:] if self.binary_args else []
        args += [cmd_arg("marionette"), "about:blank"]

        debug_args, cmd = browser_command(self.binary,
                                          args,
                                          self.debug_info)

        if self.leak_check:
            # Per-process leak log; remove any stale file from a previous run.
            leak_report_file = os.path.join(profile.profile, "runtests_leaks_%s.log" % os.getpid())
            if os.path.exists(leak_report_file):
                os.remove(leak_report_file)
            env["XPCOM_MEM_BLOAT_LOG"] = leak_report_file
        else:
            leak_report_file = None

        output_handler = OutputHandler(self.logger, self.stackfix_dir, self.symbols_path, self.asan)
        runner = FirefoxRunner(profile=profile,
                               binary=cmd[0],
                               cmdargs=cmd[1:],
                               env=cast_env(env),
                               process_class=ProcessHandler,
                               process_args={"processOutputLine": [output_handler]})
        instance = BrowserInstance(self.logger, runner, marionette_port,
                                   output_handler, leak_report_file)

        self.logger.debug("Starting Firefox")
        runner.start(debug_args=debug_args, interactive=self.debug_info and self.debug_info.interactive)
        self.logger.debug("Firefox Started")

        return instance
class SingleInstanceManager(FirefoxInstanceManager):
    """FirefoxInstanceManager that manages a single Firefox instance"""

    def get(self):
        assert not self.current, ("Tried to call get() on InstanceManager that has "
                                  "an existing instance")
        # Finish post-shutdown bookkeeping for the last instance before
        # launching the next one.
        if self.previous is not None:
            self.previous.cleanup()
            self.previous = None
        self.current = self.start()
        return self.current

    def teardown(self, force=False):
        for instance in (self.previous, self.current):
            if instance is not None:
                instance.stop(force)
                instance.cleanup()
class PreloadInstanceManager(FirefoxInstanceManager):
    def __init__(self, *args, **kwargs):
        """FirefoxInstanceManager that keeps one Firefox instance preloaded,
        allowing rapid resumption after an instance shuts down."""
        super(PreloadInstanceManager, self).__init__(*args, **kwargs)
        # Warm instance waiting to be handed out by the next get() call.
        self.pending = None

    def get(self):
        assert not self.current, ("Tried to call get() on InstanceManager that has "
                                  "an existing instance")
        if self.previous is not None:
            self.previous.cleanup()
            self.previous = None
        if self.pending is None:
            # Nothing warmed up yet (first call); start synchronously.
            self.pending = self.start()
        # Hand out the warm instance, then immediately begin preloading a
        # replacement for the next call.
        self.current = self.pending
        self.pending = self.start()
        return self.current

    def teardown(self, force=False):
        # The pending instance never served tests, so skip the marionette
        # shutdown path for it.
        instances = ((self.previous, False),
                     (self.current, False),
                     (self.pending, True))
        for instance, skip_marionette in instances:
            if instance is not None:
                instance.stop(force, skip_marionette=skip_marionette)
                instance.cleanup()
class BrowserInstance(object):
    """Handle to a running Firefox instance.

    Bundles the mozrunner object, the marionette port it was started on,
    the output handler filtering its stdout/stderr, and the optional leak
    report file processed after shutdown.
    """
    # Seconds to wait for a clean shutdown before escalating to signals.
    shutdown_timeout = 70

    def __init__(self, logger, runner, marionette_port, output_handler, leak_report_file):
        """Handle to a running Firefox instance"""
        self.logger = logger
        self.runner = runner
        self.marionette_port = marionette_port
        self.output_handler = output_handler
        self.leak_report_file = leak_report_file

    def stop(self, force=False, skip_marionette=False):
        """Stop Firefox

        :param force: Skip the clean shutdown attempt and terminate directly.
        :param skip_marionette: Don't wait for a clean shutdown and don't run
                                the output handler's post-stop processing.
        """
        # Assume a clean shutdown unless we learn otherwise. This also fixes
        # an UnboundLocalError in the original code when stop() was called
        # after cleanup() (self.runner is None) with skip_marionette=False.
        clean = True
        is_running = self.runner is not None and self.runner.is_running()
        if is_running:
            self.logger.debug("Stopping Firefox %s" % self.pid())
            # Ordered escalation: polite wait, then SIGTERM, then SIGKILL.
            shutdown_methods = [(True, lambda: self.runner.wait(self.shutdown_timeout)),
                                (False, lambda: self.runner.stop(signal.SIGTERM)),
                                (False, lambda: self.runner.stop(signal.SIGKILL))]
            if skip_marionette:
                shutdown_methods = shutdown_methods[1:]
            try:
                # For Firefox we assume that stopping the runner prompts the
                # browser to shut down. This allows the leak log to be written
                for clean, stop_f in shutdown_methods:
                    if not force or not clean:
                        retcode = stop_f()
                        if retcode is not None:
                            self.logger.info("Browser exited with return code %s" % retcode)
                            break
            except OSError:
                # This can happen on Windows if the process is already dead
                pass
        elif self.runner:
            # The browser was already stopped, which we assume was a crash
            # TODO: Should we check the exit code here?
            clean = False
        if not skip_marionette:
            self.output_handler.after_stop(clean_shutdown=clean)

    def pid(self):
        """Return the browser process id, or None if it isn't available."""
        if self.runner.process_handler is None:
            return None

        try:
            return self.runner.process_handler.pid
        except AttributeError:
            return None

    def is_alive(self):
        """Return True if the browser process is currently running."""
        if self.runner:
            return self.runner.is_running()
        return False

    def cleanup(self):
        # mozprofile handles deleting the profile when the refcount reaches 0
        self.runner = None
class OutputHandler(object):
    def __init__(self, logger, stackfix_dir, symbols_path, asan):
        """Filter for handling Firefox process output.

        This receives Firefox process output in the __call__ function, does
        any additional processing that's required, and decides whether to log
        the output. Because the Firefox process can be started before we know
        which filters are going to be required, we buffer all output until
        setup() is called. This is responsible for doing the final configuration
        of the output handlers.
        """
        self.logger = logger
        # These are filled in after setup() is called
        self.instance = None
        self.symbols_path = symbols_path
        if stackfix_dir:
            # We hide errors because they cause disconcerting `CRITICAL`
            # warnings in web platform test output.
            self.stack_fixer = get_stack_fixer_function(stackfix_dir,
                                                        self.symbols_path,
                                                        hideErrors=True)
        else:
            self.stack_fixer = None

        self.asan = asan
        self.lsan_handler = None
        self.mozleak_allowed = None
        self.mozleak_thresholds = None
        self.group_metadata = {}
        # Output received before setup() runs is held here and replayed once
        # the filters are configured.
        self.line_buffer = []
        self.setup_ran = False

    def setup(self, instance=None, group_metadata=None, lsan_disabled=False,
              lsan_allowed=None, lsan_max_stack_depth=None, mozleak_allowed=None,
              mozleak_thresholds=None, **kwargs):
        """Configure the output handler"""
        self.instance = instance
        if group_metadata is None:
            group_metadata = {}
        self.group_metadata = group_metadata

        self.mozleak_allowed = mozleak_allowed
        self.mozleak_thresholds = mozleak_thresholds

        # LSAN processing only applies to ASAN builds.
        if self.asan:
            self.lsan_handler = mozleak.LSANLeaks(self.logger,
                                                  scope=group_metadata.get("scope", "/"),
                                                  allowed=lsan_allowed,
                                                  maxNumRecordedFrames=lsan_max_stack_depth,
                                                  allowAll=lsan_disabled)
        else:
            self.lsan_handler = None

        self.setup_ran = True

        # Replay any output that arrived before configuration was complete.
        for line in self.line_buffer:
            self.__call__(line)
        self.line_buffer = []

    def after_stop(self, clean_shutdown=True):
        # Post-shutdown processing: flush LSAN records and parse the XPCOM
        # leak log, if one was requested.
        self.logger.info("PROCESS LEAKS %s" % self.instance.leak_report_file)
        if self.lsan_handler:
            self.lsan_handler.process()
        if self.instance.leak_report_file is not None:
            if not clean_shutdown:
                # If we didn't get a clean shutdown there probably isn't a leak report file
                self.logger.warning("Firefox didn't exit cleanly, not processing leak logs")
            else:
                # We have to ignore missing leaks in the tab because it can happen that the
                # content process crashed and in that case we don't want the test to fail.
                # Ideally we would record which content process crashed and just skip those.
                mozleak.process_leak_log(
                    self.instance.leak_report_file,
                    leak_thresholds=self.mozleak_thresholds,
                    ignore_missing_leaks=["tab", "gmplugin"],
                    log=self.logger,
                    stack_fixer=self.stack_fixer,
                    scope=self.group_metadata.get("scope"),
                    allowed=self.mozleak_allowed)

    def __call__(self, line):
        """Write a line of output from the firefox process to the log"""
        # Drop known-noisy GLib warnings entirely.
        if b"GLib-GObject-CRITICAL" in line:
            return
        if line:
            if not self.setup_ran:
                # Not configured yet; buffer for replay from setup().
                self.line_buffer.append(line)
                return
            data = line.decode("utf8", "replace")
            if self.stack_fixer:
                data = self.stack_fixer(data)
            if self.lsan_handler:
                # The LSAN handler may consume the line (returning None).
                data = self.lsan_handler.log(data)
            if data is not None:
                self.logger.process_output(self.instance and
                                           self.instance.runner.process_handler and
                                           self.instance.runner.process_handler.pid,
                                           data,
                                           command=" ".join(self.instance.runner.command))
class ProfileCreator(object):
    """Construct Firefox profiles configured for running web-platform-tests."""

    def __init__(self, logger, prefs_root, config, test_type, extra_prefs, e10s,
                 enable_fission, browser_channel, binary, certutil_binary, ca_certificate_path):
        self.logger = logger
        self.prefs_root = prefs_root
        self.config = config
        self.test_type = test_type
        self.extra_prefs = extra_prefs
        self.e10s = e10s
        self.enable_fission = enable_fission
        self.browser_channel = browser_channel
        self.binary = binary
        self.certutil_binary = certutil_binary
        # The original assigned this attribute twice; once is enough.
        self.ca_certificate_path = ca_certificate_path

    def create(self, **kwargs):
        """Create a Firefox profile and return the mozprofile Profile object pointing at that
        profile

        :param kwargs: Additional arguments to pass into the profile constructor
        """
        preferences = self._load_prefs()
        profile = FirefoxProfile(preferences=preferences,
                                 **kwargs)
        self._set_required_prefs(profile)
        if self.ca_certificate_path is not None:
            self._setup_ssl(profile)
        return profile

    def _load_prefs(self):
        """Load the preference files shipped with the Gecko test profiles.

        Command-line --setpref preferences are merged last so they win over
        the file-based defaults.
        """
        prefs = Preferences()

        pref_paths = []

        profiles = os.path.join(self.prefs_root, 'profiles.json')
        if os.path.isfile(profiles):
            with open(profiles, 'r') as fh:
                for name in json.load(fh)['web-platform-tests']:
                    if self.browser_channel in (None, 'nightly'):
                        pref_paths.append(os.path.join(self.prefs_root, name, 'user.js'))
                    elif name != 'unittest-features':
                        # Non-nightly channels skip the nightly-only
                        # unittest-features profile.
                        pref_paths.append(os.path.join(self.prefs_root, name, 'user.js'))
        else:
            # Old preference files used before the creation of profiles.json (remove when no longer supported)
            legacy_pref_paths = (
                os.path.join(self.prefs_root, 'prefs_general.js'),   # Used in Firefox 60 and below
                os.path.join(self.prefs_root, 'common', 'user.js'),  # Used in Firefox 61
            )
            for path in legacy_pref_paths:
                if os.path.isfile(path):
                    pref_paths.append(path)

        for path in pref_paths:
            if os.path.exists(path):
                prefs.add(Preferences.read_prefs(path))
            else:
                self.logger.warning("Failed to find base prefs file in %s" % path)

        # Add any custom preferences
        prefs.add(self.extra_prefs, cast=True)

        return prefs()

    def _set_required_prefs(self, profile):
        """Set preferences required for wptrunner to function.

        Note that this doesn't set the marionette port, since we don't always
        know that at profile creation time. So the caller is responsible for
        setting that once it's available."""
        profile.set_preferences({
            "network.dns.localDomains": ",".join(self.config.domains_set),
            "dom.file.createInChild": True,
            # TODO: Remove preferences once Firefox 64 is stable (Bug 905404)
            "network.proxy.type": 0,
            "places.history.enabled": False,
            "network.preload": True,
        })
        if self.e10s:
            profile.set_preferences({"browser.tabs.remote.autostart": True})

        if self.enable_fission:
            profile.set_preferences({"fission.autostart": True})

        if self.test_type in ("reftest", "print-reftest"):
            profile.set_preferences({"layout.interruptible-reflow.enabled": False})

        if self.test_type == "print-reftest":
            profile.set_preferences({"print.always_print_silent": True})

        # Bug 1262954: winxp + e10s, disable hwaccel
        if (self.e10s and platform.system() in ("Windows", "Microsoft") and
                "5.1" in platform.version()):
            # Fix: the original referenced the non-existent attribute
            # `self.profile`, raising AttributeError on this path; use the
            # profile passed in as a parameter.
            profile.set_preferences({"layers.acceleration.disabled": True})

    def _setup_ssl(self, profile):
        """Create a certificate database to use in the test profile. This is configured
        to trust the CA Certificate that has signed the web-platform.test server
        certificate."""
        if self.certutil_binary is None:
            self.logger.info("--certutil-binary not supplied; Firefox will not check certificates")
            return

        self.logger.info("Setting up ssl")

        # Make sure the certutil libraries from the source tree are loaded when using a
        # local copy of certutil
        # TODO: Maybe only set this if certutil won't launch?
        env = os.environ.copy()
        certutil_dir = os.path.dirname(self.binary or self.certutil_binary)
        if mozinfo.isMac:
            env_var = "DYLD_LIBRARY_PATH"
        elif mozinfo.isUnix:
            env_var = "LD_LIBRARY_PATH"
        else:
            env_var = "PATH"

        env[env_var] = (os.path.pathsep.join([certutil_dir, env[env_var]])
                        if env_var in env else certutil_dir).encode(
                            sys.getfilesystemencoding() or 'utf-8', 'replace')

        def certutil(*args):
            # Run certutil and echo its output into the structured log.
            cmd = [self.certutil_binary] + list(args)
            self.logger.process_output("certutil",
                                       subprocess.check_output(cmd,
                                                               env=cast_env(env),
                                                               stderr=subprocess.STDOUT),
                                       " ".join(cmd))

        pw_path = os.path.join(profile.profile, ".crtdbpw")
        with open(pw_path, "w") as f:
            # Use empty password for certificate db
            f.write("\n")

        cert_db_path = profile.profile

        # Create a new certificate db
        certutil("-N", "-d", cert_db_path, "-f", pw_path)

        # Add the CA certificate to the database and mark as trusted to issue server certs
        certutil("-A", "-d", cert_db_path, "-f", pw_path, "-t", "CT,,",
                 "-n", "web-platform-tests", "-i", self.ca_certificate_path)

        # List all certs in the database
        certutil("-L", "-d", cert_db_path)
class FirefoxBrowser(Browser):
    """wptrunner Browser implementation for Firefox.

    Profile construction is delegated to ProfileCreator; process lifecycle
    is delegated to a FirefoxInstanceManager (single or preloading).
    """
    # Seconds allowed for the browser to become ready, scaled by any
    # timeout multiplier supplied at construction time.
    init_timeout = 70

    def __init__(self, logger, binary, prefs_root, test_type, extra_prefs=None, debug_info=None,
                 symbols_path=None, stackwalk_binary=None, certutil_binary=None,
                 ca_certificate_path=None, e10s=False, enable_webrender=False, enable_fission=False,
                 stackfix_dir=None, binary_args=None, timeout_multiplier=None, leak_check=False,
                 asan=False, stylo_threads=1, chaos_mode_flags=None, config=None,
                 browser_channel="nightly", headless=None, preload_browser=False,
                 specialpowers_path=None, **kwargs):
        Browser.__init__(self, logger)

        self.logger = logger

        if timeout_multiplier:
            self.init_timeout = self.init_timeout * timeout_multiplier

        self.instance = None
        self._settings = None

        self.stackfix_dir = stackfix_dir
        self.symbols_path = symbols_path
        self.stackwalk_binary = stackwalk_binary

        self.asan = asan
        self.leak_check = leak_check

        self.specialpowers_path = specialpowers_path

        profile_creator = ProfileCreator(logger,
                                         prefs_root,
                                         config,
                                         test_type,
                                         extra_prefs,
                                         e10s,
                                         enable_fission,
                                         browser_channel,
                                         binary,
                                         certutil_binary,
                                         ca_certificate_path)

        if preload_browser:
            instance_manager_cls = PreloadInstanceManager
        else:
            instance_manager_cls = SingleInstanceManager
        self.instance_manager = instance_manager_cls(logger,
                                                     binary,
                                                     binary_args,
                                                     profile_creator,
                                                     debug_info,
                                                     chaos_mode_flags,
                                                     headless,
                                                     enable_webrender,
                                                     stylo_threads,
                                                     leak_check,
                                                     stackfix_dir,
                                                     symbols_path,
                                                     asan)

    def settings(self, test):
        # Per-test settings consumed by the executor; the leak-related
        # entries are only meaningful when leak checking is enabled.
        self._settings = {"check_leaks": self.leak_check and not test.leaks,
                          "lsan_disabled": test.lsan_disabled,
                          "lsan_allowed": test.lsan_allowed,
                          "lsan_max_stack_depth": test.lsan_max_stack_depth,
                          "mozleak_allowed": self.leak_check and test.mozleak_allowed,
                          "mozleak_thresholds": self.leak_check and test.mozleak_threshold,
                          "special_powers": self.specialpowers_path and test.url_base == "/_mozilla/"}
        return self._settings

    def start(self, group_metadata=None, **kwargs):
        # Obtain a (possibly preloaded) instance and finish configuring its
        # output handler now that the test group is known.
        self.instance = self.instance_manager.get()
        self.instance.output_handler.setup(self.instance,
                                           group_metadata,
                                           **kwargs)

    def stop(self, force=False):
        self.instance_manager.stop_current(force)
        self.logger.debug("stopped")

    def pid(self):
        return self.instance.pid()

    def is_alive(self):
        return self.instance and self.instance.is_alive()

    def cleanup(self, force=False):
        self.instance_manager.teardown(force)

    def executor_browser(self):
        assert self.instance is not None
        extensions = []
        # The SpecialPowers extension is only loaded for Mozilla-internal
        # tests (url_base "/_mozilla/"), per settings() above.
        if self._settings.get("special_powers", False):
            extensions.append(self.specialpowers_path)
        return ExecutorBrowser, {"marionette_port": self.instance.marionette_port,
                                 "extensions": extensions}

    def check_crash(self, process, test):
        # Look for minidumps written into the instance's profile directory.
        dump_dir = os.path.join(self.instance.runner.profile.profile, "minidumps")

        try:
            return bool(mozcrash.log_crashes(self.logger,
                                             dump_dir,
                                             symbols_path=self.symbols_path,
                                             stackwalk_binary=self.stackwalk_binary,
                                             process=process,
                                             test=test))
        except IOError:
            self.logger.warning("Looking for crash dump files failed")
            return False
|
asajeffrey/servo
|
tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/browsers/firefox.py
|
Python
|
mpl-2.0
| 32,513
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for ssh/transport.py and the classes therein.
"""
import struct
try:
import pyasn1
except ImportError:
pyasn1 = None
try:
import Crypto.Cipher.DES3
except ImportError:
Crypto = None
if pyasn1 is not None and Crypto is not None:
dependencySkip = None
from twisted.conch.ssh import transport, keys, factory
from twisted.conch.test import keydata
else:
if pyasn1 is None:
dependencySkip = "Cannot run without PyASN1"
elif Crypto is None:
dependencySkip = "can't run w/o PyCrypto"
class transport: # fictional modules to make classes work
class SSHTransportBase: pass
class SSHServerTransport: pass
class SSHClientTransport: pass
class factory:
class SSHFactory:
pass
from hashlib import md5, sha1
from twisted.trial import unittest
from twisted.internet import defer
from twisted.protocols import loopback
from twisted.python import randbytes
from twisted.python.reflect import qual, getClass
from twisted.conch.ssh import address, service, common
from twisted.test import proto_helpers
from twisted.conch.error import ConchError
class MockTransportBase(transport.SSHTransportBase):
    """
    A base class for the client and server protocols.  Records every message
    it receives, instead of ignoring it, so that tests can inspect exactly
    what arrived.

    @ivar errors: a list of tuples: (reasonCode, description)
    @ivar unimplementeds: a list of integers: sequence number
    @ivar debugs: a list of tuples: (alwaysDisplay, message, lang)
    @ivar ignoreds: a list of strings: ignored data
    """

    def connectionMade(self):
        """
        Initialise the recording attributes for a fresh connection.
        """
        transport.SSHTransportBase.connectionMade(self)
        for recorder in ('errors', 'unimplementeds', 'debugs', 'ignoreds'):
            setattr(self, recorder, [])
        self.gotUnsupportedVersion = None

    def _unsupportedVersionReceived(self, remoteVersion):
        """
        Remember the unsupported version string, then defer to the base
        implementation.

        @type remoteVersion: C{str}
        """
        self.gotUnsupportedVersion = remoteVersion
        base = transport.SSHTransportBase
        return base._unsupportedVersionReceived(self, remoteVersion)

    def receiveError(self, reasonCode, description):
        """
        Record a received disconnect error.

        @type reasonCode: C{int}
        @type description: C{str}
        """
        self.errors.append((reasonCode, description))

    def receiveUnimplemented(self, seqnum):
        """
        Record the sequence number from an unimplemented-packet message.

        @type seqnum: C{int}
        """
        self.unimplementeds.append(seqnum)

    def receiveDebug(self, alwaysDisplay, message, lang):
        """
        Record a received debug message.

        @type alwaysDisplay: C{bool}
        @type message: C{str}
        @type lang: C{str}
        """
        self.debugs.append((alwaysDisplay, message, lang))

    def ssh_IGNORE(self, packet):
        """
        Record the payload of an SSH_MSG_IGNORE packet.

        @type packet: C{str}
        """
        self.ignoreds.append(packet)
class MockCipher(object):
    """
    A mocked-up version of twisted.conch.ssh.transport.SSHCiphers.

    Encryption and decryption are identity transforms that record they were
    used and enforce block alignment; the MAC is the single character whose
    ordinal is the packet sequence number.
    """
    outCipType = 'test'
    encBlockSize = 6
    inCipType = 'test'
    decBlockSize = 6
    inMACType = 'test'
    outMACType = 'test'
    verifyDigestSize = 1
    usedEncrypt = False
    usedDecrypt = False
    outMAC = (None, '', '', 1)
    inMAC = (None, '', '', 1)
    keys = ()

    def encrypt(self, x):
        """
        Called to encrypt the packet.  Simply record that encryption was used
        and return the data unchanged.

        @raise RuntimeError: if the data length is not a multiple of the
            outgoing block size.
        """
        self.usedEncrypt = True
        if (len(x) % self.encBlockSize) != 0:
            raise RuntimeError("length %i modulo blocksize %i is not 0: %i" %
                               (len(x), self.encBlockSize, len(x) % self.encBlockSize))
        return x

    def decrypt(self, x):
        """
        Called to decrypt the packet.  Simply record that decryption was used
        and return the data unchanged.

        @raise RuntimeError: if the data length is not a multiple of the
            incoming block size.
        """
        self.usedDecrypt = True
        # Fix: check against the *decryption* block size; the original
        # checked encBlockSize while reporting decBlockSize in the error
        # message (masked because both are 6).
        if (len(x) % self.decBlockSize) != 0:
            raise RuntimeError("length %i modulo blocksize %i is not 0: %i" %
                               (len(x), self.decBlockSize, len(x) % self.decBlockSize))
        return x

    def makeMAC(self, outgoingPacketSequence, payload):
        """
        Make a Message Authentication Code by sending the character value of
        the outgoing packet.
        """
        return chr(outgoingPacketSequence)

    def verify(self, incomingPacketSequence, packet, macData):
        """
        Verify the Message Authentication Code by checking that the packet
        sequence number is the same.
        """
        return chr(incomingPacketSequence) == macData

    def setKeys(self, ivOut, keyOut, ivIn, keyIn, macIn, macOut):
        """
        Record the keys.
        """
        self.keys = (ivOut, keyOut, ivIn, keyIn, macIn, macOut)
class MockCompression:
    """
    A mocked-up compression, based on the zlib interface.  Instead of
    compressing, it reverses the data; flush() supplies a trailing 0x66
    byte that decompress() strips again.
    """
    def compress(self, payload):
        # "Compress" by reversing the payload.
        return ''.join(reversed(payload))

    def decompress(self, payload):
        # Drop the 0x66 trailer byte, then undo the reversal.
        trimmed = payload[:-1]
        return trimmed[::-1]

    def flush(self, kind):
        return '\x66'
class MockService(service.SSHService):
    """
    A mocked-up service, based on twisted.conch.ssh.service.SSHService.

    @ivar started: True if this service has been started.
    @ivar stopped: True if this service has been stopped.
    """
    name = "MockService"
    started = False
    stopped = False
    protocolMessages = {0xff: "MSG_TEST", 71: "MSG_fiction"}

    def logPrefix(self):
        return "MockService"

    def serviceStarted(self):
        """
        Flag this service as having been started.
        """
        self.started = True

    def serviceStopped(self):
        """
        Flag this service as having been stopped.
        """
        self.stopped = True

    def ssh_TEST(self, packet):
        """
        Respond to a MSG_TEST message by echoing the packet back.
        """
        self.transport.sendPacket(0xff, packet)
class MockFactory(factory.SSHFactory):
    """
    A mocked-up factory based on twisted.conch.ssh.factory.SSHFactory.
    """
    services = {
        'ssh-userauth': MockService}

    def getPublicKeys(self):
        """
        Return the public keys that authenticate this server.
        """
        public = {}
        public['ssh-rsa'] = keys.Key.fromString(keydata.publicRSA_openssh)
        public['ssh-dsa'] = keys.Key.fromString(keydata.publicDSA_openssh)
        return public

    def getPrivateKeys(self):
        """
        Return the private keys that authenticate this server.
        """
        private = {}
        private['ssh-rsa'] = keys.Key.fromString(keydata.privateRSA_openssh)
        private['ssh-dsa'] = keys.Key.fromString(keydata.privateDSA_openssh)
        return private

    def getPrimes(self):
        """
        Return the Diffie-Hellman primes that can be used for the
        diffie-hellman-group-exchange-sha1 key exchange.
        """
        primes = {}
        primes[1024] = ((2, transport.DH_PRIME),)
        primes[2048] = ((3, transport.DH_PRIME),)
        primes[4096] = ((5, 7),)
        return primes
class MockOldFactoryPublicKeys(MockFactory):
    """
    The old SSHFactory returned mappings from key names to strings from
    getPublicKeys(). We return those here for testing.
    """
    def getPublicKeys(self):
        """
        We used to map key types to public key blobs as strings.
        """
        keyMapping = MockFactory.getPublicKeys(self)
        # Replace each Key object with its blob, mimicking the old API.
        for name in list(keyMapping):
            keyMapping[name] = keyMapping[name].blob()
        return keyMapping
class MockOldFactoryPrivateKeys(MockFactory):
    """
    The old SSHFactory returned mappings from key names to PyCrypto key
    objects from getPrivateKeys(). We return those here for testing.
    """
    def getPrivateKeys(self):
        """
        We used to map key types to PyCrypto key objects.
        """
        keyMapping = MockFactory.getPrivateKeys(self)
        # Replace each Key wrapper with the raw PyCrypto object, mimicking
        # the old API.
        for name in list(keyMapping):
            keyMapping[name] = keyMapping[name].keyObject
        return keyMapping
class TransportTestCase(unittest.TestCase):
    """
    Base class for transport test cases.

    Subclasses set C{klass} to the protocol class under test.  setUp wires
    the protocol to a string transport and makes the padding source
    deterministic so packet bytes can be compared exactly.
    """
    klass = None

    if dependencySkip:
        skip = dependencySkip

    def setUp(self):
        # A string-backed transport lets tests inspect the exact bytes the
        # protocol writes.
        self.transport = proto_helpers.StringTransport()
        self.proto = self.klass()
        self.packets = []

        def secureRandom(len):
            """
            Return a consistent entropy value
            """
            return '\x99' * len
        # Monkeypatch the randomness source so packet padding is the
        # predictable '\x99' byte; restored in tearDown.
        self.oldSecureRandom = randbytes.secureRandom
        randbytes.secureRandom = secureRandom

        def stubSendPacket(messageType, payload):
            self.packets.append((messageType, payload))
        self.proto.makeConnection(self.transport)
        # we just let the kex packet go into the transport
        self.proto.sendPacket = stubSendPacket

    def finishKeyExchange(self, proto):
        """
        Deliver enough additional messages to C{proto} so that the key exchange
        which is started in L{SSHTransportBase.connectionMade} completes and
        non-key exchange messages can be sent and received.
        """
        proto.dataReceived("SSH-2.0-BogoClient-1.2i\r\n")
        proto.dispatchMessage(
            transport.MSG_KEXINIT, self._A_KEXINIT_MESSAGE)
        proto._keySetup("foo", "bar")
        # SSHTransportBase can't handle MSG_NEWKEYS, or it would be the right
        # thing to deliver next.  _newKeys won't work either, because
        # sendKexInit (probably) hasn't been called.  sendKexInit is
        # responsible for setting up certain state _newKeys relies on.  So,
        # just change the key exchange state to what it would be when key
        # exchange is finished.
        proto._keyExchangeState = proto._KEY_EXCHANGE_NONE

    def tearDown(self):
        # Undo the secureRandom monkeypatch installed in setUp.
        randbytes.secureRandom = self.oldSecureRandom
        self.oldSecureRandom = None

    def simulateKeyExchange(self, sharedSecret, exchangeHash):
        """
        Finish a key exchange by calling C{_keySetup} with the given arguments.
        Also do extra whitebox stuff to satisfy that method's assumption that
        some kind of key exchange has actually taken place.
        """
        self.proto._keyExchangeState = self.proto._KEY_EXCHANGE_REQUESTED
        self.proto._blockedByKeyExchange = []
        self.proto._keySetup(sharedSecret, exchangeHash)
class BaseSSHTransportTestCase(TransportTestCase):
    """
    Test TransportBase. It implements the non-server/client specific
    parts of the SSH transport protocol.
    """

    klass = MockTransportBase

    # A canned KEXINIT payload: a 16-byte cookie, then the ten name-list
    # fields (kex algorithms, host key types, ciphers, MACs, compression
    # and languages, one per direction), then the first-kex-packet-follows
    # flag byte and a reserved uint32.
    _A_KEXINIT_MESSAGE = (
        "\xAA" * 16 +
        common.NS('diffie-hellman-group1-sha1') +
        common.NS('ssh-rsa') +
        common.NS('aes256-ctr') +
        common.NS('aes256-ctr') +
        common.NS('hmac-sha1') +
        common.NS('hmac-sha1') +
        common.NS('none') +
        common.NS('none') +
        common.NS('') +
        common.NS('') +
        '\x00' + '\x00\x00\x00\x00')
    def test_sendVersion(self):
        """
        Test that the first thing sent over the connection is the version
        string.
        """
        # the other setup was done in the setup method; only the
        # identification line (everything before the first CRLF) matters.
        self.assertEqual(self.transport.value().split('\r\n', 1)[0],
                         "SSH-2.0-Twisted")
    def test_sendPacketPlain(self):
        """
        Test that plain (unencrypted, uncompressed) packets are sent
        correctly.  The format is::
            uint32 length (including type and padding length)
            byte padding length
            byte type
            bytes[length-padding length-2] data
            bytes[padding length] padding
        """
        proto = MockTransportBase()
        proto.makeConnection(self.transport)
        self.finishKeyExchange(proto)
        self.transport.clear()
        message = ord('A')
        payload = 'BCDEFG'
        proto.sendPacket(message, payload)
        value = self.transport.value()
        # 0x0c total length, 0x04 padding length, type byte 'A', payload,
        # then four bytes of the deterministic '\x99' padding from the
        # secureRandom monkeypatch in setUp.
        self.assertEqual(value, '\x00\x00\x00\x0c\x04ABCDEFG\x99\x99\x99\x99')
    def test_sendPacketEncrypted(self):
        """
        Test that packets sent while encryption is enabled are sent
        correctly.  The whole packet should be encrypted.
        """
        proto = MockTransportBase()
        proto.makeConnection(self.transport)
        self.finishKeyExchange(proto)
        proto.currentEncryptions = testCipher = MockCipher()
        message = ord('A')
        payload = 'BC'
        self.transport.clear()
        proto.sendPacket(message, payload)
        self.assertTrue(testCipher.usedEncrypt)
        value = self.transport.value()
        # MockCipher's MAC is chr(outgoing packet sequence); the trailing
        # '\x02' below shows this is outgoing packet number 2.
        self.assertEqual(
            value,
            # Four byte length prefix
            '\x00\x00\x00\x08'
            # One byte padding length
            '\x04'
            # The actual application data
            'ABC'
            # "Random" padding - see the secureRandom monkeypatch in setUp
            '\x99\x99\x99\x99'
            # The MAC
            '\x02')
    def test_sendPacketCompressed(self):
        """
        Test that packets sent while compression is enabled are sent
        correctly.  The packet type and data should be compressed.
        """
        proto = MockTransportBase()
        proto.makeConnection(self.transport)
        self.finishKeyExchange(proto)
        proto.outgoingCompression = MockCompression()
        self.transport.clear()
        proto.sendPacket(ord('A'), 'B')
        value = self.transport.value()
        # MockCompression reverses 'AB' to 'BA' and flush() appends '\x66';
        # the rest is the deterministic '\x99' padding.
        self.assertEqual(
            value,
            '\x00\x00\x00\x0c\x08BA\x66\x99\x99\x99\x99\x99\x99\x99\x99')
    def test_sendPacketBoth(self):
        """
        Test that packets sent while compression and encryption are
        enabled are sent correctly.  The packet type and data should be
        compressed and then the whole packet should be encrypted.
        """
        proto = MockTransportBase()
        proto.makeConnection(self.transport)
        self.finishKeyExchange(proto)
        proto.currentEncryptions = testCipher = MockCipher()
        proto.outgoingCompression = MockCompression()
        message = ord('A')
        payload = 'BC'
        self.transport.clear()
        proto.sendPacket(message, payload)
        self.assertTrue(testCipher.usedEncrypt)
        value = self.transport.value()
        # 'ABC' reversed to 'CBA' plus the '\x66' flush trailer, then padded
        # and followed by the chr(sequence-number) MAC from MockCipher.
        self.assertEqual(
            value,
            # Four byte length prefix
            '\x00\x00\x00\x0e'
            # One byte padding length
            '\x09'
            # Compressed application data
            'CBA\x66'
            # "Random" padding - see the secureRandom monkeypatch in setUp
            '\x99\x99\x99\x99\x99\x99\x99\x99\x99'
            # The MAC
            '\x02')
    def test_getPacketPlain(self):
        """
        Test that packets are retrieved correctly out of the buffer when
        no encryption is enabled.
        """
        proto = MockTransportBase()
        proto.makeConnection(self.transport)
        self.finishKeyExchange(proto)
        self.transport.clear()
        proto.sendPacket(ord('A'), 'BC')
        # Round-trip: feed the wire bytes back in, with trailing data that
        # must stay buffered after the packet is extracted.
        proto.buf = self.transport.value() + 'extra'
        self.assertEqual(proto.getPacket(), 'ABC')
        self.assertEqual(proto.buf, 'extra')
    def test_getPacketEncrypted(self):
        """
        Test that encrypted packets are retrieved correctly.
        See test_sendPacketEncrypted.
        """
        proto = MockTransportBase()
        proto.sendKexInit = lambda: None # don't send packets
        proto.makeConnection(self.transport)
        self.transport.clear()
        proto.currentEncryptions = testCipher = MockCipher()
        proto.sendPacket(ord('A'), 'BCD')
        value = self.transport.value()
        # Feed only the first cipher block: getPacket() must return None
        # (incomplete packet) but still decrypt and stash the header.
        proto.buf = value[:MockCipher.decBlockSize]
        self.assertEqual(proto.getPacket(), None)
        self.assertTrue(testCipher.usedDecrypt)
        self.assertEqual(proto.first, '\x00\x00\x00\x0e\x09A')
        # With the rest of the bytes the full packet becomes available.
        proto.buf += value[MockCipher.decBlockSize:]
        self.assertEqual(proto.getPacket(), 'ABCD')
        self.assertEqual(proto.buf, '')
    def test_getPacketCompressed(self):
        """
        Test that compressed packets are retrieved correctly.  See
        test_sendPacketCompressed.
        """
        proto = MockTransportBase()
        proto.makeConnection(self.transport)
        self.finishKeyExchange(proto)
        self.transport.clear()
        # Use the same mock object in both directions so decompress() undoes
        # exactly what compress() did.
        proto.outgoingCompression = MockCompression()
        proto.incomingCompression = proto.outgoingCompression
        proto.sendPacket(ord('A'), 'BCD')
        proto.buf = self.transport.value()
        self.assertEqual(proto.getPacket(), 'ABCD')
    def test_getPacketBoth(self):
        """
        Test that compressed and encrypted packets are retrieved correctly.
        See test_sendPacketBoth.
        """
        proto = MockTransportBase()
        proto.sendKexInit = lambda: None
        proto.makeConnection(self.transport)
        self.transport.clear()
        # Symmetric mocks: encryption is the identity and compression is
        # reversible, so the round trip must reproduce the payload.
        proto.currentEncryptions = MockCipher()
        proto.outgoingCompression = MockCompression()
        proto.incomingCompression = proto.outgoingCompression
        proto.sendPacket(ord('A'), 'BCDEFG')
        proto.buf = self.transport.value()
        self.assertEqual(proto.getPacket(), 'ABCDEFG')
    def test_ciphersAreValid(self):
        """
        Test that all the supportedCiphers are valid.
        """
        ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
        iv = key = '\x00' * 16
        # Every advertised cipher name must yield a usable cipher object.
        for cipName in self.proto.supportedCiphers:
            self.assertTrue(ciphers._getCipher(cipName, iv, key))
    def test_sendKexInit(self):
        """
        Test that the KEXINIT (key exchange initiation) message is sent
        correctly. Payload::
            bytes[16] cookie
            string key exchange algorithms
            string public key algorithms
            string outgoing ciphers
            string incoming ciphers
            string outgoing MACs
            string incoming MACs
            string outgoing compressions
            string incoming compressions
            bool first packet follows
            uint32 0
        """
        # Strip the version-string line sent first; the rest is the packet.
        value = self.transport.value().split('\r\n', 1)[1]
        self.proto.buf = value
        packet = self.proto.getPacket()
        self.assertEqual(packet[0], chr(transport.MSG_KEXINIT))
        # MockTransportBase uses a fixed 0x99 cookie for determinism.
        self.assertEqual(packet[1:17], '\x99' * 16)
        (kex, pubkeys, ciphers1, ciphers2, macs1, macs2, compressions1,
         compressions2, languages1, languages2,
         buf) = common.getNS(packet[17:], 10)
        self.assertEqual(kex, ','.join(self.proto.supportedKeyExchanges))
        self.assertEqual(pubkeys, ','.join(self.proto.supportedPublicKeys))
        self.assertEqual(ciphers1, ','.join(self.proto.supportedCiphers))
        self.assertEqual(ciphers2, ','.join(self.proto.supportedCiphers))
        self.assertEqual(macs1, ','.join(self.proto.supportedMACs))
        self.assertEqual(macs2, ','.join(self.proto.supportedMACs))
        self.assertEqual(compressions1,
                         ','.join(self.proto.supportedCompressions))
        self.assertEqual(compressions2,
                         ','.join(self.proto.supportedCompressions))
        self.assertEqual(languages1, ','.join(self.proto.supportedLanguages))
        self.assertEqual(languages2, ','.join(self.proto.supportedLanguages))
        # Trailing "first packet follows" flag (1 byte) + uint32 zero.
        self.assertEqual(buf, '\x00' * 5)
    def test_receiveKEXINITReply(self):
        """
        Immediately after connecting, the transport expects a KEXINIT message
        and does not reply to it.
        """
        self.transport.clear()
        self.proto.dispatchMessage(
            transport.MSG_KEXINIT, self._A_KEXINIT_MESSAGE)
        # Our own KEXINIT went out at connection time; no extra reply here.
        self.assertEqual(self.packets, [])
    def test_sendKEXINITReply(self):
        """
        When a KEXINIT message is received which is not a reply to an earlier
        KEXINIT message which was sent, a KEXINIT reply is sent.
        """
        # Finish the initial exchange so the incoming KEXINIT starts a new one.
        self.finishKeyExchange(self.proto)
        del self.packets[:]
        self.proto.dispatchMessage(
            transport.MSG_KEXINIT, self._A_KEXINIT_MESSAGE)
        self.assertEqual(len(self.packets), 1)
        self.assertEqual(self.packets[0][0], transport.MSG_KEXINIT)
    def test_sendKexInitTwiceFails(self):
        """
        A new key exchange cannot be started while a key exchange is already in
        progress. If an attempt is made to send a I{KEXINIT} message using
        L{SSHTransportBase.sendKexInit} while a key exchange is in progress
        causes that method to raise a L{RuntimeError}.
        """
        # connectionMade (run in setUp) already sent a KEXINIT, so an
        # exchange is in progress and a second sendKexInit must fail.
        self.assertRaises(RuntimeError, self.proto.sendKexInit)
    def test_sendKexInitBlocksOthers(self):
        """
        After L{SSHTransportBase.sendKexInit} has been called, messages types
        other than the following are queued and not sent until after I{NEWKEYS}
        is sent by L{SSHTransportBase._keySetup}.
        RFC 4253, section 7.1.
        """
        # sendKexInit is called by connectionMade, which is called in setUp.
        # So we're in the state already.
        disallowedMessageTypes = [
            transport.MSG_SERVICE_REQUEST,
            transport.MSG_KEXINIT,
            ]
        # Drop all the bytes sent by setUp, they're not relevant to this test.
        self.transport.clear()
        # Get rid of the sendPacket monkey patch, we are testing the behavior
        # of sendPacket.
        del self.proto.sendPacket
        for messageType in disallowedMessageTypes:
            self.proto.sendPacket(messageType, 'foo')
            # Nothing may reach the wire while key exchange is in progress.
            self.assertEqual(self.transport.value(), "")
        self.finishKeyExchange(self.proto)
        # Make the bytes written to the transport cleartext so it's easier to
        # make an assertion about them.
        self.proto.nextEncryptions = MockCipher()
        # Pseudo-deliver the peer's NEWKEYS message, which should flush the
        # messages which were queued above.
        self.proto._newKeys()
        self.assertEqual(self.transport.value().count("foo"), 2)
    def test_sendDebug(self):
        """
        Test that debug messages are sent correctly. Payload::
            bool always display
            string debug message
            string language
        """
        self.proto.sendDebug("test", True, 'en')
        # \x01 = alwaysDisplay True, then NS("test") and NS("en").
        self.assertEqual(
            self.packets,
            [(transport.MSG_DEBUG,
              "\x01\x00\x00\x00\x04test\x00\x00\x00\x02en")])
    def test_receiveDebug(self):
        """
        Test that debug messages are received correctly. See test_sendDebug.
        """
        self.proto.dispatchMessage(
            transport.MSG_DEBUG,
            '\x01\x00\x00\x00\x04test\x00\x00\x00\x02en')
        # MockTransportBase records (alwaysDisplay, message, language).
        self.assertEqual(self.proto.debugs, [(True, 'test', 'en')])
    def test_sendIgnore(self):
        """
        Test that ignored messages are sent correctly. Payload::
            string ignored data
        """
        self.proto.sendIgnore("test")
        # Payload is simply NS("test").
        self.assertEqual(
            self.packets, [(transport.MSG_IGNORE,
                            '\x00\x00\x00\x04test')])
    def test_receiveIgnore(self):
        """
        Test that ignored messages are received correctly. See
        test_sendIgnore.
        """
        self.proto.dispatchMessage(transport.MSG_IGNORE, 'test')
        # The raw payload is recorded verbatim by the mock.
        self.assertEqual(self.proto.ignoreds, ['test'])
    def test_sendUnimplemented(self):
        """
        Test that unimplemented messages are sent correctly. Payload::
            uint32 sequence number
        """
        self.proto.sendUnimplemented()
        # No packets received yet, so the reported sequence number is 0.
        self.assertEqual(
            self.packets, [(transport.MSG_UNIMPLEMENTED,
                            '\x00\x00\x00\x00')])
    def test_receiveUnimplemented(self):
        """
        Test that unimplemented messages are received correctly. See
        test_sendUnimplemented.
        """
        self.proto.dispatchMessage(transport.MSG_UNIMPLEMENTED,
                                   '\x00\x00\x00\xff')
        # The uint32 payload (255) is decoded and recorded.
        self.assertEqual(self.proto.unimplementeds, [255])
    def test_sendDisconnect(self):
        """
        Test that disconnection messages are sent correctly. Payload::
            uint32 reason code
            string reason description
            string language
        """
        disconnected = [False]
        # Stub out loseConnection so we can observe that it was called
        # without tearing down the test transport.
        def stubLoseConnection():
            disconnected[0] = True
        self.transport.loseConnection = stubLoseConnection
        self.proto.sendDisconnect(0xff, "test")
        self.assertEqual(
            self.packets,
            [(transport.MSG_DISCONNECT,
              "\x00\x00\x00\xff\x00\x00\x00\x04test\x00\x00\x00\x00")])
        # sendDisconnect must also close the underlying connection.
        self.assertTrue(disconnected[0])
    def test_receiveDisconnect(self):
        """
        Test that disconnection messages are received correctly. See
        test_sendDisconnect.
        """
        disconnected = [False]
        def stubLoseConnection():
            disconnected[0] = True
        self.transport.loseConnection = stubLoseConnection
        self.proto.dispatchMessage(transport.MSG_DISCONNECT,
                                   '\x00\x00\x00\xff\x00\x00\x00\x04test')
        # (reason code, description) is recorded and the connection closed.
        self.assertEqual(self.proto.errors, [(255, 'test')])
        self.assertTrue(disconnected[0])
    def test_dataReceived(self):
        """
        Test that dataReceived parses packets and dispatches them to
        ssh_* methods.
        """
        kexInit = [False]
        def stubKEXINIT(packet):
            kexInit[0] = True
        self.proto.ssh_KEXINIT = stubKEXINIT
        # Feed the transport its own output (version string + KEXINIT).
        self.proto.dataReceived(self.transport.value())
        self.assertTrue(self.proto.gotVersion)
        self.assertEqual(self.proto.ourVersionString,
                         self.proto.otherVersionString)
        self.assertTrue(kexInit[0])
    def test_service(self):
        """
        Test that the transport can set the running service and dispatches
        packets to the service's packetReceived method.
        """
        service = MockService()
        self.proto.setService(service)
        self.assertEqual(self.proto.service, service)
        self.assertTrue(service.started)
        # 0xff is not a transport-level message, so it goes to the service.
        self.proto.dispatchMessage(0xff, "test")
        self.assertEqual(self.packets, [(0xff, "test")])
        # Switching services stops the old one and starts the new one.
        service2 = MockService()
        self.proto.setService(service2)
        self.assertTrue(service2.started)
        self.assertTrue(service.stopped)
        self.proto.connectionLost(None)
        self.assertTrue(service2.stopped)
    def test_avatar(self):
        """
        Test that the transport notifies the avatar of disconnections.
        """
        disconnected = [False]
        def logout():
            disconnected[0] = True
        # The logout callback is only invoked when an avatar is set.
        self.proto.logoutFunction = logout
        self.proto.avatar = True
        self.proto.connectionLost(None)
        self.assertTrue(disconnected[0])
    def test_isEncrypted(self):
        """
        Test that the transport accurately reflects its encrypted status.
        """
        # Before key exchange completes, nothing is encrypted.
        self.assertFalse(self.proto.isEncrypted('in'))
        self.assertFalse(self.proto.isEncrypted('out'))
        self.assertFalse(self.proto.isEncrypted('both'))
        self.proto.currentEncryptions = MockCipher()
        self.assertTrue(self.proto.isEncrypted('in'))
        self.assertTrue(self.proto.isEncrypted('out'))
        self.assertTrue(self.proto.isEncrypted('both'))
        # The 'none' cipher counts as unencrypted.
        self.proto.currentEncryptions = transport.SSHCiphers('none', 'none',
                                                             'none', 'none')
        self.assertFalse(self.proto.isEncrypted('in'))
        self.assertFalse(self.proto.isEncrypted('out'))
        self.assertFalse(self.proto.isEncrypted('both'))
        self.assertRaises(TypeError, self.proto.isEncrypted, 'bad')
    def test_isVerified(self):
        """
        Test that the transport accurately reflects its verified status.
        """
        self.assertFalse(self.proto.isVerified('in'))
        self.assertFalse(self.proto.isVerified('out'))
        self.assertFalse(self.proto.isVerified('both'))
        self.proto.currentEncryptions = MockCipher()
        self.assertTrue(self.proto.isVerified('in'))
        self.assertTrue(self.proto.isVerified('out'))
        self.assertTrue(self.proto.isVerified('both'))
        # The 'none' MAC means the stream integrity is not verified.
        self.proto.currentEncryptions = transport.SSHCiphers('none', 'none',
                                                             'none', 'none')
        self.assertFalse(self.proto.isVerified('in'))
        self.assertFalse(self.proto.isVerified('out'))
        self.assertFalse(self.proto.isVerified('both'))
        self.assertRaises(TypeError, self.proto.isVerified, 'bad')
    def test_loseConnection(self):
        """
        Test that loseConnection sends a disconnect message and closes the
        connection.
        """
        disconnected = [False]
        def stubLoseConnection():
            disconnected[0] = True
        self.transport.loseConnection = stubLoseConnection
        self.proto.loseConnection()
        self.assertEqual(self.packets[0][0], transport.MSG_DISCONNECT)
        # Byte 3 of the payload is the low byte of the uint32 reason code.
        self.assertEqual(self.packets[0][1][3],
                         chr(transport.DISCONNECT_CONNECTION_LOST))
    def test_badVersion(self):
        """
        Test that the transport disconnects when it receives a bad version.
        """
        def testBad(version):
            # Reset protocol state so each bad version is tested from scratch.
            self.packets = []
            self.proto.gotVersion = False
            disconnected = [False]
            def stubLoseConnection():
                disconnected[0] = True
            self.transport.loseConnection = stubLoseConnection
            # Deliver one byte at a time to exercise incremental parsing.
            for c in version + '\r\n':
                self.proto.dataReceived(c)
            self.assertTrue(disconnected[0])
            self.assertEqual(self.packets[0][0], transport.MSG_DISCONNECT)
            self.assertEqual(
                self.packets[0][1][3],
                chr(transport.DISCONNECT_PROTOCOL_VERSION_NOT_SUPPORTED))
        testBad('SSH-1.5-OpenSSH')
        testBad('SSH-3.0-Twisted')
        testBad('GET / HTTP/1.1')
    def test_dataBeforeVersion(self):
        """
        Test that the transport ignores data sent before the version string.
        """
        proto = MockTransportBase()
        proto.makeConnection(proto_helpers.StringTransport())
        # Junk lines preceding the SSH identification string must be skipped.
        data = ("""here's some stuff beforehand
                here's some other stuff
                """ + proto.ourVersionString + "\r\n")
        [proto.dataReceived(c) for c in data]
        self.assertTrue(proto.gotVersion)
        self.assertEqual(proto.otherVersionString, proto.ourVersionString)
    def test_compatabilityVersion(self):
        """
        Test that the transport treats the compatibility version (1.99)
        as equivalent to version 2.0.
        """
        proto = MockTransportBase()
        proto.makeConnection(proto_helpers.StringTransport())
        proto.dataReceived("SSH-1.99-OpenSSH\n")
        self.assertTrue(proto.gotVersion)
        self.assertEqual(proto.otherVersionString, "SSH-1.99-OpenSSH")
    def test_supportedVersionsAreAllowed(self):
        """
        If an unusual SSH version is received and is included in
        C{supportedVersions}, an unsupported version error is not emitted.
        """
        proto = MockTransportBase()
        # Whitelist the odd version before the peer announces it.
        proto.supportedVersions = ("9.99", )
        proto.makeConnection(proto_helpers.StringTransport())
        proto.dataReceived("SSH-9.99-OpenSSH\n")
        self.assertFalse(proto.gotUnsupportedVersion)
    def test_unsupportedVersionsCallUnsupportedVersionReceived(self):
        """
        If an unusual SSH version is received and is not included in
        C{supportedVersions}, an unsupported version error is emitted.
        """
        proto = MockTransportBase()
        proto.supportedVersions = ("2.0", )
        proto.makeConnection(proto_helpers.StringTransport())
        proto.dataReceived("SSH-9.99-OpenSSH\n")
        # The mock records the offending version number.
        self.assertEqual("9.99", proto.gotUnsupportedVersion)
    def test_badPackets(self):
        """
        Test that the transport disconnects with an error when it receives
        bad packets.
        """
        def testBad(packet, error=transport.DISCONNECT_PROTOCOL_ERROR):
            self.packets = []
            self.proto.buf = packet
            self.assertEqual(self.proto.getPacket(), None)
            self.assertEqual(len(self.packets), 1)
            self.assertEqual(self.packets[0][0], transport.MSG_DISCONNECT)
            # Byte 3 is the low byte of the uint32 disconnect reason.
            self.assertEqual(self.packets[0][1][3], chr(error))
        testBad('\xff' * 8) # big packet
        testBad('\x00\x00\x00\x05\x00BCDE') # length not modulo blocksize
        oldEncryptions = self.proto.currentEncryptions
        self.proto.currentEncryptions = MockCipher()
        testBad('\x00\x00\x00\x08\x06AB123456', # bad MAC
                transport.DISCONNECT_MAC_ERROR)
        # Sabotage decryption so the decrypted length no longer matches.
        self.proto.currentEncryptions.decrypt = lambda x: x[:-1]
        testBad('\x00\x00\x00\x08\x06BCDEFGHIJK') # bad decryption
        self.proto.currentEncryptions = oldEncryptions
        self.proto.incomingCompression = MockCompression()
        def stubDecompress(payload):
            raise Exception('bad compression')
        self.proto.incomingCompression.decompress = stubDecompress
        testBad('\x00\x00\x00\x04\x00BCDE', # bad decompression
                transport.DISCONNECT_COMPRESSION_ERROR)
        # The decompression failure above was logged; flush it.
        self.flushLoggedErrors()
    def test_unimplementedPackets(self):
        """
        Test that unimplemented packet types cause MSG_UNIMPLEMENTED packets
        to be sent.
        """
        seqnum = self.proto.incomingPacketSequence
        def checkUnimplemented(seqnum=seqnum):
            self.assertEqual(self.packets[0][0],
                             transport.MSG_UNIMPLEMENTED)
            self.assertEqual(self.packets[0][1][3], chr(seqnum))
            # NOTE(review): 'self.proto.packets' looks like it was meant to be
            # 'self.packets', and the local 'seqnum += 1' has no effect across
            # calls -- the test passes because dispatchMessage does not bump
            # incomingPacketSequence here, but this is worth confirming.
            self.proto.packets = []
            seqnum += 1
        self.proto.dispatchMessage(40, '')
        checkUnimplemented()
        # Even a *named* message with no ssh_* handler is unimplemented.
        transport.messages[41] = 'MSG_fiction'
        self.proto.dispatchMessage(41, '')
        checkUnimplemented()
        self.proto.dispatchMessage(60, '')
        checkUnimplemented()
        # With a service set, unknown types are still answered the same way.
        self.proto.setService(MockService())
        self.proto.dispatchMessage(70, '')
        checkUnimplemented()
        self.proto.dispatchMessage(71, '')
        checkUnimplemented()
    def test_getKey(self):
        """
        Test that _getKey generates the correct keys.
        """
        self.proto.sessionID = 'EF'
        # RFC 4253 section 7.2: K1 = HASH(K || H || c || session_id),
        # K2 = HASH(K || H || K1); here 'AB' is K, 'CD' is H, 'K' is c.
        k1 = sha1('AB' + 'CD' + 'K' + self.proto.sessionID).digest()
        k2 = sha1('ABCD' + k1).digest()
        self.assertEqual(self.proto._getKey('K', 'AB', 'CD'), k1 + k2)
def test_multipleClasses(self):
"""
Test that multiple instances have distinct states.
"""
proto = self.proto
proto.dataReceived(self.transport.value())
proto.currentEncryptions = MockCipher()
proto.outgoingCompression = MockCompression()
proto.incomingCompression = MockCompression()
proto.setService(MockService())
proto2 = MockTransportBase()
proto2.makeConnection(proto_helpers.StringTransport())
proto2.sendIgnore('')
self.failIfEquals(proto.gotVersion, proto2.gotVersion)
self.failIfEquals(proto.transport, proto2.transport)
self.failIfEquals(proto.outgoingPacketSequence,
proto2.outgoingPacketSequence)
self.failIfEquals(proto.incomingPacketSequence,
proto2.incomingPacketSequence)
self.failIfEquals(proto.currentEncryptions,
proto2.currentEncryptions)
self.failIfEquals(proto.service, proto2.service)
class ServerAndClientSSHTransportBaseCase:
    """
    Tests that need to be run on both the server and the client.
    """
    def checkDisconnected(self, kind=None):
        """
        Helper function to check if the transport disconnected.
        """
        if kind is None:
            kind = transport.DISCONNECT_PROTOCOL_ERROR
        self.assertEqual(self.packets[-1][0], transport.MSG_DISCONNECT)
        # Byte 3 of the payload is the low byte of the uint32 reason code.
        self.assertEqual(self.packets[-1][1][3], chr(kind))
    def connectModifiedProtocol(self, protoModification,
            kind=None):
        """
        Helper function to connect a modified protocol to the test protocol
        and test for disconnection.
        """
        if kind is None:
            kind = transport.DISCONNECT_KEY_EXCHANGE_FAILED
        proto2 = self.klass()
        protoModification(proto2)
        proto2.makeConnection(proto_helpers.StringTransport())
        self.proto.dataReceived(proto2.transport.value())
        # A falsy kind (e.g. 0) skips the disconnection check entirely.
        if kind:
            self.checkDisconnected(kind)
        return proto2
    def test_disconnectIfCantMatchKex(self):
        """
        Test that the transport disconnects if it can't match the key
        exchange
        """
        def blankKeyExchanges(proto2):
            proto2.supportedKeyExchanges = []
        self.connectModifiedProtocol(blankKeyExchanges)
    def test_disconnectIfCantMatchKeyAlg(self):
        """
        Like test_disconnectIfCantMatchKex, but for the key algorithm.
        """
        def blankPublicKeys(proto2):
            proto2.supportedPublicKeys = []
        self.connectModifiedProtocol(blankPublicKeys)
    def test_disconnectIfCantMatchCompression(self):
        """
        Like test_disconnectIfCantMatchKex, but for the compression.
        """
        def blankCompressions(proto2):
            proto2.supportedCompressions = []
        self.connectModifiedProtocol(blankCompressions)
    def test_disconnectIfCantMatchCipher(self):
        """
        Like test_disconnectIfCantMatchKex, but for the encryption.
        """
        def blankCiphers(proto2):
            proto2.supportedCiphers = []
        self.connectModifiedProtocol(blankCiphers)
    def test_disconnectIfCantMatchMAC(self):
        """
        Like test_disconnectIfCantMatchKex, but for the MAC.
        """
        def blankMACs(proto2):
            proto2.supportedMACs = []
        self.connectModifiedProtocol(blankMACs)
    def test_getPeer(self):
        """
        Test that the transport's L{getPeer} method returns an
        L{SSHTransportAddress} with the L{IAddress} of the peer.
        """
        self.assertEqual(self.proto.getPeer(),
                         address.SSHTransportAddress(
                self.proto.transport.getPeer()))
    def test_getHost(self):
        """
        Test that the transport's L{getHost} method returns an
        L{SSHTransportAddress} with the L{IAddress} of the host.
        """
        self.assertEqual(self.proto.getHost(),
                         address.SSHTransportAddress(
                self.proto.transport.getHost()))
class ServerSSHTransportTestCase(ServerAndClientSSHTransportBaseCase,
                                 TransportTestCase):
    """
    Tests for the SSHServerTransport.
    """
    klass = transport.SSHServerTransport
    def setUp(self):
        TransportTestCase.setUp(self)
        # The server transport needs a factory carrying host keys.
        self.proto.factory = MockFactory()
        self.proto.factory.startFactory()
    def tearDown(self):
        TransportTestCase.tearDown(self)
        self.proto.factory.stopFactory()
        del self.proto.factory
    def test_KEXINIT(self):
        """
        Test that receiving a KEXINIT packet sets up the correct values on the
        server.
        """
        # This is a complete client handshake (version string + KEXINIT
        # packet) captured as raw wire bytes.
        self.proto.dataReceived( 'SSH-2.0-Twisted\r\n\x00\x00\x01\xd4\t\x14'
                '\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99'
                '\x99\x00\x00\x00=diffie-hellman-group1-sha1,diffie-hellman-g'
                'roup-exchange-sha1\x00\x00\x00\x0fssh-dss,ssh-rsa\x00\x00\x00'
                '\x85aes128-ctr,aes128-cbc,aes192-ctr,aes192-cbc,aes256-ctr,ae'
                's256-cbc,cast128-ctr,cast128-cbc,blowfish-ctr,blowfish-cbc,3d'
                'es-ctr,3des-cbc\x00\x00\x00\x85aes128-ctr,aes128-cbc,aes192-c'
                'tr,aes192-cbc,aes256-ctr,aes256-cbc,cast128-ctr,cast128-cbc,b'
                'lowfish-ctr,blowfish-cbc,3des-ctr,3des-cbc\x00\x00\x00\x12hma'
                'c-md5,hmac-sha1\x00\x00\x00\x12hmac-md5,hmac-sha1\x00\x00\x00'
                '\tnone,zlib\x00\x00\x00\tnone,zlib\x00\x00\x00\x00\x00\x00'
                '\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x99\x99\x99\x99'
                '\x99')
        # The server honours the *client's* preference order.
        self.assertEqual(self.proto.kexAlg,
                         'diffie-hellman-group1-sha1')
        self.assertEqual(self.proto.keyAlg,
                         'ssh-dss')
        self.assertEqual(self.proto.outgoingCompressionType,
                         'none')
        self.assertEqual(self.proto.incomingCompressionType,
                         'none')
        ne = self.proto.nextEncryptions
        self.assertEqual(ne.outCipType, 'aes128-ctr')
        self.assertEqual(ne.inCipType, 'aes128-ctr')
        self.assertEqual(ne.outMACType, 'hmac-md5')
        self.assertEqual(ne.inMACType, 'hmac-md5')
    def test_ignoreGuessPacketKex(self):
        """
        The client is allowed to send a guessed key exchange packet
        after it sends the KEXINIT packet. However, if the key exchanges
        do not match, that guess packet must be ignored. This tests that
        the packet is ignored in the case of the key exchange method not
        matching.
        """
        # Reverse the kex list so the guess is guaranteed to be wrong; the
        # trailing \xff sets the 'first kex packet follows' flag.
        kexInitPacket = '\x00' * 16 + (
            ''.join([common.NS(x) for x in
                     [','.join(y) for y in
                      [self.proto.supportedKeyExchanges[::-1],
                       self.proto.supportedPublicKeys,
                       self.proto.supportedCiphers,
                       self.proto.supportedCiphers,
                       self.proto.supportedMACs,
                       self.proto.supportedMACs,
                       self.proto.supportedCompressions,
                       self.proto.supportedCompressions,
                       self.proto.supportedLanguages,
                       self.proto.supportedLanguages]]])) + (
            '\xff\x00\x00\x00\x00')
        self.proto.ssh_KEXINIT(kexInitPacket)
        self.assertTrue(self.proto.ignoreNextPacket)
        # Non-kex packets do not consume the ignore flag.
        self.proto.ssh_DEBUG("\x01\x00\x00\x00\x04test\x00\x00\x00\x00")
        self.assertTrue(self.proto.ignoreNextPacket)
        self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x08\x00')
        self.assertFalse(self.proto.ignoreNextPacket)
        self.assertEqual(self.packets, [])
        self.proto.ignoreNextPacket = True
        self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x08\x00' * 3)
        self.assertFalse(self.proto.ignoreNextPacket)
        self.assertEqual(self.packets, [])
    def test_ignoreGuessPacketKey(self):
        """
        Like test_ignoreGuessPacketKex, but for an incorrectly guessed
        public key format.
        """
        kexInitPacket = '\x00' * 16 + (
            ''.join([common.NS(x) for x in
                     [','.join(y) for y in
                      [self.proto.supportedKeyExchanges,
                       self.proto.supportedPublicKeys[::-1],
                       self.proto.supportedCiphers,
                       self.proto.supportedCiphers,
                       self.proto.supportedMACs,
                       self.proto.supportedMACs,
                       self.proto.supportedCompressions,
                       self.proto.supportedCompressions,
                       self.proto.supportedLanguages,
                       self.proto.supportedLanguages]]])) + (
            '\xff\x00\x00\x00\x00')
        self.proto.ssh_KEXINIT(kexInitPacket)
        self.assertTrue(self.proto.ignoreNextPacket)
        self.proto.ssh_DEBUG("\x01\x00\x00\x00\x04test\x00\x00\x00\x00")
        self.assertTrue(self.proto.ignoreNextPacket)
        self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x08\x00')
        self.assertFalse(self.proto.ignoreNextPacket)
        self.assertEqual(self.packets, [])
        self.proto.ignoreNextPacket = True
        self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x08\x00' * 3)
        self.assertFalse(self.proto.ignoreNextPacket)
        self.assertEqual(self.packets, [])
    def test_KEXDH_INIT(self):
        """
        Test that the KEXDH_INIT packet causes the server to send a
        KEXDH_REPLY with the server's public key and a signature.
        """
        self.proto.supportedKeyExchanges = ['diffie-hellman-group1-sha1']
        self.proto.supportedPublicKeys = ['ssh-rsa']
        self.proto.dataReceived(self.transport.value())
        # Client's DH public value e = g^5000 mod p.
        e = pow(transport.DH_GENERATOR, 5000,
                transport.DH_PRIME)
        self.proto.ssh_KEX_DH_GEX_REQUEST_OLD(common.MP(e))
        # The mock's deterministic random bytes fix the server secret y.
        y = common.getMP('\x00\x00\x00\x40' + '\x99' * 64)[0]
        f = common._MPpow(transport.DH_GENERATOR, y, transport.DH_PRIME)
        sharedSecret = common._MPpow(e, y, transport.DH_PRIME)
        # Recompute the RFC 4253 exchange hash H the server must sign.
        h = sha1()
        h.update(common.NS(self.proto.ourVersionString) * 2)
        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
        h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()))
        h.update(common.MP(e))
        h.update(f)
        h.update(sharedSecret)
        exchangeHash = h.digest()
        signature = self.proto.factory.privateKeys['ssh-rsa'].sign(
            exchangeHash)
        self.assertEqual(
            self.packets,
            [(transport.MSG_KEXDH_REPLY,
              common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob())
              + f + common.NS(signature)),
             (transport.MSG_NEWKEYS, '')])
    def test_KEX_DH_GEX_REQUEST_OLD(self):
        """
        Test that the KEX_DH_GEX_REQUEST_OLD message causes the server
        to reply with a KEX_DH_GEX_GROUP message with the correct
        Diffie-Hellman group.
        """
        self.proto.supportedKeyExchanges = [
            'diffie-hellman-group-exchange-sha1']
        self.proto.supportedPublicKeys = ['ssh-rsa']
        self.proto.dataReceived(self.transport.value())
        self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x04\x00')
        self.assertEqual(
            self.packets,
            [(transport.MSG_KEX_DH_GEX_GROUP,
              common.MP(transport.DH_PRIME) + '\x00\x00\x00\x01\x02')])
        self.assertEqual(self.proto.g, 2)
        self.assertEqual(self.proto.p, transport.DH_PRIME)
    def test_KEX_DH_GEX_REQUEST_OLD_badKexAlg(self):
        """
        Test that if the server receives a KEX_DH_GEX_REQUEST_OLD message
        and the key exchange algorithm is not 'diffie-hellman-group1-sha1' or
        'diffie-hellman-group-exchange-sha1', we raise a ConchError.
        """
        self.proto.kexAlg = None
        self.assertRaises(ConchError, self.proto.ssh_KEX_DH_GEX_REQUEST_OLD,
                None)
    def test_KEX_DH_GEX_REQUEST(self):
        """
        Test that the KEX_DH_GEX_REQUEST message causes the server to reply
        with a KEX_DH_GEX_GROUP message with the correct Diffie-Hellman
        group.
        """
        self.proto.supportedKeyExchanges = [
            'diffie-hellman-group-exchange-sha1']
        self.proto.supportedPublicKeys = ['ssh-rsa']
        self.proto.dataReceived(self.transport.value())
        # Payload is min=1024, preferred=2048, max=3072 group sizes.
        self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x04\x00\x00\x00\x08\x00' +
                                          '\x00\x00\x0c\x00')
        self.assertEqual(
            self.packets,
            [(transport.MSG_KEX_DH_GEX_GROUP,
              common.MP(transport.DH_PRIME) + '\x00\x00\x00\x01\x03')])
        self.assertEqual(self.proto.g, 3)
        self.assertEqual(self.proto.p, transport.DH_PRIME)
    def test_KEX_DH_GEX_INIT_after_REQUEST(self):
        """
        Test that the KEX_DH_GEX_INIT message after the client sends
        KEX_DH_GEX_REQUEST causes the server to send a KEX_DH_GEX_INIT message
        with a public key and signature.
        """
        self.test_KEX_DH_GEX_REQUEST()
        e = pow(self.proto.g, 3, self.proto.p)
        y = common.getMP('\x00\x00\x00\x80' + '\x99' * 128)[0]
        f = common._MPpow(self.proto.g, y, self.proto.p)
        sharedSecret = common._MPpow(e, y, self.proto.p)
        # For group-exchange, H also covers the min/preferred/max request
        # and the negotiated p and g.
        h = sha1()
        h.update(common.NS(self.proto.ourVersionString) * 2)
        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
        h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()))
        h.update('\x00\x00\x04\x00\x00\x00\x08\x00\x00\x00\x0c\x00')
        h.update(common.MP(self.proto.p))
        h.update(common.MP(self.proto.g))
        h.update(common.MP(e))
        h.update(f)
        h.update(sharedSecret)
        exchangeHash = h.digest()
        self.proto.ssh_KEX_DH_GEX_INIT(common.MP(e))
        self.assertEqual(
            self.packets[1],
            (transport.MSG_KEX_DH_GEX_REPLY,
             common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()) +
             f + common.NS(self.proto.factory.privateKeys['ssh-rsa'].sign(
                        exchangeHash))))
    def test_KEX_DH_GEX_INIT_after_REQUEST_OLD(self):
        """
        Test that the KEX_DH_GEX_INIT message after the client sends
        KEX_DH_GEX_REQUEST_OLD causes the server to send a KEX_DH_GEX_INIT
        message with a public key and signature.
        """
        self.test_KEX_DH_GEX_REQUEST_OLD()
        e = pow(self.proto.g, 3, self.proto.p)
        y = common.getMP('\x00\x00\x00\x80' + '\x99' * 128)[0]
        f = common._MPpow(self.proto.g, y, self.proto.p)
        sharedSecret = common._MPpow(e, y, self.proto.p)
        h = sha1()
        h.update(common.NS(self.proto.ourVersionString) * 2)
        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
        h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()))
        # The old-style request only carries the single preferred size.
        h.update('\x00\x00\x04\x00')
        h.update(common.MP(self.proto.p))
        h.update(common.MP(self.proto.g))
        h.update(common.MP(e))
        h.update(f)
        h.update(sharedSecret)
        exchangeHash = h.digest()
        self.proto.ssh_KEX_DH_GEX_INIT(common.MP(e))
        self.assertEqual(
            self.packets[1:],
            [(transport.MSG_KEX_DH_GEX_REPLY,
              common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()) +
              f + common.NS(self.proto.factory.privateKeys['ssh-rsa'].sign(
                        exchangeHash))),
             (transport.MSG_NEWKEYS, '')])
    def test_keySetup(self):
        """
        Test that _keySetup sets up the next encryption keys.
        """
        self.proto.nextEncryptions = MockCipher()
        self.simulateKeyExchange('AB', 'CD')
        self.assertEqual(self.proto.sessionID, 'CD')
        # The session ID is set only by the first exchange and never changes.
        self.simulateKeyExchange('AB', 'EF')
        self.assertEqual(self.proto.sessionID, 'CD')
        self.assertEqual(self.packets[-1], (transport.MSG_NEWKEYS, ''))
        newKeys = [self.proto._getKey(c, 'AB', 'EF') for c in 'ABCDEF']
        # Server ordering: (out IV, out key, in IV, in key, out MAC, in MAC)
        # maps to derivation letters B, D, A, C, F, E.
        self.assertEqual(
            self.proto.nextEncryptions.keys,
            (newKeys[1], newKeys[3], newKeys[0], newKeys[2], newKeys[5],
             newKeys[4]))
    def test_NEWKEYS(self):
        """
        Test that NEWKEYS transitions the keys in nextEncryptions to
        currentEncryptions.
        """
        self.test_KEXINIT()
        self.proto.nextEncryptions = transport.SSHCiphers('none', 'none',
                                                          'none', 'none')
        self.proto.ssh_NEWKEYS('')
        self.assertIdentical(self.proto.currentEncryptions,
                             self.proto.nextEncryptions)
        self.assertIdentical(self.proto.outgoingCompression, None)
        self.assertIdentical(self.proto.incomingCompression, None)
        # Compression objects are only created once NEWKEYS activates them.
        self.proto.outgoingCompressionType = 'zlib'
        self.simulateKeyExchange('AB', 'CD')
        self.proto.ssh_NEWKEYS('')
        self.failIfIdentical(self.proto.outgoingCompression, None)
        self.proto.incomingCompressionType = 'zlib'
        self.simulateKeyExchange('AB', 'EF')
        self.proto.ssh_NEWKEYS('')
        self.failIfIdentical(self.proto.incomingCompression, None)
    def test_SERVICE_REQUEST(self):
        """
        Test that the SERVICE_REQUEST message requests and starts a
        service.
        """
        self.proto.ssh_SERVICE_REQUEST(common.NS('ssh-userauth'))
        self.assertEqual(self.packets, [(transport.MSG_SERVICE_ACCEPT,
                                         common.NS('ssh-userauth'))])
        # MockFactory maps 'ssh-userauth' to MockService.
        self.assertEqual(self.proto.service.name, 'MockService')
    def test_disconnectNEWKEYSData(self):
        """
        Test that NEWKEYS disconnects if it receives data.
        """
        # NEWKEYS must have an empty payload per RFC 4253.
        self.proto.ssh_NEWKEYS("bad packet")
        self.checkDisconnected()
    def test_disconnectSERVICE_REQUESTBadService(self):
        """
        Test that SERVICE_REQUESTS disconnects if an unknown service is
        requested.
        """
        self.proto.ssh_SERVICE_REQUEST(common.NS('no service'))
        self.checkDisconnected(transport.DISCONNECT_SERVICE_NOT_AVAILABLE)
class ClientSSHTransportTestCase(ServerAndClientSSHTransportBaseCase,
TransportTestCase):
"""
Tests for SSHClientTransport.
"""
klass = transport.SSHClientTransport
def test_KEXINIT(self):
"""
Test that receiving a KEXINIT packet sets up the correct values on the
client. The way algorithms are picks is that the first item in the
client's list that is also in the server's list is chosen.
"""
self.proto.dataReceived( 'SSH-2.0-Twisted\r\n\x00\x00\x01\xd4\t\x14'
'\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99'
'\x99\x00\x00\x00=diffie-hellman-group1-sha1,diffie-hellman-g'
'roup-exchange-sha1\x00\x00\x00\x0fssh-dss,ssh-rsa\x00\x00\x00'
'\x85aes128-ctr,aes128-cbc,aes192-ctr,aes192-cbc,aes256-ctr,ae'
's256-cbc,cast128-ctr,cast128-cbc,blowfish-ctr,blowfish-cbc,3d'
'es-ctr,3des-cbc\x00\x00\x00\x85aes128-ctr,aes128-cbc,aes192-c'
'tr,aes192-cbc,aes256-ctr,aes256-cbc,cast128-ctr,cast128-cbc,b'
'lowfish-ctr,blowfish-cbc,3des-ctr,3des-cbc\x00\x00\x00\x12hma'
'c-md5,hmac-sha1\x00\x00\x00\x12hmac-md5,hmac-sha1\x00\x00\x00'
'\tzlib,none\x00\x00\x00\tzlib,none\x00\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x99\x99\x99\x99'
'\x99')
self.assertEqual(self.proto.kexAlg,
'diffie-hellman-group-exchange-sha1')
self.assertEqual(self.proto.keyAlg,
'ssh-rsa')
self.assertEqual(self.proto.outgoingCompressionType,
'none')
self.assertEqual(self.proto.incomingCompressionType,
'none')
ne = self.proto.nextEncryptions
self.assertEqual(ne.outCipType, 'aes256-ctr')
self.assertEqual(ne.inCipType, 'aes256-ctr')
self.assertEqual(ne.outMACType, 'hmac-sha1')
self.assertEqual(ne.inMACType, 'hmac-sha1')
def verifyHostKey(self, pubKey, fingerprint):
"""
Mock version of SSHClientTransport.verifyHostKey.
"""
self.calledVerifyHostKey = True
self.assertEqual(pubKey, self.blob)
self.assertEqual(fingerprint.replace(':', ''),
md5(pubKey).hexdigest())
return defer.succeed(True)
def setUp(self):
TransportTestCase.setUp(self)
self.blob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
self.privObj = keys.Key.fromString(keydata.privateRSA_openssh)
self.calledVerifyHostKey = False
self.proto.verifyHostKey = self.verifyHostKey
def test_notImplementedClientMethods(self):
"""
verifyHostKey() should return a Deferred which fails with a
NotImplementedError exception. connectionSecure() should raise
NotImplementedError().
"""
self.assertRaises(NotImplementedError, self.klass().connectionSecure)
def _checkRaises(f):
f.trap(NotImplementedError)
d = self.klass().verifyHostKey(None, None)
return d.addCallback(self.fail).addErrback(_checkRaises)
def test_KEXINIT_groupexchange(self):
"""
Test that a KEXINIT packet with a group-exchange key exchange results
in a KEX_DH_GEX_REQUEST_OLD message..
"""
self.proto.supportedKeyExchanges = [
'diffie-hellman-group-exchange-sha1']
self.proto.dataReceived(self.transport.value())
self.assertEqual(self.packets, [(transport.MSG_KEX_DH_GEX_REQUEST_OLD,
'\x00\x00\x08\x00')])
def test_KEXINIT_group1(self):
"""
Like test_KEXINIT_groupexchange, but for the group-1 key exchange.
"""
self.proto.supportedKeyExchanges = ['diffie-hellman-group1-sha1']
self.proto.dataReceived(self.transport.value())
self.assertEqual(common.MP(self.proto.x)[5:], '\x99' * 64)
self.assertEqual(self.packets,
[(transport.MSG_KEXDH_INIT, self.proto.e)])
def test_KEXINIT_badKexAlg(self):
"""
Test that the client raises a ConchError if it receives a
KEXINIT message bug doesn't have a key exchange algorithm that we
understand.
"""
self.proto.supportedKeyExchanges = ['diffie-hellman-group2-sha1']
data = self.transport.value().replace('group1', 'group2')
self.assertRaises(ConchError, self.proto.dataReceived, data)
    def test_KEXDH_REPLY(self):
        """
        KEXDH_REPLY verifies the server: the client recomputes the exchange
        hash from the handshake transcript, checks the server's signature
        over it, and calls verifyHostKey() along the way.
        """
        self.test_KEXINIT_group1()
        sharedSecret = common._MPpow(transport.DH_GENERATOR,
                                     self.proto.x, transport.DH_PRIME)
        # Rebuild the exchange hash exactly as the protocol does: both
        # version strings, both KEXINIT payloads, host key blob, e, f and
        # the shared secret, all fed to SHA-1.
        h = sha1()
        h.update(common.NS(self.proto.ourVersionString) * 2)
        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
        h.update(common.NS(self.blob))
        h.update(self.proto.e)
        h.update('\x00\x00\x00\x01\x02') # f
        h.update(sharedSecret)
        exchangeHash = h.digest()
        def _cbTestKEXDH_REPLY(value):
            self.assertIdentical(value, None)
            self.assertEqual(self.calledVerifyHostKey, True)
            self.assertEqual(self.proto.sessionID, exchangeHash)
        signature = self.privObj.sign(exchangeHash)
        # NOTE(review): the KEXDH_REPLY payload is delivered through
        # ssh_KEX_DH_GEX_GROUP — presumably the two messages share a number;
        # confirm against the transport implementation.
        d = self.proto.ssh_KEX_DH_GEX_GROUP(
            (common.NS(self.blob) + '\x00\x00\x00\x01\x02' +
             common.NS(signature)))
        d.addCallback(_cbTestKEXDH_REPLY)
        return d
    def test_KEX_DH_GEX_GROUP(self):
        """
        KEX_DH_GEX_GROUP delivers the server-chosen prime and generator; the
        client must reply with KEX_DH_GEX_INIT carrying its Diffie-Hellman
        public value.
        """
        self.test_KEXINIT_groupexchange()
        # Payload encodes p = 15 and g = 2 as SSH multi-precision integers.
        self.proto.ssh_KEX_DH_GEX_GROUP(
            '\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02')
        self.assertEqual(self.proto.p, 15)
        self.assertEqual(self.proto.g, 2)
        # Stubbed RNG produces '\x99' bytes for the private value x.
        self.assertEqual(common.MP(self.proto.x)[5:], '\x99' * 40)
        self.assertEqual(self.proto.e,
                         common.MP(pow(2, self.proto.x, 15)))
        self.assertEqual(self.packets[1:], [(transport.MSG_KEX_DH_GEX_INIT,
                                             self.proto.e)])
    def test_KEX_DH_GEX_REPLY(self):
        """
        KEX_DH_GEX_REPLY completes the group exchange: the client recomputes
        the exchange hash over the transcript (including the group request
        and the chosen group) and verifies the server via verifyHostKey().
        """
        self.test_KEX_DH_GEX_GROUP()
        sharedSecret = common._MPpow(3, self.proto.x, self.proto.p)
        h = sha1()
        h.update(common.NS(self.proto.ourVersionString) * 2)
        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
        h.update(common.NS(self.blob))
        # Requested group size followed by the server's (p, g) reply.
        h.update('\x00\x00\x08\x00\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02')
        h.update(self.proto.e)
        h.update('\x00\x00\x00\x01\x03') # f
        h.update(sharedSecret)
        exchangeHash = h.digest()
        def _cbTestKEX_DH_GEX_REPLY(value):
            self.assertIdentical(value, None)
            self.assertEqual(self.calledVerifyHostKey, True)
            self.assertEqual(self.proto.sessionID, exchangeHash)
        signature = self.privObj.sign(exchangeHash)
        d = self.proto.ssh_KEX_DH_GEX_REPLY(
            common.NS(self.blob) +
            '\x00\x00\x00\x01\x03' +
            common.NS(signature))
        d.addCallback(_cbTestKEX_DH_GEX_REPLY)
        return d
    def test_keySetup(self):
        """
        _keySetup derives the next encryption keys, emits NEWKEYS, and keeps
        the session ID from the first key exchange across later rekeys.
        """
        self.proto.nextEncryptions = MockCipher()
        self.simulateKeyExchange('AB', 'CD')
        self.assertEqual(self.proto.sessionID, 'CD')
        # A second exchange must NOT replace the session ID.
        self.simulateKeyExchange('AB', 'EF')
        self.assertEqual(self.proto.sessionID, 'CD')
        self.assertEqual(self.packets[-1], (transport.MSG_NEWKEYS, ''))
        newKeys = [self.proto._getKey(c, 'AB', 'EF') for c in 'ABCDEF']
        # The client's key ordering swaps the outgoing/incoming pairs
        # relative to the derivation order A..F.
        self.assertEqual(self.proto.nextEncryptions.keys,
                         (newKeys[0], newKeys[2], newKeys[1], newKeys[3],
                          newKeys[4], newKeys[5]))
    def test_NEWKEYS(self):
        """
        NEWKEYS rotates nextEncryptions into currentEncryptions, fires
        connectionSecure() after the first exchange, and enables compression
        only once a 'zlib' compression type has been negotiated.
        """
        self.test_KEXINIT()
        secure = [False]
        def stubConnectionSecure():
            secure[0] = True
        self.proto.connectionSecure = stubConnectionSecure
        self.proto.nextEncryptions = transport.SSHCiphers(
            'none', 'none', 'none', 'none')
        self.simulateKeyExchange('AB', 'CD')
        self.assertNotIdentical(
            self.proto.currentEncryptions, self.proto.nextEncryptions)
        self.proto.nextEncryptions = MockCipher()
        self.proto.ssh_NEWKEYS('')
        # Nothing negotiated yet, so no compression objects exist.
        self.assertIdentical(self.proto.outgoingCompression, None)
        self.assertIdentical(self.proto.incomingCompression, None)
        self.assertIdentical(self.proto.currentEncryptions,
                             self.proto.nextEncryptions)
        self.assertTrue(secure[0])
        # Rekey with zlib in each direction in turn and check the
        # corresponding compressor appears.
        self.proto.outgoingCompressionType = 'zlib'
        self.simulateKeyExchange('AB', 'GH')
        self.proto.ssh_NEWKEYS('')
        self.failIfIdentical(self.proto.outgoingCompression, None)
        self.proto.incomingCompressionType = 'zlib'
        self.simulateKeyExchange('AB', 'IJ')
        self.proto.ssh_NEWKEYS('')
        self.failIfIdentical(self.proto.incomingCompression, None)
    def test_SERVICE_ACCEPT(self):
        """
        A SERVICE_ACCEPT packet naming the requested service starts that
        service.
        """
        self.proto.instance = MockService()
        # The payload is the NS()-encoded (length-prefixed) service name.
        self.proto.ssh_SERVICE_ACCEPT('\x00\x00\x00\x0bMockService')
        self.assertTrue(self.proto.instance.started)
    def test_requestService(self):
        """
        requestService() emits a SERVICE_REQUEST packet carrying the
        length-prefixed service name.
        """
        self.proto.requestService(MockService())
        self.assertEqual(self.packets, [(transport.MSG_SERVICE_REQUEST,
                                         '\x00\x00\x00\x0bMockService')])
    def test_disconnectKEXDH_REPLYBadSignature(self):
        """
        KEXDH_REPLY processing disconnects with KEY_EXCHANGE_FAILED when the
        server's signature does not verify.
        """
        self.test_KEXDH_REPLY()
        # Re-run the continuation directly with a garbage signature.
        self.proto._continueKEXDH_REPLY(None, self.blob, 3, "bad signature")
        self.checkDisconnected(transport.DISCONNECT_KEY_EXCHANGE_FAILED)
    def test_disconnectGEX_REPLYBadSignature(self):
        """
        Like test_disconnectKEXDH_REPLYBadSignature, but for DH_GEX_REPLY.
        """
        self.test_KEX_DH_GEX_REPLY()
        self.proto._continueGEX_REPLY(None, self.blob, 3, "bad signature")
        self.checkDisconnected(transport.DISCONNECT_KEY_EXCHANGE_FAILED)
    def test_disconnectNEWKEYSData(self):
        """
        NEWKEYS must carry an empty payload; receiving any data disconnects.
        """
        self.proto.ssh_NEWKEYS("bad packet")
        self.checkDisconnected()
    def test_disconnectSERVICE_ACCEPT(self):
        """
        Test that SERVICE_ACCEPT disconnects if the accepted protocol is
        different from the asked-for protocol.
        """
        self.proto.instance = MockService()
        # 'bad' does not match the requested 'MockService'.
        self.proto.ssh_SERVICE_ACCEPT('\x00\x00\x00\x03bad')
        self.checkDisconnected()
    def test_noPayloadSERVICE_ACCEPT(self):
        """
        Some commercial SSH servers don't send a payload with the
        SERVICE_ACCEPT message.  Conch pretends that it got the correct
        name of the service.
        """
        self.proto.instance = MockService()
        self.proto.ssh_SERVICE_ACCEPT('') # no payload
        self.assertTrue(self.proto.instance.started)
        # The service started and no disconnect packet was emitted.
        self.assertEquals(len(self.packets), 0) # not disconnected
class GetMACTestCase(unittest.TestCase):
    """
    Tests for L{SSHCiphers._getMAC}: the helper that turns a MAC algorithm
    name and key material into the (digest module, ipad, opad, size) tuple
    used for HMAC computation.
    """
    if dependencySkip:
        skip = dependencySkip
    def setUp(self):
        # The cipher/MAC names passed here are irrelevant; only _getMAC is
        # exercised in these tests.
        self.ciphers = transport.SSHCiphers(b'A', b'B', b'C', b'D')
        # MD5 digest is 16 bytes. Put some non-zero bytes into that part of
        # the key. Maybe varying the bytes a little bit means a bug in the
        # implementation is more likely to be caught by the assertions below.
        # The remaining 48 bytes of NULs are to pad the key out to 64 bytes.
        # It doesn't seem to matter that SHA1 produces a larger digest. The
        # material seems always to need to be truncated at 16 bytes.
        self.key = '\x55\xaa' * 8 + '\x00' * 48
        # Expected HMAC inner/outer pads: the key XORed with 0x36 and 0x5c.
        self.ipad = b''.join(chr(ord(b) ^ 0x36) for b in self.key)
        self.opad = b''.join(chr(ord(b) ^ 0x5c) for b in self.key)
    def test_hmacsha1(self):
        """
        When L{SSHCiphers._getMAC} is called with the C{b"hmac-sha1"} MAC
        algorithm name it returns a tuple of (sha1 digest object, inner pad,
        outer pad, sha1 digest size) with a C{key} attribute set to the value
        of the key supplied.
        """
        params = self.ciphers._getMAC(b"hmac-sha1", self.key)
        self.assertEqual(
            (sha1, self.ipad, self.opad, sha1().digest_size, self.key),
            params + (params.key,))
    def test_md5sha1(self):
        """
        When L{SSHCiphers._getMAC} is called with the C{b"hmac-md5"} MAC
        algorithm name it returns a tuple of (md5 digest object, inner pad,
        outer pad, md5 digest size) with a C{key} attribute set to the value of
        the key supplied.  (The method name notwithstanding, this covers the
        hmac-md5 algorithm only.)
        """
        params = self.ciphers._getMAC(b"hmac-md5", self.key)
        self.assertEqual(
            (md5, self.ipad, self.opad, md5().digest_size, self.key),
            params + (params.key,))
    def test_none(self):
        """
        When L{SSHCiphers._getMAC} is called with the C{b"none"} MAC algorithm
        name it returns a tuple of (None, "", "", 0)
        """
        params = self.ciphers._getMAC(b"none", self.key)
        self.assertEqual((None, b"", b"", 0), params)
class SSHCiphersTestCase(unittest.TestCase):
    """
    Tests for the SSHCiphers helper class: construction, cipher lookup,
    key installation, and MAC computation/verification.
    """
    if dependencySkip:
        skip = dependencySkip
    def test_init(self):
        """
        Test that the initializer sets up the SSHCiphers object.
        """
        ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
        self.assertEqual(ciphers.outCipType, 'A')
        self.assertEqual(ciphers.inCipType, 'B')
        self.assertEqual(ciphers.outMACType, 'C')
        self.assertEqual(ciphers.inMACType, 'D')
    def test_getCipher(self):
        """
        Test that the _getCipher method returns the correct cipher.
        """
        ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
        iv = key = '\x00' * 16
        for cipName, (modName, keySize, counter) in ciphers.cipherMap.items():
            cip = ciphers._getCipher(cipName, iv, key)
            if cipName == 'none':
                # The 'none' cipher gets a do-nothing stand-in object.
                self.assertIsInstance(cip, transport._DummyCipher)
            else:
                self.assertTrue(getClass(cip).__name__.startswith(modName))
    def test_setKeysCiphers(self):
        """
        Test that setKeys sets up the ciphers.
        """
        key = '\x00' * 64
        cipherItems = transport.SSHCiphers.cipherMap.items()
        for cipName, (modName, keySize, counter) in cipherItems:
            # One object encrypting outbound, one decrypting inbound, so a
            # round-trip through both must reproduce the plaintext.
            encCipher = transport.SSHCiphers(cipName, 'none', 'none', 'none')
            decCipher = transport.SSHCiphers('none', cipName, 'none', 'none')
            cip = encCipher._getCipher(cipName, key, key)
            bs = cip.block_size
            encCipher.setKeys(key, key, '', '', '', '')
            decCipher.setKeys('', '', key, key, '', '')
            self.assertEqual(encCipher.encBlockSize, bs)
            self.assertEqual(decCipher.decBlockSize, bs)
            enc = cip.encrypt(key[:bs])
            enc2 = cip.encrypt(key[:bs])
            if counter:
                # Counter-mode ciphers must never repeat a keystream block.
                self.failIfEquals(enc, enc2)
            self.assertEqual(encCipher.encrypt(key[:bs]), enc)
            self.assertEqual(encCipher.encrypt(key[:bs]), enc2)
            self.assertEqual(decCipher.decrypt(enc), key[:bs])
            self.assertEqual(decCipher.decrypt(enc2), key[:bs])
    def test_setKeysMACs(self):
        """
        Test that setKeys sets up the MACs.
        """
        key = '\x00' * 64
        for macName, mod in transport.SSHCiphers.macMap.items():
            outMac = transport.SSHCiphers('none', 'none', macName, 'none')
            inMac = transport.SSHCiphers('none', 'none', 'none', macName)
            outMac.setKeys('', '', '', '', key, '')
            inMac.setKeys('', '', '', '', '', key)
            if mod:
                ds = mod().digest_size
            else:
                ds = 0
            self.assertEqual(inMac.verifyDigestSize, ds)
            if mod:
                mod, i, o, ds = outMac._getMAC(macName, key)
            seqid = 0
            data = key
            # The MAC covers the 4-byte sequence number plus the data.
            packet = '\x00' * 4 + key
            if mod:
                # Reference HMAC: H(opad + H(ipad + packet)).
                mac = mod(o + mod(i + packet).digest()).digest()
            else:
                mac = ''
            self.assertEqual(outMac.makeMAC(seqid, data), mac)
            self.assertTrue(inMac.verify(seqid, data, mac))
    def test_makeMAC(self):
        """
        L{SSHCiphers.makeMAC} computes the HMAC of an outgoing SSH message with
        a particular sequence id and content data.
        """
        # Use the test vectors given in the appendix of RFC 2104.
        vectors = [
            (b"\x0b" * 16, b"Hi There",
             b"9294727a3638bb1c13f48ef8158bfc9d"),
            (b"Jefe", b"what do ya want for nothing?",
             b"750c783e6ab0b503eaa86e310a5db738"),
            (b"\xAA" * 16, b"\xDD" * 50,
             b"56be34521d144c88dbb8c733f0e8b3f6"),
        ]
        for key, data, mac in vectors:
            outMAC = transport.SSHCiphers('none', 'none', 'hmac-md5', 'none')
            outMAC.outMAC = outMAC._getMAC("hmac-md5", key)
            # Treat the first four bytes of the vector data as the sequence
            # number so the vector still covers the full input.
            (seqid,) = struct.unpack('>L', data[:4])
            shortened = data[4:]
            self.assertEqual(
                mac, outMAC.makeMAC(seqid, shortened).encode("hex"),
                "Failed HMAC test vector; key=%r data=%r" % (key, data))
class CounterTestCase(unittest.TestCase):
    """
    Tests for the _Counter helper class used by counter-mode ciphers.
    """
    if dependencySkip:
        skip = dependencySkip
    def test_init(self):
        """
        Test that the counter is initialized correctly.
        """
        # Only the first blockSize bytes of the IV seed the counter.
        counter = transport._Counter('\x00' * 8 + '\xff' * 8, 8)
        self.assertEqual(counter.blockSize, 8)
        self.assertEqual(counter.count.tostring(), '\x00' * 8)
    def test_count(self):
        """
        Test that the counter counts incrementally and wraps at the top.
        """
        counter = transport._Counter('\x00', 1)
        self.assertEqual(counter(), '\x01')
        self.assertEqual(counter(), '\x02')
        # Advance to the single-byte maximum...
        [counter() for i in range(252)]
        self.assertEqual(counter(), '\xff')
        # ...and confirm it wraps back to zero.
        self.assertEqual(counter(), '\x00')
class TransportLoopbackTestCase(unittest.TestCase):
    """
    Test the server transport and client transport against each other, over
    a loopback connection, for every supported cipher, MAC, key exchange and
    compression combination.
    """
    if dependencySkip:
        skip = dependencySkip
    def _runClientServer(self, mod):
        """
        Run an async client and server, modifying each using the mod function
        provided. Returns a Deferred called back when both Protocols have
        disconnected.
        @type mod: C{func}
        @rtype: C{defer.Deferred}
        """
        factory = MockFactory()
        server = transport.SSHServerTransport()
        server.factory = factory
        factory.startFactory()
        # Record disconnect errors on each side for the assertions below.
        server.errors = []
        server.receiveError = lambda code, desc: server.errors.append((
            code, desc))
        client = transport.SSHClientTransport()
        # Accept any host key; these tests only exercise the transports.
        client.verifyHostKey = lambda x, y: defer.succeed(None)
        client.errors = []
        client.receiveError = lambda code, desc: client.errors.append((
            code, desc))
        # Hang up as soon as the handshake completes.
        client.connectionSecure = lambda: client.loseConnection()
        server = mod(server)
        client = mod(client)
        def check(ignored, server, client):
            # Identify the parameter combination in assertion messages.
            name = repr([server.supportedCiphers[0],
                         server.supportedMACs[0],
                         server.supportedKeyExchanges[0],
                         server.supportedCompressions[0]])
            self.assertEqual(client.errors, [])
            self.assertEqual(server.errors, [(
                transport.DISCONNECT_CONNECTION_LOST,
                "user closed connection")])
            if server.supportedCiphers[0] == 'none':
                self.assertFalse(server.isEncrypted(), name)
                self.assertFalse(client.isEncrypted(), name)
            else:
                self.assertTrue(server.isEncrypted(), name)
                self.assertTrue(client.isEncrypted(), name)
            if server.supportedMACs[0] == 'none':
                self.assertFalse(server.isVerified(), name)
                self.assertFalse(client.isVerified(), name)
            else:
                self.assertTrue(server.isVerified(), name)
                self.assertTrue(client.isVerified(), name)
        d = loopback.loopbackAsync(server, client)
        d.addCallback(check, server, client)
        return d
    def test_ciphers(self):
        """
        Test that the client and server play nicely together, in all
        the various combinations of ciphers.
        """
        deferreds = []
        for cipher in transport.SSHTransportBase.supportedCiphers + ['none']:
            def setCipher(proto):
                proto.supportedCiphers = [cipher]
                return proto
            deferreds.append(self._runClientServer(setCipher))
        return defer.DeferredList(deferreds, fireOnOneErrback=True)
    def test_macs(self):
        """
        Like test_ciphers, but for the various MACs.
        """
        deferreds = []
        for mac in transport.SSHTransportBase.supportedMACs + ['none']:
            def setMAC(proto):
                proto.supportedMACs = [mac]
                return proto
            deferreds.append(self._runClientServer(setMAC))
        return defer.DeferredList(deferreds, fireOnOneErrback=True)
    def test_keyexchanges(self):
        """
        Like test_ciphers, but for the various key exchanges.
        """
        deferreds = []
        for kex in transport.SSHTransportBase.supportedKeyExchanges:
            def setKeyExchange(proto):
                proto.supportedKeyExchanges = [kex]
                return proto
            deferreds.append(self._runClientServer(setKeyExchange))
        return defer.DeferredList(deferreds, fireOnOneErrback=True)
    def test_compressions(self):
        """
        Like test_ciphers, but for the various compressions.
        """
        deferreds = []
        for compression in transport.SSHTransportBase.supportedCompressions:
            def setCompression(proto):
                proto.supportedCompressions = [compression]
                return proto
            deferreds.append(self._runClientServer(setCompression))
        return defer.DeferredList(deferreds, fireOnOneErrback=True)
class RandomNumberTestCase(unittest.TestCase):
    """
    Tests for the random number generator L{_getRandomNumber} and private
    key generator L{_generateX}.
    """
    if dependencySkip:
        skip = dependencySkip
    def test_usesSuppliedRandomFunction(self):
        """
        L{_getRandomNumber} returns an integer constructed directly from the
        bytes returned by the random byte generator passed to it.
        """
        # Note: the parameter deliberately shadows nothing important here;
        # it is the byte count requested by _getRandomNumber.
        def random(bytes):
            # The number of bytes requested will be the value of each byte
            # we return.
            return chr(bytes) * bytes
        # 32 bits -> 4 bytes of 0x04 -> big-endian 0x04040404.
        self.assertEqual(
            transport._getRandomNumber(random, 32),
            4 << 24 | 4 << 16 | 4 << 8 | 4)
    def test_rejectsNonByteMultiples(self):
        """
        L{_getRandomNumber} raises L{ValueError} if the number of bits
        passed to L{_getRandomNumber} is not a multiple of 8.
        """
        self.assertRaises(
            ValueError,
            transport._getRandomNumber, None, 9)
    def test_excludesSmall(self):
        """
        If the random byte generator passed to L{_generateX} produces bytes
        which would result in 0 or 1 being returned, these bytes are
        discarded and another attempt is made to produce a larger value.
        """
        # Each call pops the next canned value: 0 and 1 must be rejected,
        # 127 accepted.
        results = [chr(0), chr(1), chr(127)]
        def random(bytes):
            return results.pop(0) * bytes
        self.assertEqual(
            transport._generateX(random, 8),
            127)
    def test_excludesLarge(self):
        """
        If the random byte generator passed to L{_generateX} produces bytes
        which would result in C{(2 ** bits) - 1} being returned, these bytes
        are discarded and another attempt is made to produce a smaller
        value.
        """
        # 255 == (2 ** 8) - 1 must be rejected, 64 accepted.
        results = [chr(255), chr(64)]
        def random(bytes):
            return results.pop(0) * bytes
        self.assertEqual(
            transport._generateX(random, 8),
            64)
class OldFactoryTestCase(unittest.TestCase):
    """
    The old C{SSHFactory.getPublicKeys}() returned mappings of key names to
    strings of key blobs and mappings of key names to PyCrypto key objects from
    C{SSHFactory.getPrivateKeys}() (they could also be specified with the
    C{publicKeys} and C{privateKeys} attributes). This is no longer supported
    by the C{SSHServerTransport}, so we warn the user if they create an old
    factory.
    """
    if dependencySkip:
        skip = dependencySkip
    def test_getPublicKeysWarning(self):
        """
        If the return value of C{getPublicKeys}() isn't a mapping from key
        names to C{Key} objects, then warn the user and convert the mapping.
        """
        sshFactory = MockOldFactoryPublicKeys()
        self.assertWarns(DeprecationWarning,
                "Returning a mapping from strings to strings from"
                " getPublicKeys()/publicKeys (in %s) is deprecated. Return "
                "a mapping from strings to Key objects instead." %
                (qual(MockOldFactoryPublicKeys),),
                factory.__file__, sshFactory.startFactory)
        # After startFactory the blobs must have been upgraded to Key objects.
        self.assertEqual(sshFactory.publicKeys, MockFactory().getPublicKeys())
    def test_getPrivateKeysWarning(self):
        """
        If the return value of C{getPrivateKeys}() isn't a mapping from key
        names to C{Key} objects, then warn the user and convert the mapping.
        """
        sshFactory = MockOldFactoryPrivateKeys()
        self.assertWarns(DeprecationWarning,
                "Returning a mapping from strings to PyCrypto key objects from"
                " getPrivateKeys()/privateKeys (in %s) is deprecated. Return"
                " a mapping from strings to Key objects instead." %
                (qual(MockOldFactoryPrivateKeys),),
                factory.__file__, sshFactory.startFactory)
        self.assertEqual(sshFactory.privateKeys,
                         MockFactory().getPrivateKeys())
    def test_publicKeysWarning(self):
        """
        If the value of the C{publicKeys} attribute isn't a mapping from key
        names to C{Key} objects, then warn the user and convert the mapping.
        """
        sshFactory = MockOldFactoryPublicKeys()
        # Exercise the attribute path rather than the getter path.
        sshFactory.publicKeys = sshFactory.getPublicKeys()
        self.assertWarns(DeprecationWarning,
                "Returning a mapping from strings to strings from"
                " getPublicKeys()/publicKeys (in %s) is deprecated. Return "
                "a mapping from strings to Key objects instead." %
                (qual(MockOldFactoryPublicKeys),),
                factory.__file__, sshFactory.startFactory)
        self.assertEqual(sshFactory.publicKeys, MockFactory().getPublicKeys())
    def test_privateKeysWarning(self):
        """
        If the return value of C{privateKeys} attribute isn't a mapping from
        key names to C{Key} objects, then warn the user and convert the
        mapping.
        """
        sshFactory = MockOldFactoryPrivateKeys()
        sshFactory.privateKeys = sshFactory.getPrivateKeys()
        self.assertWarns(DeprecationWarning,
                "Returning a mapping from strings to PyCrypto key objects from"
                " getPrivateKeys()/privateKeys (in %s) is deprecated. Return"
                " a mapping from strings to Key objects instead." %
                (qual(MockOldFactoryPrivateKeys),),
                factory.__file__, sshFactory.startFactory)
        self.assertEqual(sshFactory.privateKeys,
                         MockFactory().getPrivateKeys())
|
ecolitan/fatics
|
venv/lib/python2.7/site-packages/twisted/conch/test/test_transport.py
|
Python
|
agpl-3.0
| 82,188
|
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Adds a filter to a user profile, limiting its access to certain advertisers.
To get user IDs, run get_users.py. To get advertiser IDs, run
get_advertisers.py.
A similar pattern can be applied to set filters limiting site, user role,
and/or campaign access for any user. To get the Filter Criteria Type ID, run
get_user_filter_types.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
# Import appropriate modules from the client library.
from googleads import dfa
# Placeholders: the ID of the user to restrict and the advertiser that user
# should be allowed to access.  Fill in before running.
USER_ID = 'INSERT_USER_ID_HERE'
ADVERTISER_ID = 'INSERT_ADVERTISER_ID_HERE'
def main(client, user_id, advertiser_id):
  """Adds an 'Assigned' advertiser filter to the user, granting access only
  to the given advertiser.  Prints the modified user's ID on success."""
  # Initialize appropriate service.
  user_service = client.GetService(
      'user', 'v1.20', 'https://advertisersapitest.doubleclick.net')
  # Retrieve the user who is to be modified.
  user = user_service.getUser(user_id)
  # Create and configure a user filter.
  advertiser_filter = {
      # The following field has been filled in to make a filter that allows a
      # user to access only the assigned objects.
      # This value was determined using get_user_filter_types.py.
      'userFilterCriteriaId': '2',
      # Because this filter used the criteria type "Assigned" it is necessary
      # to specify what advertisers this user has access to. This next step
      # would be skipped for the criteria types "All" and "None".
      # Create a list of object filters to represent each object the user has
      # access to. Since this is an advertiser filter, the list elements
      # represent an advertiser each. The size of the list will need to match
      # the total number of advertisers the user is assigned.
      'objectFilters': [{
          'id': advertiser_id
      }]
  }
  # Add the filter to the user.
  user['advertiserUserFilter'] = advertiser_filter
  # Save the changes made and display a success message.
  result = user_service.saveUser(user)
  if result:
    print 'User with ID \'%s\' was modified.' % result['id']
  else:
    print 'No user was modified.'
if __name__ == '__main__':
  # Initialize client object from the googleads.yaml credential store.
  dfa_client = dfa.DfaClient.LoadFromStorage()
  main(dfa_client, USER_ID, ADVERTISER_ID)
|
wubr2000/googleads-python-lib
|
examples/dfa/v1_20/add_advertiser_user_filter.py
|
Python
|
apache-2.0
| 2,962
|
# Setup for static-picture-publish.
from distutils.core import setup
# This version number must match the version number in VERSION.
_sppVersion = '0.1'
setup(name='static-picture-publish',
      description="""Publish images.""",
      long_description="""
Publish images.""",
      author='Russell Steicke',
      author_email='russells@adelie.cx',
      url='http://adelie.cx/static-picture-publish',
      version=_sppVersion,
      license="GPL",
      packages=['static_picture_publish'],
      scripts=['bin/static-picture-publish'],
      # Install the man page, CSS, XSL templates and gallery border images
      # alongside the package so the script can locate them at runtime.
      data_files=[('share/man/man1',
                   ['man/static-picture-publish.1.gz']),
                  ('lib/site-python/static_picture_publish/css',
                   ['css/spp-plain.css'] ),
                  ('lib/site-python/static_picture_publish/xsl',
                   ['xsl/spp-dir-plain.xsl',
                    'xsl/spp-image-plain.xsl'] ),
                  ('lib/site-python/static_picture_publish/images',
                   ['images/tl1.gif',
                    'images/tl2.gif',
                    'images/t.gif',
                    'images/tr.gif',
                    'images/l.gif',
                    'images/folder-pics.gif',
                    'images/r.gif',
                    'images/bl.gif',
                    'images/b.gif',
                    'images/br.gif',
                    ] ),
                  ]
      )
|
russells/static-picture-publish
|
setup.py
|
Python
|
gpl-2.0
| 1,416
|
import json
import os
import time
import hashlib
import inspect
import idc
import idautils
import idaapi
from idaapi import Form
from py2neo import authenticate, Graph, Node, Relationship
class ConnectionManagementForm(Form):
    """IDA form for editing the Neo4j connection settings.

    Pre-fills the fields from the plugin's stored config; on close (fid -2),
    if anything changed, persists the new config and reconnects.
    """
    def __init__(self,manager):
        self.manager = manager          # owning neo4ida_t plugin instance
        self.conf = manager.get_config()
        self.changed = False            # set once any field is edited
        Form.__init__(self,
"""Neo4IDA - Manage Neo4j Connection
{form_change}
<#Host#~H~ost:{host}>   <#Port#~P~ort:{port}>
<#Username#~U~sername:{username}>
<#Password#~P~assword:{password}>
"""
        , {
        "form_change": Form.FormChangeCb(self.form_change),
        "host":Form.StringInput(swidth=20),
        "port":Form.StringInput(swidth=10),
        "username":Form.StringInput(swidth=40),
        "password":Form.StringInput(swidth=40)
        }
        )
        self.Compile()
        # Seed the inputs with the currently stored connection details.
        self.host.value = self.conf["host"]
        self.port.value = self.conf["port"]
        self.username.value = self.conf["username"]
        self.password.value = self.conf["password"]
        self.Execute()
    def form_change(self,fid):
        # Called by IDA with the ID of the changed field; mirror the edited
        # value onto this object and remember that something changed.
        if fid == self.host.id:
            tmp = self.GetControlValue(self.host)
            self.host.value = tmp
            self.changed = True
        if fid == self.port.id:
            tmp = self.GetControlValue(self.port)
            self.port.value = tmp
            self.changed = True
        if fid == self.username.id:
            tmp = self.GetControlValue(self.username)
            self.username.value = tmp
            self.changed = True
        if fid == self.password.id:
            tmp = self.GetControlValue(self.password)
            self.password.value = tmp
            self.changed = True
        if fid == -2:
            # fid -2 signals the form is closing: persist and reconnect.
            if self.changed:
                new_conf = {}
                new_conf['host'] = self.host.value
                new_conf['port'] = self.port.value
                new_conf['username'] = self.username.value
                new_conf['password'] = self.password.value
                self.manager.update_config(new_conf)
                self.conf = new_conf
                print "Config updated"
            self.manager.connect()
            self.Close(-1)
class CypherQueryForm(Form):
    """IDA form for running ad-hoc Cypher queries against the plugin's
    Neo4j connection, printing each result row to the output window."""
    def __init__(self,manager):
        self.manager = manager          # owning neo4ida_t plugin instance
        self.conf = manager.get_config()
        self.changed = False
        Form.__init__(self,
"""Neo4IDA - Execute Cypher Query
{form_change}
<#Query#~Q~uery:{query}>
<#Execute Query#~E~xecute:{executeButton}>
"""
        , {
        "form_change": Form.FormChangeCb(self.form_change),
        "query":Form.StringInput(swidth=80),
        "executeButton":Form.ButtonInput(self.button_press)
        }
        )
        self.Compile()
        # Default query returns every node in the database.
        self.query.value = "START n=node(*) return n;"
        self.Execute()
    def form_change(self,fid):
        # Keep self.query in sync with the text field; fid -2 == form close.
        if fid == self.query.id:
            query = self.GetControlValue(self.query)
            self.query.value = query
        if fid == -2:
            self.Close(-1)
    def button_press(self,fid):
        # Run the current query and dump each result row.
        print self.query.value
        for i in self.manager.neo.cypher.execute(self.query.value):
            print i
class UiAction(idaapi.action_handler_t):
    """A registered IDA UI action.

    Holds the action metadata, attaches itself to a menu path and the
    analysis toolbar, and forwards activation to a plain callback.
    """
    def __init__(self, id, name, tooltip, menuPath, callback, icon):
        idaapi.action_handler_t.__init__(self)
        self.id = id
        self.name = name
        self.tooltip = tooltip
        self.menuPath = menuPath
        self.callback = callback
        # The icon argument is not used; the bundled icon.png living next to
        # this script is always loaded instead.
        here = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
        self.icon = idaapi.load_custom_icon(here + "/" + "icon" + ".png")
    def registerAction(self):
        """Register the action and attach it to the menu and toolbar.
        Returns True only when every step succeeds."""
        descriptor = idaapi.action_desc_t(
            self.id, self.name, self, "", self.tooltip, self.icon)
        # Short-circuit: each step runs only if the previous one succeeded.
        ok = idaapi.register_action(descriptor)
        ok = ok and idaapi.attach_action_to_menu(self.menuPath, self.id, 0)
        ok = ok and idaapi.attach_action_to_toolbar("AnalysisToolBar", self.id)
        return bool(ok)
    def unregisterAction(self):
        """Detach the action from its menu and drop the registration."""
        idaapi.detach_action_from_menu(self.menuPath, self.id)
        idaapi.unregister_action(self.id)
    def activate(self, ctx):
        """Invoke the wrapped callback; returning 1 tells IDA the action ran."""
        self.callback(ctx)
        return 1
    def update(self, ctx):
        """The action is always available, regardless of context."""
        return idaapi.AST_ENABLE_ALWAYS
class neo4ida_t(idaapi.plugin_t):
flags = 0
comment = "Neo4j graph export and query interface"
help = "Neo4j graph export and query interface"
wanted_name = "Neo4IDA"
wanted_hotkey = ""
def init(self):
self.conf_file = os.path.expanduser("~") + os.path.sep + "neo4ida.json"
config = self.get_config()
if not config:
config = self.create_default_config()
self.connect()
action = UiAction(
id="neo4ida:upload",
name="Upload",
tooltip="Upload to neo4j",
menuPath="Edit/neo4ida/",
callback=self.upload,
icon=""
)
if not action.registerAction():
return 1
action = UiAction(
id="neo4ida:dropdb",
name="Drop Database",
tooltip="Delete all entries in database instance.",
menuPath="Edit/neo4ida/",
callback=self.drop_db,
icon=""
)
if not action.registerAction():
return 1
action = UiAction(
id="neo4ida:config",
name="Configure",
tooltip="Configure neo4j connection details.",
menuPath="Edit/neo4ida/",
callback=self.config_form,
icon=""
)
if not action.registerAction():
return 1
action = UiAction(
id="neo4ida:query",
name="Cypher Query",
tooltip="Execute a Cypher query.",
menuPath="Edit/neo4ida/",
callback=self.query_form,
icon=""
)
if not action.registerAction():
return 1
action = UiAction(
id="neo4ida:browser",
name="Neo4j Browser",
tooltip="Open Neo4j browser.",
menuPath="Edit/neo4ida/",
callback=self.open_browser,
icon=""
)
if not action.registerAction():
return 1
action = UiAction(
id="neo4ida:diff",
name="Binary Diff",
tooltip="Open binary diffing interface.",
menuPath="Edit/neo4ida/",
callback=self.binary_diff,
icon=""
)
if not action.registerAction():
return 1
return idaapi.PLUGIN_KEEP
def connect(self):
conf = self.get_config()
authenticate(conf['host'] + ":" + conf['port'],conf['username'],conf["password"])
try:
self.neo = Graph("http://" + conf['host'] + ":" + conf["port"] + "/db/data")
except:
print "Failed to connect!"
def term(self):
return None
def binary_diff(self,ctf):
print "Open binary diffing interface"
def drop_db(self,ctx):
self.neo.cypher.execute("START n=node(*) detach delete n;")
print "All database nodes and relationships deleted."
def open_browser(self,ctx):
self.neo.open_browser()
def config_form(self,ctx):
ConnectionManagementForm(self)
def query_form(self,ctf):
CypherQueryForm(self)
def upload(self,ctx):
start = time.time()
func_count = 0
bb_count = 0
call_count = 0
target = idaapi.get_root_filename()
hash = idc.GetInputMD5()
tx = self.neo.cypher.begin()
insert_binary = "MERGE (n:Binary {name:{N},hash:{H}}) RETURN n"
insert_func = "MERGE (n:Function {name:{N},start:{S},flags:{F}}) RETURN n"
insert_bb = "MERGE (n:BasicBlock {start:{S}, end:{E}}) RETURN n"
create_relationship = "MATCH (u:Function {name:{N}}), (r:Function {start:{S}}) CREATE (u)-[:CALLS]->(r)"
create_contains = "MATCH (u:BasicBlock {start:{S}}), (f:Function {name:{N}}) CREATE (f)-[:CONTAINS]->(u)"
create_inside = "MATCH (u:Function {start:{S}}), (b:Binary {hash:{H}}) CREATE (f)-[:INSIDE]->(b)"
self.neo.cypher.execute(insert_binary, {"N":target, "H":hash})
self.neo.cypher.execute("CREATE INDEX ON :Function(start)")
#self.neo.cypher.execute("CREATE INDEX ON :Function(name)")
self.neo.cypher.execute("CREATE INDEX ON :BasicBlock(start)")
for f in Functions():
tx.append(create_inside, {"S":f, "H":hash})
callee_name = GetFunctionName(f)
flags = get_flags(f)
type = GetType(f)
if type:
return_type = type.split()[0]
print type
end_return = type.find(' ')
start_args = type.find('(')
print type[end_return +1:start_args]
print type[start_args+1:].split(',')
else:
print GuessType(f)
tx.append(insert_func, {"N": callee_name, "S":f, "F":flags})
func_count += 1
fc = idaapi.FlowChart(idaapi.get_func(f))
for block in fc:
tx.append(insert_bb, {"S":block.startEA,"E":block.endEA})
tx.append(create_contains,{"S":block.startEA,"N":f})
bb_count += 1
tx.process()
tx.commit()
tx = self.neo.cypher.begin()
for f in Functions():
for xref in CodeRefsTo(f,0):
caller_name = GetFunctionName(xref)
if caller_name != '':
tx.append(create_relationship,{"N":caller_name,"S":f})
call_count += 1
tx.process()
tx.commit()
print "Upload ran in: " + str(time.time() - start)
print "Uploaded " + str(func_count) + " functions, " + str(call_count) +" function calls and " + str(bb_count) + " basic blocks."
def run(self):
pass
def update_config(self,new_config):
print "updating config to be: "
print json.dumps(new_config)
os.remove(self.conf_file)
with open(self.conf_file,"w+") as f:
f.write(json.dumps(new_config))
def create_default_config(self):
default_conf = {
"host": "localhost",
"port": "7474",
"username":"neo4j",
"password":"neo4j"
}
with open(self.conf_file,"w+") as f:
f.write(json.dumps(default_conf))
return default_conf
def get_config(self):
try:
with open(self.conf_file,"r") as f:
return json.loads(f.read())
except:
return None
def find_path(self,startFunc, endFunc):
all_paths = ""
print "Finding all paths from " + startFunc + " to " + endFunc
self.neo.cypher.execute(all_paths,{})
def help():
    # NOTE: shadows the builtin help() at module level; kept for
    # compatibility with existing callers.
    # Print a one-line summary of every menu action the plugin registers.
    print "Upload: Upload graph to neo instance."
    print "Drop Database: Delete all nodes and relationships in the neo4j instance."
    print "Configure: Update your connection configuration to the neo4j instance."
    print "Cypher Query: Execute arbitary cypher queries."
    print "Neo4j Browser: Open the Neo4j web interface in your systems default browser."
    print "Binary Diff: placeholer menu item."
def get_args(f):
    """Return the list of argument names for the function at address *f*.

    Walks the IDA stack frame of the function, collecting member names.
    Names seen before the saved-return/base-pointer slots (" r"/" s") are
    treated as locals; names after them as arguments.  Returns ["void"] when
    no arguments are found, or None when the function has no frame.
    """
    local_variables = []
    arguments = []
    current = local_variables
    frame = idc.GetFrame(f)
    if frame is None:
        return None
    start = idc.GetFirstMember(frame)
    end = idc.GetLastMember(frame)
    count = 0
    # Guard against a corrupt frame producing an endless walk.
    max_count = 10000
    while start <= end and count <= max_count:
        size = idc.GetMemberSize(frame, start)
        count = count + 1
        if size is None:
            start = start + 1
            continue
        name = idc.GetMemberName(frame, start)
        start += size
        if name in [" r", " s"]:
            # Skip return address and base pointer; everything after these
            # frame slots is an argument rather than a local.
            current = arguments
            continue
        current.append(name)
    if len(arguments) == 0:
        arguments.append("void")
    return arguments
def get_flags(f):
	"""Return the list of FUNC_* flag names set on the function at address *f*."""
	out = []
	flags = idc.GetFunctionFlags(f)
	if flags & FUNC_NORET:
		out.append("FUNC_NORET")
	if flags & FUNC_FAR:
		out.append("FUNC_FAR")
	if flags & FUNC_LIB:
		out.append("FUNC_LIB")
	if flags & FUNC_STATIC:
		out.append("FUNC_STATIC")
	if flags & FUNC_FRAME:
		out.append("FUNC_FRAME")
	if flags & FUNC_USERFAR:
		out.append("FUNC_USERFAR")
	if flags & FUNC_HIDDEN:
		out.append("FUNC_HIDDEN")
	if flags & FUNC_THUNK:
		out.append("FUNC_THUNK")
	# BUG FIX: the original tested FUNC_LIB a second time here, so
	# "FUNC_BOTTOMBP" was reported for library functions instead of for
	# functions whose BP points to the bottom of the frame.
	if flags & FUNC_BOTTOMBP:
		out.append("FUNC_BOTTOMBP")
	return out
def PLUGIN_ENTRY():
	# IDA Pro plugin entry point: hand back an instance of the plugin class.
	return neo4ida_t()
|
sam-b/ida-scripts
|
neo4ida/neo4ida.py
|
Python
|
unlicense
| 11,112
|
import datetime
import threading
import time
import cherrypy
from cherrypy.lib import cptools, httputil
class Cache(object):
    """Abstract interface for response caches.

    Concrete caches (e.g. MemoryCache below) must implement all four methods.
    BUG FIX: the original did ``raise NotImplemented`` -- that raises the
    ``NotImplemented`` singleton, which is not an exception (a TypeError on
    Python 3).  ``NotImplementedError`` is the correct idiom.
    """

    def get(self):
        """Return the cached variant for the current request, or None."""
        raise NotImplementedError

    def put(self, obj, size):
        """Store *obj* (whose body is *size* bytes) for the current request."""
        raise NotImplementedError

    def delete(self):
        """Remove all cached variants of the current resource."""
        raise NotImplementedError

    def clear(self):
        """Reset the cache to its initial, empty state."""
        raise NotImplementedError
# ------------------------------- Memory Cache ------------------------------- #
class AntiStampedeCache(dict):
    """A dict of variants for one URI that prevents cache stampedes.

    A ``threading.Event`` stored as a value is a sentinel meaning "another
    thread is currently computing this entry"; late arrivals wait on it
    instead of recomputing.  NOTE(review): ``threading._Event`` is a private
    Python 2 name; on Python 3 the class is ``threading.Event`` -- confirm
    the target interpreter.
    """
    def wait(self, key, timeout=5, debug=False):
        """Return the cached value for the given key, or None.
        If timeout is not None (the default), and the value is already
        being calculated by another thread, wait until the given timeout has
        elapsed. If the value is available before the timeout expires, it is
        returned. If not, None is returned, and a sentinel placed in the cache
        to signal other threads to wait.
        If timeout is None, no waiting is performed nor sentinels used.
        """
        value = self.get(key)
        if isinstance(value, threading._Event):
            if timeout is None:
                # Ignore the other thread and recalc it ourselves.
                if debug:
                    cherrypy.log('No timeout', 'TOOLS.CACHING')
                return None
            # Wait until it's done or times out.
            if debug:
                cherrypy.log('Waiting up to %s seconds' % timeout, 'TOOLS.CACHING')
            value.wait(timeout)
            if value.result is not None:
                # The other thread finished its calculation. Use it.
                if debug:
                    cherrypy.log('Result!', 'TOOLS.CACHING')
                return value.result
            # Timed out. Stick an Event in the slot so other threads wait
            # on this one to finish calculating the value.
            if debug:
                cherrypy.log('Timed out', 'TOOLS.CACHING')
            e = threading.Event()
            e.result = None
            dict.__setitem__(self, key, e)
            return None
        elif value is None:
            # Stick an Event in the slot so other threads wait
            # on this one to finish calculating the value.
            # NOTE(review): this debug message looks copy-pasted from the
            # timeout branch above -- nothing timed out on a plain miss.
            if debug:
                cherrypy.log('Timed out', 'TOOLS.CACHING')
            e = threading.Event()
            e.result = None
            dict.__setitem__(self, key, e)
        return value
    def __setitem__(self, key, value):
        """Set the cached value for the given key."""
        existing = self.get(key)
        dict.__setitem__(self, key, value)
        if isinstance(existing, threading._Event):
            # Set Event.result so other threads waiting on it have
            # immediate access without needing to poll the cache again.
            existing.result = value
            existing.set()
class MemoryCache(Cache):
    """An in-memory cache for varying response content.
    Each key in self.store is a URI, and each value is an AntiStampedeCache.
    The response for any given URI may vary based on the values of
    "selecting request headers"; that is, those named in the Vary
    response header. We assume the list of header names to be constant
    for each URI throughout the lifetime of the application, and store
    that list in self.store[uri].selecting_headers.
    The items contained in self.store[uri] have keys which are tuples of request
    header values (in the same order as the names in its selecting_headers),
    and values which are the actual responses.
    """
    # Maximum number of distinct URIs to cache.
    maxobjects = 1000
    # Largest single cached response body, in bytes.
    maxobj_size = 100000
    # Total cache budget, in bytes.
    maxsize = 10000000
    # Seconds a cached variant stays fresh.
    delay = 600
    # Seconds get() waits on another thread's in-flight calculation.
    antistampede_timeout = 5
    # Seconds between expiration sweeps of the daemon thread.
    expire_freq = 0.1
    debug = False
    def __init__(self):
        self.clear()
        # Run self.expire_cache in a separate daemon thread.
        t = threading.Thread(target=self.expire_cache, name='expire_cache')
        self.expiration_thread = t
        if hasattr(threading.Thread, "daemon"):
            # Python 2.6+
            t.daemon = True
        else:
            t.setDaemon(True)
        t.start()
    def clear(self):
        """Reset the cache to its initial, empty state."""
        # store: uri -> AntiStampedeCache; expirations: expiry time -> entries.
        self.store = {}
        self.expirations = {}
        # Counters for cache introspection/statistics.
        self.tot_puts = 0
        self.tot_gets = 0
        self.tot_hist = 0
        self.tot_expires = 0
        self.tot_non_modified = 0
        self.cursize = 0
    def expire_cache(self):
        """Daemon loop: evict entries whose expiration time has passed."""
        # expire_cache runs in a separate thread which the servers are
        # not aware of. It's possible that "time" will be set to None
        # arbitrarily, so we check "while time" to avoid exceptions.
        # See tickets #99 and #180 for more information.
        while time:
            now = time.time()
            # Must make a copy of expirations so it doesn't change size
            # during iteration
            # NOTE(review): .items() copies only on Python 2; on Python 3 this
            # deletes from the dict while iterating a view -- confirm target.
            for expiration_time, objects in self.expirations.items():
                if expiration_time <= now:
                    for obj_size, uri, sel_header_values in objects:
                        try:
                            del self.store[uri][sel_header_values]
                            self.tot_expires += 1
                            self.cursize -= obj_size
                        except KeyError:
                            # the key may have been deleted elsewhere
                            pass
                    del self.expirations[expiration_time]
            time.sleep(self.expire_freq)
    def get(self):
        """Return the current variant if in the cache, else None."""
        request = cherrypy.serving.request
        self.tot_gets += 1
        uri = cherrypy.url(qs=request.query_string)
        uricache = self.store.get(uri)
        if uricache is None:
            return None
        # Variant key: sorted tuple of this request's Vary header values.
        header_values = [request.headers.get(h, '')
                         for h in uricache.selecting_headers]
        header_values.sort()
        variant = uricache.wait(key=tuple(header_values),
                                timeout=self.antistampede_timeout,
                                debug=self.debug)
        if variant is not None:
            self.tot_hist += 1
        return variant
    def put(self, variant, size):
        """Store the current variant in the cache."""
        request = cherrypy.serving.request
        response = cherrypy.serving.response
        uri = cherrypy.url(qs=request.query_string)
        uricache = self.store.get(uri)
        if uricache is None:
            uricache = AntiStampedeCache()
            uricache.selecting_headers = [
                e.value for e in response.headers.elements('Vary')]
            self.store[uri] = uricache
        if len(self.store) < self.maxobjects:
            total_size = self.cursize + size
            # checks if there's space for the object
            if (size < self.maxobj_size and total_size < self.maxsize):
                # add to the expirations list
                expiration_time = response.time + self.delay
                bucket = self.expirations.setdefault(expiration_time, [])
                bucket.append((size, uri, uricache.selecting_headers))
                # add to the cache
                header_values = [request.headers.get(h, '')
                                 for h in uricache.selecting_headers]
                header_values.sort()
                uricache[tuple(header_values)] = variant
                self.tot_puts += 1
                self.cursize = total_size
    def delete(self):
        """Remove ALL cached variants of the current resource."""
        uri = cherrypy.url(qs=cherrypy.serving.request.query_string)
        self.store.pop(uri, None)
def get(invalid_methods=("POST", "PUT", "DELETE"), debug=False, **kwargs):
    """Try to obtain cached output. If fresh enough, raise HTTPError(304).
    If POST, PUT, or DELETE:
        * invalidates (deletes) any cached response for this resource
        * sets request.cached = False
        * sets request.cacheable = False
    else if a cached copy exists:
        * sets request.cached = True
        * sets request.cacheable = False
        * sets response.headers to the cached values
        * checks the cached Last-Modified response header against the
          current If-(Un)Modified-Since request headers; raises 304
          if necessary.
        * sets response.status and response.body to the cached values
        * returns True
    otherwise:
        * sets request.cached = False
        * sets request.cacheable = True
        * returns False
    """
    request = cherrypy.serving.request
    response = cherrypy.serving.response
    if not hasattr(cherrypy, "_cache"):
        # Make a process-wide Cache object.
        cherrypy._cache = kwargs.pop("cache_class", MemoryCache)()
        # Take all remaining kwargs and set them on the Cache object.
        for k, v in kwargs.items():
            setattr(cherrypy._cache, k, v)
    cherrypy._cache.debug = debug
    # POST, PUT, DELETE should invalidate (delete) the cached copy.
    # See http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.10.
    if request.method in invalid_methods:
        if debug:
            cherrypy.log('request.method %r in invalid_methods %r' %
                         (request.method, invalid_methods), 'TOOLS.CACHING')
        cherrypy._cache.delete()
        request.cached = False
        request.cacheable = False
        return False
    # The client explicitly asked to bypass the cache.
    if 'no-cache' in [e.value for e in request.headers.elements('Pragma')]:
        request.cached = False
        request.cacheable = True
        return False
    cache_data = cherrypy._cache.get()
    request.cached = bool(cache_data)
    request.cacheable = not request.cached
    if request.cached:
        # Serve the cached copy.
        max_age = cherrypy._cache.delay
        # Honor the client's Cache-Control: max-age / no-cache directives.
        for v in [e.value for e in request.headers.elements('Cache-Control')]:
            atoms = v.split('=', 1)
            directive = atoms.pop(0)
            if directive == 'max-age':
                if len(atoms) != 1 or not atoms[0].isdigit():
                    raise cherrypy.HTTPError(400, "Invalid Cache-Control header")
                max_age = int(atoms[0])
                break
            elif directive == 'no-cache':
                if debug:
                    cherrypy.log('Ignoring cache due to Cache-Control: no-cache',
                                 'TOOLS.CACHING')
                request.cached = False
                request.cacheable = True
                return False
        if debug:
            cherrypy.log('Reading response from cache', 'TOOLS.CACHING')
        # Cached tuple layout: (status, headers, body, creation time).
        s, h, b, create_time = cache_data
        age = int(response.time - create_time)
        if (age > max_age):
            if debug:
                cherrypy.log('Ignoring cache due to age > %d' % max_age,
                             'TOOLS.CACHING')
            request.cached = False
            request.cacheable = True
            return False
        # Copy the response headers. See http://www.cherrypy.org/ticket/721.
        response.headers = rh = httputil.HeaderMap()
        for k in h:
            dict.__setitem__(rh, k, dict.__getitem__(h, k))
        # Add the required Age header
        response.headers["Age"] = str(age)
        try:
            # Note that validate_since depends on a Last-Modified header;
            # this was put into the cached copy, and should have been
            # resurrected just above (response.headers = cache_data[1]).
            cptools.validate_since()
        # Python 2 syntax: binds the redirect instance to x.
        except cherrypy.HTTPRedirect, x:
            if x.status == 304:
                cherrypy._cache.tot_non_modified += 1
            raise
        # serve it & get out from the request
        response.status = s
        response.body = b
    else:
        if debug:
            cherrypy.log('request is not cached', 'TOOLS.CACHING')
    return request.cached
def tee_output():
    """Hook: wrap response.body so a copy is stored in the cache as it streams."""
    request = cherrypy.serving.request
    if 'no-store' in request.headers.values('Cache-Control'):
        return
    def tee(body):
        """Tee response.body into a list."""
        # 'response' is the closure variable bound below, after tee() is
        # defined; it is only read once the generator is first iterated.
        if ('no-cache' in response.headers.values('Pragma') or
                'no-store' in response.headers.values('Cache-Control')):
            # Response opted out of caching: pass chunks through untouched.
            for chunk in body:
                yield chunk
            return
        output = []
        for chunk in body:
            output.append(chunk)
            yield chunk
        # save the cache data
        body = ''.join(output)
        cherrypy._cache.put((response.status, response.headers or {},
                             body, response.time), len(body))
    response = cherrypy.serving.response
    response.body = tee(response.body)
def expires(secs=0, force=False, debug=False):
    """Tool for influencing cache mechanisms using the 'Expires' header.
    'secs' must be either an int or a datetime.timedelta, and indicates the
    number of seconds between response.time and when the response should
    expire. The 'Expires' header will be set to (response.time + secs).
    If 'secs' is zero, the 'Expires' header is set one year in the past, and
    the following "cache prevention" headers are also set:
        'Pragma': 'no-cache'
        'Cache-Control': 'no-cache, must-revalidate'
    If 'force' is False (the default), the following headers are checked:
    'Etag', 'Last-Modified', 'Age', 'Expires'. If any are already present,
    none of the above response headers are set.
    """
    response = cherrypy.serving.response
    headers = response.headers
    # A response that already carries a freshness/validation header is
    # considered cacheable and, unless forced, left untouched.
    cacheable = False
    if not force:
        for indicator in ('Etag', 'Last-Modified', 'Age', 'Expires'):
            if indicator in headers:
                cacheable = True
                break
    if not (cacheable or force):
        if debug:
            cherrypy.log('request is not cacheable', 'TOOLS.EXPIRES')
        return
    if debug:
        cherrypy.log('request is cacheable', 'TOOLS.EXPIRES')
    if isinstance(secs, datetime.timedelta):
        secs = (86400 * secs.days) + secs.seconds
    if secs == 0:
        # Prevent caching entirely: no-cache headers plus a past Expires date.
        if force or ("Pragma" not in headers):
            headers["Pragma"] = "no-cache"
        if cherrypy.serving.request.protocol >= (1, 1):
            if force or "Cache-Control" not in headers:
                headers["Cache-Control"] = "no-cache, must-revalidate"
        expiry = httputil.HTTPDate(1169942400.0)
    else:
        expiry = httputil.HTTPDate(response.time + secs)
    if force or "Expires" not in headers:
        headers["Expires"] = expiry
|
imajes/Sick-Beard
|
cherrypy/lib/caching.py
|
Python
|
gpl-3.0
| 15,405
|
import kivy
kivy.require('1.9.1')
from kivy.uix.boxlayout import BoxLayout
from kivy.properties import NumericProperty, ObjectProperty
class StatusBar(BoxLayout):
    """Status bar whose label reflects the last change in the widget count."""

    # Current number of widgets in the drawing space (bound by the app).
    counter = NumericProperty(0)
    # Last observed counter value, shared class-wide to detect add/remove.
    previous_counter = 0

    def on_counter(self, instance, value):
        """Kivy observer for `counter`: update msg_label on each change.

        Fixes: user-visible typo 'widger added' -> 'widget added'; parameter
        typo 'instnace' -> 'instance'; and uses `StatusBar.previous_counter`
        consistently (the original mixed `self.__class__` and `StatusBar`).
        """
        if value == 0:
            self.msg_label.text = 'drawing space cleared'
        elif value - 1 == StatusBar.previous_counter:
            self.msg_label.text = 'widget added'
        elif value + 1 == StatusBar.previous_counter:
            self.msg_label.text = 'widget removed'
        StatusBar.previous_counter = value
|
pimier15/PyGUI
|
Kivy/Kivy/Bk_Interractive/My/C3/Dragging/statusbar.py
|
Python
|
mit
| 613
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core
from hypothesis import given
import caffe2.python.hypothesis_test_util as hu
import hypothesis.strategies as st
import numpy as np
class TestClipTensorByScalingOp(hu.HypothesisTestCase):
    """Property test: ClipTensorByScaling matches a NumPy reference impl."""
    @given(n=st.integers(5, 8), d=st.integers(2, 4),
           threshold=st.floats(0.1, 10),
           additional_threshold=st.floats(0.1, 10),
           use_additional_threshold=st.booleans(),
           inplace=st.booleans(),
           **hu.gcs_cpu_only)
    def test_clip_tensor_by_scaling(self, n, d, threshold, additional_threshold,
                                    use_additional_threshold, inplace, gc, dc):
        # Random input and its L2 norm; the op rescales the tensor whenever
        # the norm exceeds the (optionally combined) threshold.
        tensor = np.random.rand(n, d).astype(np.float32)
        val = np.array(np.linalg.norm(tensor))
        additional_threshold = np.array([additional_threshold]).astype(np.float32)
        def clip_tensor_by_scaling_ref(tensor_data, val_data,
                                       additional_threshold=None):
            # Reference: scale the tensor down so its norm equals the threshold.
            if additional_threshold is not None:
                final_threshold = threshold * additional_threshold
            else:
                final_threshold = threshold
            if val_data > final_threshold:
                ratio = final_threshold / float(val_data)
                tensor_data = tensor_data * ratio
            return [tensor_data]
        op = core.CreateOperator(
            "ClipTensorByScaling",
            ["tensor", "val"] if not use_additional_threshold else (
                ["tensor", "val", "additional_threshold"]),
            ['Y'] if not inplace else ["tensor"],
            threshold=threshold,
        )
        self.assertReferenceChecks(
            device_option=gc,
            op=op,
            inputs=[tensor, val] if not use_additional_threshold else (
                [tensor, val, additional_threshold]),
            reference=clip_tensor_by_scaling_ref,
        )
# Allow running this test module directly, outside the caffe2 test runner.
if __name__ == "__main__":
    import unittest
    unittest.main()
|
xzturn/caffe2
|
caffe2/python/operator_test/clip_tensor_op_test.py
|
Python
|
apache-2.0
| 2,109
|
from flask import Flask
from flask import request, render_template, jsonify
from src.common.NER_utils import transform_dataset_web
from src.CRF_NER.CRF_NER import parse_commands
import pycrfsuite
# Basename of the trained CRF model; "<model>.crfmodel" is opened per request.
model = "1_nbr"
def init(filename="model.txt"):
    """Read *filename* and return (label, feature-parameter list).

    The file holds space-separated tokens; the first is the model label and
    the remainder are the feature parameters.
    """
    with open(filename) as handle:
        content = handle.read()
    parts = content.strip().split(' ')
    return parts[0], parts[1:]
# Load the saved feature parameters once at import time (the label is unused).
_, params = init()
app = Flask(__name__)
def wrap_text(tag, token):
    """Render *token*, annotating it with *tag* unless it is outside ("O")."""
    return token if tag == "O" else "<{} {}>".format(tag, token)
@app.route("/")
def my_form():
    # Serve the input form for the NER demo.
    return render_template("my-form.html")
@app.route("/annotate", methods=['POST', 'GET'])
def my_for_post():
    # Tag the sentence from the query string; return annotated text as JSON.
    text = request.args.get('sentence', 0, type=str)
    # NOTE(review): the CRF model is opened from disk on every request;
    # consider loading the tagger once at startup.
    tagger = pycrfsuite.Tagger()
    tagger.open(model+".crfmodel")
    features, tokens = transform_dataset_web(text, params, merge = "supertype")
    print(features)  # NOTE(review): leftover debug output
    predictions = tagger.tag(features)
    print(predictions)  # NOTE(review): leftover debug output
    tagger.close()
    # Wrap each token with its predicted tag, e.g. "<P Jan>".
    output = " ".join(wrap_text(tag, token) for tag, token in zip(predictions, tokens))
    return jsonify(result=output)
# Run the Flask development server when executed directly.
if __name__ == "__main__":
    app.run()
|
nguyeho7/CZ_NER
|
src/webservice/server.py
|
Python
|
mit
| 1,188
|
# -*- coding: utf-8 -*-
# flake8: noqa
import datetime
import logging
import json
import os.path
import prison
import urllib.parse
from .util import EAException
from .util import lookup_es_key
from .util import ts_add
# Default lookback/lookahead window around the match timestamp.
kibana_default_timedelta = datetime.timedelta(minutes=10)
# Kibana versions that share the 5.x/6.x discover-app URL state format.
kibana5_kibana6_versions = frozenset(['5.6', '6.0', '6.1', '6.2', '6.3', '6.4', '6.5', '6.6', '6.7', '6.8'])
# Kibana versions that use the 7.x discover-app URL state format.
kibana7_versions = frozenset(['7.0', '7.1', '7.2', '7.3'])
def generate_kibana_discover_url(rule, match):
    ''' Creates a link for a kibana discover app. '''
    # All three settings below are required; each missing one is logged and
    # aborts URL generation by returning None.
    discover_app_url = rule.get('kibana_discover_app_url')
    if not discover_app_url:
        logging.warning(
            'Missing kibana_discover_app_url for rule %s' % (
                rule.get('name', '<MISSING NAME>')
            )
        )
        return None
    kibana_version = rule.get('kibana_discover_version')
    if not kibana_version:
        logging.warning(
            'Missing kibana_discover_version for rule %s' % (
                rule.get('name', '<MISSING NAME>')
            )
        )
        return None
    index = rule.get('kibana_discover_index_pattern_id')
    if not index:
        logging.warning(
            'Missing kibana_discover_index_pattern_id for rule %s' % (
                rule.get('name', '<MISSING NAME>')
            )
        )
        return None
    columns = rule.get('kibana_discover_columns', ['_source'])
    filters = rule.get('filter', [])
    if 'query_key' in rule:
        query_keys = rule.get('compound_query_key', [rule['query_key']])
    else:
        query_keys = []
    # Time window: [timestamp - from_timedelta, timestamp + to_timedelta].
    timestamp = lookup_es_key(match, rule['timestamp_field'])
    timeframe = rule.get('timeframe', kibana_default_timedelta)
    from_timedelta = rule.get('kibana_discover_from_timedelta', timeframe)
    from_time = ts_add(timestamp, -from_timedelta)
    to_timedelta = rule.get('kibana_discover_to_timedelta', timeframe)
    to_time = ts_add(timestamp, to_timedelta)
    # Pick the URL state format matching the configured Kibana version.
    if kibana_version in kibana5_kibana6_versions:
        globalState = kibana6_disover_global_state(from_time, to_time)
        appState = kibana_discover_app_state(index, columns, filters, query_keys, match)
    elif kibana_version in kibana7_versions:
        globalState = kibana7_disover_global_state(from_time, to_time)
        appState = kibana_discover_app_state(index, columns, filters, query_keys, match)
    else:
        logging.warning(
            'Unknown kibana discover application version %s for rule %s' % (
                kibana_version,
                rule.get('name', '<MISSING NAME>')
            )
        )
        return None
    # Final URL: base app URL plus the rison-encoded _g and _a query params.
    return "%s?_g=%s&_a=%s" % (
        os.path.expandvars(discover_app_url),
        urllib.parse.quote(globalState),
        urllib.parse.quote(appState)
    )
def kibana6_disover_global_state(from_time, to_time):
    """Rison-encode the Kibana 5.x/6.x discover ``_g`` (global) state."""
    state = {
        'refreshInterval': {
            'pause': True,
            'value': 0,
        },
        'time': {
            'from': from_time,
            'mode': 'absolute',
            'to': to_time,
        },
    }
    return prison.dumps(state)
def kibana7_disover_global_state(from_time, to_time):
    """Rison-encode the Kibana 7.x discover ``_g`` (global) state."""
    state = {
        'filters': [],
        'refreshInterval': {
            'pause': True,
            'value': 0,
        },
        'time': {
            'from': from_time,
            'to': to_time,
        },
    }
    return prison.dumps(state)
def kibana_discover_app_state(index, columns, filters, query_keys, match):
    # Build the rison-encoded _a (app) state: the rule's filters plus one
    # filter per query key pinning this match's values.
    app_filters = []
    if filters:
        # Wrap the rule's raw ES filters in a single custom bool filter.
        bool_filter = { 'must': filters }
        app_filters.append( {
            '$state': {
                'store': 'appState'
            },
            'bool': bool_filter,
            'meta': {
                'alias': 'filter',
                'disabled': False,
                'index': index,
                'key': 'bool',
                'negate': False,
                'type': 'custom',
                'value': json.dumps(bool_filter, separators=(',', ':'))
            },
        } )
    for query_key in query_keys:
        query_value = lookup_es_key(match, query_key)
        if query_value is None:
            # Field was absent on the match: filter for documents where the
            # field does NOT exist (negated exists filter).
            app_filters.append( {
                '$state': {
                    'store': 'appState'
                },
                'exists': {
                    'field': query_key
                },
                'meta': {
                    'alias': None,
                    'disabled': False,
                    'index': index,
                    'key': query_key,
                    'negate': True,
                    'type': 'exists',
                    'value': 'exists'
                }
            } )
        else:
            # Pin the field to the match's value with a phrase filter.
            app_filters.append( {
                '$state': {
                    'store': 'appState'
                },
                'meta': {
                    'alias': None,
                    'disabled': False,
                    'index': index,
                    'key': query_key,
                    'negate': False,
                    'params': {
                        'query': query_value,
                        'type': 'phrase'
                    },
                    'type': 'phrase',
                    'value': str(query_value)
                },
                'query': {
                    'match': {
                        query_key: {
                            'query': query_value,
                            'type': 'phrase'
                        }
                    }
                }
            } )
    return prison.dumps( {
        'columns': columns,
        'filters': app_filters,
        'index': index,
        'interval': 'auto'
    } )
|
Yelp/elastalert
|
elastalert/kibana_discover.py
|
Python
|
apache-2.0
| 5,644
|
"""
(C) 2013 Humarobotics
Provides classes for quick threading of object methods
In your class, just add this line to the __init__:
self.post=Post(self)
You can now call any object methods with object.post.method(args)
The Thread object is returned.
"""
from threading import Thread
class PostThread(Thread):
    """Runs one function call on its own thread and keeps the result.

    Attributes read by callers: ``result`` (the return value, None until the
    call completes) and ``isRunning`` (False once the call has finished).
    """

    def __init__(self, func):
        """Remember *func*; call arguments arrive later via execute()."""
        Thread.__init__(self)
        self._call = func
        self.isRunning = True
        self.result = None

    def execute(self, *args, **kwargs):
        """Record the call arguments, start the thread, and return it."""
        self._call_args = args
        self._call_kwargs = kwargs
        self.start()
        return self

    def run(self):
        """Thread body: invoke the stored function and publish its result."""
        self.result = self._call(*self._call_args, **self._call_kwargs)
        self.isRunning = False
class Post:
    """Provides threaded calls to its parent object's methods.

    After ``obj.post = Post(obj)``, ``obj.post.method(args)`` runs
    ``obj.method(args)`` on a new thread and returns the PostThread.
    """

    def __init__(self, parent):
        self.parent = parent

    def __getattr__(self, attr):
        """Look up *attr* on the parent and return a threaded wrapper for it.

        Fix: the original wrapped the whole body in a bare ``except:``, which
        also masked errors from PostThread construction (and swallowed
        KeyboardInterrupt/SystemExit).  Only the attribute lookup can
        legitimately fail here.
        """
        try:
            func = getattr(self.parent, attr)
        except AttributeError:
            raise Exception("ERROR: Post call on %s: method %s not found" % (str(self.parent), attr))
        post_thread = PostThread(func)
        return post_thread.execute
if __name__=="__main__":
    # Demo: compare a direct call with several posted (threaded) calls.
    # Single-argument print() is behavior-identical under Python 2.
    class Dummy:
        def __init__(self):
            self.post = Post(self)

        def do(self, param):
            import time
            print("Doing... param=" + str(param))
            time.sleep(2)
            print("Done")
            return param

    dummy = Dummy()
    dummy.do("direct1")
    dummy.post.do("post1")
    dummy.post.do("post2")
    t3 = dummy.post.do("post3")
    t3.join()
    print(t3.result)
    print("Finished")
|
tavallaie/RoboDynamixel
|
dxl/post_threading.py
|
Python
|
mit
| 2,176
|
#!/usr/bin/env python
# **********************************************************************
#
# Copyright (c) 2003-2015 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import os, sys, imp
# Make the shared test helpers (scripts/TestUtil.py) importable.
sys.path.append(os.path.join(os.path.dirname(__file__), "scripts"))
import TestUtil
testGroups = []
# Language mappings whose per-language allTests.py manifests are aggregated;
# platform-specific languages are appended below.
languages = [ "cpp", "java", "python", "ruby", "php", "js" ]
if TestUtil.isWin32():
    languages.append("csharp")
if TestUtil.isDarwin():
    languages.append("objective-c")
for d in languages:
    filename = os.path.abspath(os.path.join(os.path.dirname(__file__), d, "allTests.py"))
    f = open(filename, "r")
    # Load each language's manifest under the fixed module name "allTests".
    current_mod = imp.load_module("allTests", f, filename, (".py", "r", imp.PY_SOURCE))
    f.close()
    tests = []
    # NOTE(review): the line above is redundant -- immediately reassigned.
    # Prefix each test path with "<language>/test/".
    tests = [ (os.path.join(d, "test", x), y) for x, y in current_mod.tests ]
    if len(tests) > 0:
        testGroups.extend(tests)
TestUtil.run(testGroups, root=True)
|
elijah513/ice
|
allTests.py
|
Python
|
gpl-2.0
| 1,083
|
from netfields import InetAddressField, CidrAddressField
from django.db import models
from django.core.exceptions import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from nodeshot.core.base.models import BaseAccessLevel
from ..managers import NetAccessLevelManager
from choices import IP_PROTOCOLS
class Ip(BaseAccessLevel):
    """ IP Address Model """
    # Interface this address is assigned to.
    interface = models.ForeignKey('net.Interface', verbose_name=_('interface'))
    address = InetAddressField(verbose_name=_('ip address'), unique=True, db_index=True)
    # Derived automatically in save() from the address version; see below.
    protocol = models.CharField(_('IP Protocol Version'), max_length=4, choices=IP_PROTOCOLS, default=IP_PROTOCOLS[0][0], blank=True)
    netmask = CidrAddressField(_('netmask (CIDR, eg: 10.40.0.0/24)'), blank=True, null=True)
    objects = NetAccessLevelManager()
    class Meta:
        app_label = 'net'
        permissions = (('can_view_ip', 'Can view ip'),)
        verbose_name = _('ip address')
        verbose_name_plural = _('ip addresses')
    def __unicode__(self):
        # e.g. "ipv4: 10.40.0.1"
        return '%s: %s' % (self.protocol, self.address)
    def clean(self, *args, **kwargs):
        """ TODO """
        # netaddr.IPAddress('10.40.2.1') in netaddr.IPNetwork('10.40.0.0/24')
        pass
    def save(self, *args, **kwargs):
        """
        Determines ip protocol version automatically.
        Stores address in interface shortcuts for convenience.
        """
        # address.version is 4 or 6, yielding "ipv4" / "ipv6".
        self.protocol = 'ipv%d' % self.address.version
        # save
        super(Ip, self).save(*args, **kwargs)
        # TODO: do we really need this?
        # save shortcut on interfaces
        #ip_cached_list = self.interface.ip_addresses
        ## if not present in interface shorctus add it to the list
        #if str(self.address) not in ip_cached_list:
        #    # recalculate cached_ip_list
        #    recalculated_ip_cached_list = []
        #    for ip in self.interface.ip_set.all():
        #        recalculated_ip_cached_list.append(str(ip.address))
        #    # rebuild string in format "<ip_1>, <ip_2>"
        #    self.interface.data['ip_addresses'] = recalculated_ip_cached_list
        #    self.interface.save()
    @property
    def owner(self):
        # Ownership is delegated to the parent interface.
        return self.interface.owner
    if 'grappelli' in settings.INSTALLED_APPS:
        # Enable grappelli admin autocomplete when that app is installed.
        @staticmethod
        def autocomplete_search_fields():
            return ('address__icontains',)
|
sephiroth6/nodeshot
|
nodeshot/networking/net/models/ip.py
|
Python
|
gpl-3.0
| 2,430
|
import mysql.connector
class DB(object):
    """Thin wrapper around a lazily-opened mysql.connector connection."""

    def __init__(self, settings):
        """Store *settings* ({'db': {username, password, host, database}})."""
        self.conn = None
        self.settings = settings

    def get_db_connection(self):
        """Return the open connection, creating it lazily on first use."""
        if self.conn is None:
            self.conn = mysql.connector.connect(
                user=self.settings['db']['username'],
                password=self.settings['db']['password'],
                host=self.settings['db']['host'],
                database=self.settings['db']['database']
            )
        return self.conn

    def query(self, query):
        """Run a read query and return all rows; '' if the query fails."""
        self.conn = self.get_db_connection()
        cursor = self.conn.cursor()
        results = ''
        try:
            cursor.execute(query)
            results = cursor.fetchall()
        except Exception:
            # Narrowed from a bare except (which also swallowed
            # KeyboardInterrupt); best-effort behavior preserved.
            # Typo fixed in the message: "fecth" -> "fetch".
            print("[!] error: unable to fetch data")
        finally:
            cursor.close()
        return results

    def close_db_connection(self):
        """Close the underlying connection (opening it first if needed)."""
        self.conn = self.get_db_connection()
        self.conn.close()

    def clear_database(self):
        """Delete every row from the header/site lookup tables."""
        self.conn = self.get_db_connection()
        cursor = self.conn.cursor()
        print('')
        print('Cleaning database')
        print('Tables: [header, site, header_value, header_name]')
        print('')
        db_tables = [
            'DELETE FROM header WHERE header_id>0;',
            'DELETE FROM site WHERE site_id>0;',
            'DELETE FROM header_value WHERE header_value_id>0;',
            'DELETE FROM header_name WHERE header_name_id>0;'
        ]
        for command in db_tables:
            cursor.execute(command)
        self.conn.commit()
        cursor.close()

    def save(self, command, table_name, table):
        """Bulk-insert *table* (a list of rows or a dict of pairs) via *command*."""
        self.conn = self.get_db_connection()
        cursor = self.conn.cursor()
        # BUG FIX: was print('Table: {}').format(table_name), which calls
        # .format() on None (print's return value) and raises AttributeError.
        print('Table: {}'.format(table_name))
        if type(table) is list:
            for x in table:
                cursor.execute(command, tuple(x))
        elif type(table) is dict:
            for x in table.items():
                cursor.execute(command, x)
        self.conn.commit()
        cursor.close()

    def populate_mysql(self,
                       site_table,
                       header_name_table,
                       header_value_table,
                       header_table
                       ):
        """Clear the database, then insert the four prepared tables."""
        self.clear_database()
        print('Populating database...')
        tables = [
            [
                'INSERT INTO `site` (`site_id`, `site`, `url`, `code`) VALUES (%s, %s, %s, %s)',
                'site',
                site_table
            ],
            [
                'INSERT INTO `header_value` (`value`, `header_value_id`) VALUES (%s, %s)',
                'header_value',
                header_value_table
            ],
            [
                'INSERT INTO `header_name` (`name`, `header_name_id`) VALUES (%s, %s)',
                'header_name',
                header_name_table
            ],
            [
                'INSERT INTO `header` (`site_id`, `header_name_id`, `header_value_id`) VALUES (%s, %s, %s)',
                'header',
                header_table
            ]
        ]
        for command, table_name, table in tables:
            self.save(command, table_name, table)
        self.close_db_connection()
|
amenezes/secureheaders
|
lib/database/db.py
|
Python
|
gpl-2.0
| 3,252
|
# -*- coding: utf-8 -*-
import functools
import re
from flask.ext import login
from flask.ext import oauth
from google.appengine.api import users
from google.appengine.ext import ndb
import flask
import config
import model
import util
from main import app
_signals = flask.signals.Namespace()
###############################################################################
# Flask Login
###############################################################################
login_manager = login.LoginManager()
class AnonymousUser(login.AnonymousUserMixin):
    # Sentinel user for requests with no session; id 0 marks "not signed in".
    id = 0
    admin = False
    name = 'Anonymous'
    user_db = None
    def key(self):
        # Anonymous users have no datastore entity, hence no key.
        return None
login_manager.anonymous_user = AnonymousUser
class FlaskUser(AnonymousUser):
    """flask-login wrapper around a datastore User entity."""
    def __init__(self, user_db):
        # Mirror frequently-used entity fields onto the session user.
        self.user_db = user_db
        self.id = user_db.key.id()
        self.name = user_db.name
        self.admin = user_db.admin
    def key(self):
        return self.user_db.key.urlsafe()
    def get_id(self):
        # flask-login session identifier: the urlsafe datastore key.
        return self.user_db.key.urlsafe()
    def is_authenticated(self):
        return True
    def is_active(self):
        return self.user_db.active
    def is_anonymous(self):
        return False
@login_manager.user_loader
def load_user(key):
    """Rehydrate the session user from its urlsafe datastore key, or None."""
    user_db = ndb.Key(urlsafe=key).get()
    return FlaskUser(user_db) if user_db else None
login_manager.init_app(app)
def current_user_id():
    """Return the numeric id of the signed-in user (0 for anonymous)."""
    return login.current_user.id
def current_user_key():
    """Return the ndb key of the signed-in user's entity, or None."""
    return login.current_user.user_db.key if login.current_user.user_db else None
def current_user_db():
    """Return the signed-in user's datastore entity (None for anonymous)."""
    return login.current_user.user_db
def is_logged_in():
    """True when a real (non-anonymous) user is signed in."""
    return login.current_user.id != 0
###############################################################################
# Decorators
###############################################################################
def login_required(f):
    """Decorator: require a signed-in user.

    API paths (/_s/) get a 401; regular pages redirect to the signin page.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kws):
        if is_logged_in():
            return f(*args, **kws)
        if flask.request.path.startswith('/_s/'):
            return flask.abort(401)
        return flask.redirect(flask.url_for('signin', next=flask.request.url))
    return decorated_function
def admin_required(f):
    """Decorator: require a signed-in admin user.

    Anonymous users get 401 (API) or a signin redirect; signed-in
    non-admins get 403.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kws):
        if is_logged_in() and current_user_db().admin:
            return f(*args, **kws)
        if not is_logged_in() and flask.request.path.startswith('/_s/'):
            return flask.abort(401)
        if not is_logged_in():
            return flask.redirect(flask.url_for('signin', next=flask.request.url))
        return flask.abort(403)
    return decorated_function
# Signal emitted once per decorated view so the app can collect all
# registered permission names.
permission_registered = _signals.signal('permission-registered')
def permission_required(permission=None):
    """Decorator factory: require *permission* (defaults to the view's name)."""
    def permission_decorator(f):
        # default to decorated function name as permission
        perm = permission or f.func_name
        permission_registered.send(f, permission=perm)
        @functools.wraps(f)
        def decorated_function(*args, **kws):
            if is_logged_in() and current_user_db().has_permission(perm):
                return f(*args, **kws)
            if not is_logged_in():
                if flask.request.path.startswith('/_s/'):
                    return flask.abort(401)
                return flask.redirect(flask.url_for('signin', next=flask.request.url))
            return flask.abort(403)
        return decorated_function
    return permission_decorator
###############################################################################
# Sign in stuff
###############################################################################
@app.route('/login/')
@app.route('/signin/')
def signin():
    """Render the sign-in page with provider-specific sign-in links."""
    next_url = util.get_next_url()
    # Avoid redirect loops back to the signin page itself.
    if flask.url_for('signin') in next_url:
        next_url = flask.url_for('welcome')
    google_signin_url = flask.url_for('signin_google', next=next_url)
    twitter_signin_url = flask.url_for('signin_twitter', next=next_url)
    facebook_signin_url = flask.url_for('signin_facebook', next=next_url)
    return flask.render_template(
        'signin.html',
        title='Please sign in',
        html_class='signin',
        google_signin_url=google_signin_url,
        twitter_signin_url=twitter_signin_url,
        facebook_signin_url=facebook_signin_url,
        next_url=next_url,
    )
@app.route('/signout/')
def signout():
    """End the session and redirect to the welcome page."""
    login.logout_user()
    flask.flash(u'You have been signed out.', category='success')
    return flask.redirect(flask.url_for('welcome'))
###############################################################################
# Google
###############################################################################
@app.route('/signin/google/')
def signin_google():
    """Redirect to the App Engine Users API login, returning to our callback."""
    google_url = users.create_login_url(
        flask.url_for('google_authorized', next=util.get_next_url())
    )
    return flask.redirect(google_url)
@app.route('/_s/callback/google/authorized/')
def google_authorized():
    """Google sign-in callback: resolve the GAE user and sign them in."""
    google_user = users.get_current_user()
    if google_user is None:
        flask.flash(u'You denied the request to sign in.')
        return flask.redirect(util.get_next_url())
    user_db = retrieve_user_from_google(google_user)
    return signin_user_db(user_db)
def retrieve_user_from_google(google_user):
  """Return the User record for a GAE `google_user`, creating it if needed.

  An existing user is promoted to admin when GAE reports the current user
  as an application admin. New users get a display name derived from the
  local part of their email address.
  """
  auth_id = 'federated_%s' % google_user.user_id()
  existing = model.User.retrieve_one_by('auth_ids', auth_id)
  if existing:
    # Promote to admin if GAE says so and the record doesn't reflect it yet.
    if not existing.admin and users.is_current_user_admin():
      existing.admin = True
      existing.put()
    return existing
  email = google_user.email()
  display_name = re.sub(r'_+|-+|\.+', ' ', email.split('@')[0]).title()
  return create_user_db(
      auth_id,
      display_name,
      email,
      email,
      admin=users.is_current_user_admin(),
  )
###############################################################################
# Twitter
###############################################################################
# Twitter OAuth 1.0a client; consumer credentials come from the config datastore.
twitter_oauth = oauth.OAuth()
twitter = twitter_oauth.remote_app(
  'twitter',
  base_url='https://api.twitter.com/1.1/',
  request_token_url='https://api.twitter.com/oauth/request_token',
  access_token_url='https://api.twitter.com/oauth/access_token',
  authorize_url='https://api.twitter.com/oauth/authorize',
  consumer_key=config.CONFIG_DB.twitter_consumer_key,
  consumer_secret=config.CONFIG_DB.twitter_consumer_secret,
)
@app.route('/_s/callback/twitter/oauth-authorized/')
@twitter.authorized_handler
def twitter_authorized(resp):
  """OAuth callback hit by Twitter after the user authorizes (or denies).

  `resp` is None when the user denied access; otherwise it carries the
  token pair, which is stashed in the session for get_twitter_token().
  """
  if resp is None:
    flask.flash(u'You denied the request to sign in.')
    return flask.redirect(util.get_next_url())
  flask.session['oauth_token'] = (
    resp['oauth_token'],
    resp['oauth_token_secret']
  )
  user_db = retrieve_user_from_twitter(resp)
  return signin_user_db(user_db)
@twitter.tokengetter
def get_twitter_token():
  """Return the (token, secret) pair stored in the session, if any."""
  return flask.session.get('oauth_token')
@app.route('/signin/twitter/')
def signin_twitter():
  """Start the Twitter OAuth dance.

  Clears any stale request token, then redirects the user to Twitter's
  authorize page. On failure (bad credentials, network error, ...) the
  user is sent back to the sign-in page with a flash message.
  """
  flask.session.pop('oauth_token', None)
  try:
    return twitter.authorize(
      callback=flask.url_for('twitter_authorized',
                             next=util.get_next_url()),
    )
  except Exception:
    # Was a bare `except:`, which would also swallow SystemExit and
    # KeyboardInterrupt; only operational errors should be handled here.
    flask.flash(
      'Something went wrong with Twitter sign in. Please try again.',
      category='danger',
    )
    return flask.redirect(flask.url_for('signin', next=util.get_next_url()))
def retrieve_user_from_twitter(response):
  """Return the User record for a Twitter OAuth response, creating it if new.

  The Twitter screen name doubles as both display name and username seed.
  """
  auth_id = 'twitter_%s' % response['user_id']
  existing = model.User.retrieve_one_by('auth_ids', auth_id)
  if existing:
    return existing
  screen_name = response['screen_name']
  return create_user_db(
      auth_id,
      screen_name,
      screen_name,
  )
###############################################################################
# Facebook
###############################################################################
# Facebook OAuth 2.0 client; app credentials come from the config datastore.
# The 'email' scope is requested so the Graph API /me call can include email.
facebook_oauth = oauth.OAuth()
facebook = facebook_oauth.remote_app(
  'facebook',
  base_url='https://graph.facebook.com/',
  request_token_url=None,
  access_token_url='/oauth/access_token',
  authorize_url='https://www.facebook.com/dialog/oauth',
  consumer_key=config.CONFIG_DB.facebook_app_id,
  consumer_secret=config.CONFIG_DB.facebook_app_secret,
  request_token_params={'scope': 'email'},
)
@app.route('/_s/callback/facebook/oauth-authorized/')
@facebook.authorized_handler
def facebook_authorized(resp):
  """OAuth callback hit by Facebook after the user authorizes (or denies).

  Stores the access token in the session, fetches the Graph API profile,
  then signs the matching (or newly created) user in.
  """
  if resp is None:
    flask.flash(u'You denied the request to sign in.')
    return flask.redirect(util.get_next_url())
  # OAuth 2.0 has no token secret; keep the tuple shape the tokengetter expects.
  flask.session['oauth_token'] = (resp['access_token'], '')
  me = facebook.get('/me')
  user_db = retrieve_user_from_facebook(me.data)
  return signin_user_db(user_db)
@facebook.tokengetter
def get_facebook_oauth_token():
  """Return the (access_token, '') pair stored in the session, if any."""
  return flask.session.get('oauth_token')
@app.route('/signin/facebook/')
def signin_facebook():
  """Redirect to Facebook's OAuth dialog; callback is facebook_authorized()."""
  # _external=True: Facebook requires an absolute redirect URI.
  return facebook.authorize(callback=flask.url_for('facebook_authorized',
      next=util.get_next_url(),
      _external=True),
    )
def retrieve_user_from_facebook(response):
  """Return the User record for a Facebook Graph profile, creating it if new.

  Args:
    response: dict from the Graph API `/me` endpoint; always has 'id' and
      'name', while 'username' and 'email' are optional (email is absent
      when the user did not grant the email permission).
  """
  auth_id = 'facebook_%s' % response['id']
  user_db = model.User.retrieve_one_by('auth_ids', auth_id)
  if user_db:
    return user_db
  return create_user_db(
      auth_id,
      response['name'],
      # Fall back to the numeric id when no vanity username exists.
      response.get('username', response['id']),
      # response['email'] raised KeyError for accounts without a shared
      # email; default to '' so create_user_db's default applies.
      response.get('email', ''),
  )
###############################################################################
# Helpers
###############################################################################
def create_user_db(auth_id, name, username, email='', **params):
  """Create and persist a new User record with a unique username.

  The username seed is lower-cased, stripped of an email domain, and has
  runs of underscores/dashes/whitespace collapsed to dots; a numeric
  suffix is appended until the result is unique.
  """
  base = re.sub(r'_+|-+|\s+', '.', username.split('@')[0].lower().strip())
  candidate = base
  suffix = 1
  while model.User.retrieve_one_by('username', candidate) is not None:
    candidate = '%s%d' % (base, suffix)
    suffix += 1
  user_db = model.User(
      name=name,
      email=email.lower(),
      username=candidate,
      auth_ids=[auth_id],
      **params
  )
  user_db.put()
  return user_db
@ndb.toplevel
def signin_user_db(user_db):
  """Sign `user_db` into the Flask-Login session and redirect appropriately.

  @ndb.toplevel ensures the async put below completes before the response
  is returned. A falsy `user_db` or a failed login bounces back to signin.
  """
  if not user_db:
    return flask.redirect(flask.url_for('signin'))
  flask_user_db = FlaskUser(user_db)
  if login.login_user(flask_user_db):
    user_db.put_async()
    flask.flash('Hello %s, welcome to %s.' % (
        user_db.name, config.CONFIG_DB.brand_name,
      ), category='success')
    return flask.redirect(util.get_next_url())
  else:
    flask.flash('Sorry, but you could not sign in.', category='danger')
    return flask.redirect(flask.url_for('signin'))
|
jaja14/lab5
|
main/auth.py
|
Python
|
mit
| 10,147
|
# Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
"PACKS_PACK_NAME",
"PACK_REF_WHITELIST_REGEX",
"PACK_RESERVED_CHARACTERS",
"PACK_VERSION_SEPARATOR",
"PACK_VERSION_REGEX",
"ST2_VERSION_REGEX",
"SYSTEM_PACK_NAME",
"PACKS_PACK_NAME",
"LINUX_PACK_NAME",
"SYSTEM_PACK_NAMES",
"CHATOPS_PACK_NAME",
"USER_PACK_NAME_BLACKLIST",
"BASE_PACK_REQUIREMENTS",
"MANIFEST_FILE_NAME",
"CONFIG_SCHEMA_FILE_NAME",
]
# Prefix for render context w/ config
PACK_CONFIG_CONTEXT_KV_PREFIX = "config_context"
# A list of allowed characters for the pack name
PACK_REF_WHITELIST_REGEX = r"^[a-z0-9_]+$"
# Check for a valid semver string
PACK_VERSION_REGEX = r"^(?:0|[1-9]\d*)\.(?:0|[1-9]\d*)\.(?:0|[1-9]\d*)(?:-[\da-z\-]+(?:\.[\da-z\-]+)*)?(?:\+[\da-z\-]+(?:\.[\da-z\-]+)*)?$" # noqa
# Special characters which can't be used in pack names
PACK_RESERVED_CHARACTERS = ["."]
# Version sperator when version is supplied in pack name
# Example: libcloud@1.0.1
PACK_VERSION_SEPARATOR = "="
# Check for st2 version in engines
ST2_VERSION_REGEX = r"^((>?>|>=|=|<=|<?<)\s*[0-9]+\.[0-9]+\.[0-9]+?(\s*,)?\s*)+$"
# Name used for system pack
SYSTEM_PACK_NAME = "core"
# Name used for pack management pack
PACKS_PACK_NAME = "packs"
# Name used for linux pack
LINUX_PACK_NAME = "linux"
# Name of the default pack
DEFAULT_PACK_NAME = "default"
# Name of the chatops pack
CHATOPS_PACK_NAME = "chatops"
# A list of system pack names
SYSTEM_PACK_NAMES = [
CHATOPS_PACK_NAME,
SYSTEM_PACK_NAME,
PACKS_PACK_NAME,
LINUX_PACK_NAME,
]
# A list of pack names which can't be used by user-supplied packs
USER_PACK_NAME_BLACKLIST = [SYSTEM_PACK_NAME, PACKS_PACK_NAME]
# Python requirements which are common to all the packs and are installed into the Python pack
# sandbox (virtualenv)
BASE_PACK_REQUIREMENTS = ["six>=1.9.0,<2.0"]
# Name of the pack manifest file
MANIFEST_FILE_NAME = "pack.yaml"
# File name for the config schema file
CONFIG_SCHEMA_FILE_NAME = "config.schema.yaml"
|
nzlosh/st2
|
st2common/st2common/constants/pack.py
|
Python
|
apache-2.0
| 2,612
|
'''Helper functions for rosenbrock optimization examples.'''
import climate
import downhill
import numpy as np
import theano
from theano.sandbox.rng_mrg import MRG_RandomStreams as RandomStreams
climate.enable_default_logging()
COLORS = ('#d62728 #1f77b4 #2ca02c #9467bd #ff7f0e '
'#e377c2 #8c564b #bcbd22 #7f7f7f #17becf').split()
def build(algo, init):
    '''Build and return an optimizer for the rosenbrock function.

    In downhill, an optimizer can be constructed using the build() top-level
    function. This function requires several Theano quantities such as the loss
    being optimized and the parameters to update during optimization.

    Parameters: `algo` is the downhill algorithm name; `init` is the starting
    point (sequence of floats) for the shared parameter vector.
    '''
    x = theano.shared(np.array(init, 'f'), name='x')
    # Additive Gaussian noise on the loss makes the optimization stochastic.
    n = 0.1 * RandomStreams().normal((len(init) - 1, ))
    monitors = []
    if len(init) == 2:
        # this gives us access to the x and y locations during optimization.
        monitors.extend([('x', x[:-1].sum()), ('y', x[1:].sum())])
    # Generalized Rosenbrock loss: sum of 100*(x_{i+1}-x_i^2)^2 + (1-x_i)^2.
    return downhill.build(
        algo,
        loss=(n + 100 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2).sum(),
        params=[x],
        inputs=[],
        monitors=monitors,
        monitor_gradients=True)
def build_and_trace(algo, init, limit=100, **kwargs):
    '''Run an optimizer on the rosenbrock function. Return xs, ys, and losses.

    In downhill, optimization algorithms can be iterated over to progressively
    minimize the loss. At each iteration, the optimizer yields a dictionary of
    monitor values that were computed during that iteration. Here we build an
    optimizer and then run it for a fixed number of iterations.

    All three returned values are lists trimmed of the trailing iterations
    (the patience window) after any failure of patience.
    '''
    kw = dict(min_improvement=0, patience=0, max_gradient_norm=100)
    kw.update(kwargs)
    xs, ys, loss = [], [], []
    for tm, _ in build(algo, init).iterate([[]], **kw):
        if len(init) == 2:
            xs.append(tm['x'])
            ys.append(tm['y'])
        loss.append(tm['loss'])
        if len(loss) == limit:
            break
    # Return the optimization up to any failure of patience.
    # BUGFIX: was `loss[-9]`, which returned a single scalar instead of the
    # trimmed list promised by the docstring and matching xs[:-9]/ys[:-9].
    return xs[:-9], ys[:-9], loss[:-9]
def test(algos, n=10, init=[-1.1, 0], limit=100):
    '''Run several optimizers for comparison.

    Each optimizer is run `n` times with randomly drawn hyperparameters, and
    the results are yielded back to the caller (often stored in a dict).

    Yields
    ------
    (key, value) pairs. `key` is (algo, learning_rate, momentum,
    rms_halflife, rms_regularizer); `value` is the (xs, ys, loss) triple
    from build_and_trace (xs/ys are only non-empty for 2D experiments).
    '''
    for algo in algos:
        for _ in range(n):
            # Draw order matters for RNG reproducibility: mu, rate, half, reg.
            momentum = max(0, np.random.uniform(0, 1.2) - 0.2)
            learning_rate = np.exp(np.random.uniform(-8, 0))
            halflife = int(np.exp(np.random.uniform(0, 4)))
            regularizer = np.exp(np.random.uniform(-12, 0))
            key = (algo, learning_rate, momentum, halflife, regularizer)
            yield key, build_and_trace(
                algo, init, limit, momentum=momentum,
                learning_rate=learning_rate, rms_halflife=halflife,
                rms_regularizer=regularizer)
|
masterkeywikz/seq2graph
|
src/downhill-0.2.2/examples/rosenbrock.py
|
Python
|
mit
| 3,403
|
# -*- coding: utf-8 -*-
from docpool.api.testing import DOCPOOL_API_ACCEPTANCE_TESTING # noqa
from plone.app.testing import ROBOT_TEST_LEVEL
from plone.testing import layered
import os
import robotsuite
import unittest
def test_suite():
    """Collect every robot/test_*.robot file into one layered unittest suite."""
    suite = unittest.TestSuite()
    robot_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'robot')
    for file_name in os.listdir(robot_dir):
        if not (file_name.startswith('test_') and file_name.endswith('.robot')):
            continue
        robot_suite = robotsuite.RobotTestSuite(os.path.join('robot', file_name))
        robot_suite.level = ROBOT_TEST_LEVEL
        suite.addTests([layered(robot_suite, layer=DOCPOOL_API_ACCEPTANCE_TESTING)])
    return suite
|
OpenBfS/dokpool-plone
|
Plone/src/docpool.api/src/docpool/api/tests/test_robot.py
|
Python
|
gpl-3.0
| 801
|
from pymongo import MongoClient
from bson.objectid import ObjectId
from twisted.trial.unittest import TestCase
from tests.utils import ObjectMaker
from vusion.component import DialogueWorkerPropertyHelper, PrintLogger
from vusion.persist import RequestManager, Request
from vusion.persist.action import OptinAction, FeedbackAction, Actions
from vusion.context import Context
class TestRequestManager(TestCase, ObjectMaker):
    """Integration tests for RequestManager against a local MongoDB.

    NOTE(review): requires a reachable mongod; the 'test_program_db'
    database is dropped between tests via tearDown().
    """

    def setUp(self):
        # Fresh manager bound to a dedicated test database/collection.
        self.database_name = 'test_program_db'
        c = MongoClient(w=1)
        db = c[self.database_name]
        self.request_manager = RequestManager(db, 'requests')
        #parameters:
        self.property_helper = DialogueWorkerPropertyHelper(None, None)
        self.property_helper['timezone'] = 'Africa/Kampala'
        self.request_manager.set_property_helper(self.property_helper)

    def tearDown(self):
        self.clearData()

    def clearData(self):
        # Drop the collection so each test starts from an empty state.
        self.request_manager.drop()

    def test_load_requests(self):
        # Two persisted requests should both be loaded into memory.
        request_join_id = self.request_manager.save(self.mkobj_request_join())
        request_leave_id = self.request_manager.save(self.mkobj_request_leave())
        self.request_manager.load_requests()
        self.assertEqual(
            len(self.request_manager.loaded_requests),
            2)

    def test_load_request(self):
        # Reloading a single id picks up in-database edits, and reloading a
        # removed id evicts it from the cache.
        request_id = self.request_manager.save(self.mkobj_request_join())
        self.request_manager.load_requests()
        request_leave = self.mkobj_request_leave()
        request_leave['_id'] = request_id
        self.request_manager.save(request_leave)
        self.request_manager.load_request(str(request_id))
        self.assertEqual(
            self.request_manager.loaded_requests[str(request_id)]['keyword'],
            request_leave['keyword'])
        self.request_manager.remove({'_id': request_id})
        self.request_manager.load_request(str(request_id))
        self.assertEqual(
            len(self.request_manager.loaded_requests),
            0)

    def test_get_all_keywords(self):
        self.request_manager.save(self.mkobj_request_join())
        self.request_manager.save(self.mkobj_request_leave())
        self.request_manager.load_requests()
        keywords = self.request_manager.get_all_keywords()
        self.assertEqual(
            keywords, ['www', 'quit', 'quitnow'])

    def test_get_matching_request_actions_keyphrase_matching(self):
        # Exact keyphrase "www" matches the join request (not the lazy one)
        # and records its id in the context.
        join_id = self.request_manager.save(self.mkobj_request_join())
        self.request_manager.save(self.mkobj_request_reponse_lazy_matching('www stuff'))
        self.request_manager.load_requests()
        actions = Actions()
        msg = "www"
        context = Context()
        self.request_manager.get_matching_request_actions(msg, actions, context)
        self.assertEqual(4, len(actions))
        self.assertTrue(isinstance(actions[0], OptinAction))
        self.assertEqual(context['request-id'], join_id)

    def test_get_matching_request_actions_keyword_matching(self):
        # Keyword-plus-extra-words matches the lazy request instead.
        self.request_manager.save(self.mkobj_request_join())
        lazy_id = self.request_manager.save(self.mkobj_request_reponse_lazy_matching('www stuff'))
        self.request_manager.load_requests()
        actions = Actions()
        msg = "www somethingelse"
        context = Context()
        self.request_manager.get_matching_request_actions(msg, actions, context)
        self.assertEqual(1, len(actions))
        self.assertTrue(isinstance(actions[0], FeedbackAction))
        self.assertEqual(context['request-id'], lazy_id)
|
texttochange/vusion-backend
|
vusion/persist/request/tests/test_request_manager.py
|
Python
|
bsd-3-clause
| 3,618
|
# -*- coding: utf-8 -*-
import datetime
from dateutil import relativedelta
from email.utils import formataddr, getaddresses
from email_utils import email_get
from openerp import api, fields, models, _
from openerp.exceptions import AccessError, ValidationError
from __builtin__ import False
# Selection values for ticket priority; Odoo stores selection keys as strings.
TICKET_PRIORITY = [
    ('0','Low'),
    ('1','Normal'),
    ('2','High'),
]
class HelpdeskTeam(models.Model):
    """Helpdesk team: owns tickets, stages, members and assignation rules.

    Inherits a mail.alias (via _inherits) so each team can receive tickets
    by email, and mail.thread for chatter/notifications.
    """
    _name = "helpdesk.team"
    _inherit = ['mail.thread', 'ir.needaction_mixin']
    _inherits = {'mail.alias': 'alias_id'}
    _description = "Helpdesk Team"
    _order = 'sequence,name'
    alias_id = fields.Many2one('mail.alias', string='Alias', ondelete="restrict", required=True,
        help="The email address associated with this team. New emails received will automatically create new tickets assigned to the team.")
    name = fields.Char('Helpdesk Team', required=True, translate=True)
    description = fields.Text('About Team', translate=True)
    company_id = fields.Many2one('res.company', string='Company',
        default=lambda self: self.env['res.company']._company_default_get('helpdesk.team'))
    sequence = fields.Integer(default=10)
    color = fields.Integer('Color Index')
    stage_ids = fields.Many2many('helpdesk.stage', relation='team_stage_rel', string='Stages',
        default=[(0, 0, {'name': 'New', 'sequence': 0})],
        help="Stages the team will use. This team's tickets will only be able to be in these stages.")
    assign_method = fields.Selection([
        ('manual', 'Manually'),
        ('randomly', 'Randomly'),
        ('balanced', 'Balanced')], string='Assignation Method',
        default='manual', required=True,
        help='Automatic assignation method for new tickets:\n'
             '\tManually: manual\n'
             '\tRandomly: randomly but everyone gets the same amount\n'
             '\tBalanced: to the person with the least amount of open tickets')
    member_ids = fields.Many2many('res.users', string='Team Members', domain=lambda self: [('groups_id', 'in', self.env.ref('helpdesk_sicepat.group_helpdesk_user').id)])
    ticket_ids = fields.One2many('helpdesk.ticket', 'team_id', string='Tickets')
    use_alias = fields.Boolean('Email alias')
    use_website_helpdesk_form = fields.Boolean('Website Form')
    use_website_helpdesk_livechat = fields.Boolean('Live chat',
        help="In Channel: You can create a new ticket by typing /helpdesk [ticket title]. You can search ticket by typing /helpdesk_search [Keyword1],[Keyword2],.")
    use_website_helpdesk_forum = fields.Boolean('Help Center')
    use_website_helpdesk_slides = fields.Boolean('eLearning')
    use_website_helpdesk_rating = fields.Boolean('Website Rating')
    use_twitter = fields.Boolean('Twitter')
    use_api = fields.Boolean('API')
    use_rating = fields.Boolean('Ratings')
    use_sla = fields.Boolean('SLA Policies')
    upcoming_sla_fail_tickets = fields.Integer(string='Upcoming SLA Fail Tickets', compute='_compute_upcoming_sla_fail_tickets')
    unassigned_tickets = fields.Integer(string='Unassigned Tickets', compute='_compute_unassigned_tickets')
    percentage_satisfaction = fields.Integer(
        compute="_compute_percentage_satisfaction", string="% Happy", store=True, default=-1)
    parent_id = fields.Many2one('helpdesk.team', string='Parent Team', index=True)
    child_ids = fields.One2many('helpdesk.team', 'parent_id', string='Child Teams')

    # Percentage of 'great' ratings over all rated tickets; -1 when no ratings.
    @api.depends('ticket_ids.rating_ids.rating')
    def _compute_percentage_satisfaction(self):
        for team in self:
            activities = team.ticket_ids.rating_get_grades()
            total_activity_values = sum(activities.values())
            team.percentage_satisfaction = activities['great'] * 100 / total_activity_values if total_activity_values else -1

    # Count of SLA-active tickets whose deadline falls within the next day.
    @api.multi
    def _compute_upcoming_sla_fail_tickets(self):
        ticket_data = self.env['helpdesk.ticket'].read_group([
            ('sla_active', '=', True),
            ('team_id', 'in', self.ids),
            ('deadline', '!=', False),
            ('deadline', '<=', fields.Datetime.to_string((datetime.date.today() + relativedelta.relativedelta(days=1)))),
        ], ['team_id'], ['team_id'])
        mapped_data = dict((data['team_id'][0], data['team_id_count']) for data in ticket_data)
        for team in self:
            team.upcoming_sla_fail_tickets = mapped_data.get(team.id, 0)

    # Count of open tickets with no assigned user, per team.
    @api.multi
    def _compute_unassigned_tickets(self):
        ticket_data = self.env['helpdesk.ticket'].read_group([('user_id', '=', False), ('team_id', 'in', self.ids), ('stage_id.is_close', '!=', True)], ['team_id'], ['team_id'])
        mapped_data = dict((data['team_id'][0], data['team_id_count']) for data in ticket_data)
        for team in self:
            team.unassigned_tickets = mapped_data.get(team.id, 0)

    @api.onchange('member_ids')
    def _onchange_member_ids(self):
        # Without members, automatic assignation makes no sense.
        if not self.member_ids:
            self.assign_method = 'manual'

    @api.constrains('assign_method', 'member_ids')
    def _check_member_assignation(self):
        if not self.member_ids and self.assign_method != 'manual':
            raise ValidationError(_("You must have team members assigned to change the assignation method."))

    @api.onchange('use_alias')
    def _onchange_use_alias(self):
        if not self.alias_name:
            self.alias_name = self.env['mail.alias']._clean_and_make_unique(self.name) if self.use_alias else False

    @api.model
    def create(self, vals):
        # Create the team with a mail alias targeting helpdesk.ticket, then
        # point the alias back at this team so inbound mail defaults team_id.
        create_ctx = dict(
            mail_create_nolog=True,
            mail_create_nosubscribe=True,
            alias_model_name='helpdesk.ticket',
            alias_parent_model_name=self._name
        )
        team = super(HelpdeskTeam, self.with_context(create_ctx)).create(vals)
        team.alias_id.write({
            'alias_parent_thread_id': team.id,
            'alias_defaults': {'team_id': team.id},
        })
        team.sudo()._check_sla_group()
        # team.sudo()._check_modules_to_install()
        # If you plan to add something after this, use a new environment. The one above is no longer valid after the modules install.
        return team

    @api.multi
    def write(self, vals):
        result = super(HelpdeskTeam, self).write(vals)
        self.sudo()._check_sla_group()
        # self.sudo()._check_modules_to_install()
        # If you plan to add something after this, use a new environment. The one above is no longer valid after the modules install.
        return result

    @api.multi
    def unlink(self):
        # Remove stages used only by the deleted teams, plus their aliases.
        stages = self.mapped('stage_ids').filtered(lambda stage: stage.team_ids <= self)
        stages.unlink()
        aliases = self.mapped('alias_id')
        aliases.unlink()
        return super(HelpdeskTeam, self).unlink()

    @api.multi
    def _check_sla_group(self):
        # Keep the group_use_sla security group and per-team SLA records in
        # sync with each team's use_sla flag.
        for team in self:
            if team.use_sla and not self.user_has_groups('helpdesk_sicepat.group_use_sla'):
                self.env.ref('helpdesk_sicepat.group_helpdesk_user').write({'implied_ids': [(4, self.env.ref('helpdesk_sicepat.group_use_sla').id)]})
            if team.use_sla:
                self.env['helpdesk.sla'].with_context(active_test=False).search([('team_id', '=', team.id), ('active', '=', False)]).write({'active': True})
            else:
                self.env['helpdesk.sla'].search([('team_id', '=', team.id)]).write({'active': False})
        # If no team uses SLA anymore, drop the implied group and its members.
        if not self.search_count([('use_sla', '=', True)]):
            self.env.ref('helpdesk_sicepat.group_helpdesk_user').write({'implied_ids': [(3, self.env.ref('helpdesk_sicepat.group_use_sla').id)]})
            self.env.ref('helpdesk_sicepat.group_use_sla').write({'users': [(5, 0, 0)]})

    @api.multi
    def _check_modules_to_install(self):
        # Install optional website_helpdesk_* modules on demand; currently
        # not called (see the commented-out calls in create()/write()).
        module_installed = False
        for team in self:
            form_module = self.env['ir.module.module'].search([('name', '=', 'website_helpdesk_form')])
            if self.use_website_helpdesk_form and form_module.state not in ('installed', 'to install', 'to upgrade'):
                form_module.button_immediate_install()
                module_installed = True
            livechat_module = self.env['ir.module.module'].search([('name', '=', 'website_helpdesk_livechat')])
            if self.use_website_helpdesk_livechat and livechat_module.state not in ('installed', 'to install', 'to upgrade'):
                livechat_module.button_immediate_install()
                module_installed = True
            forum_module = self.env['ir.module.module'].search([('name', '=', 'website_helpdesk_forum')])
            if self.use_website_helpdesk_forum and forum_module.state not in ('installed', 'to install', 'to upgrade'):
                forum_module.button_immediate_install()
                module_installed = True
            slides_module = self.env['ir.module.module'].search([('name', '=', 'website_helpdesk_slides')])
            if self.use_website_helpdesk_slides and slides_module.state not in ('installed', 'to install', 'to upgrade'):
                slides_module.button_immediate_install()
                module_installed = True
            rating_module = self.env['ir.module.module'].search([('name', '=', 'website_helpdesk')])
            if self.use_website_helpdesk_rating and rating_module.state not in ('installed', 'to install', 'to upgrade'):
                rating_module.button_immediate_install()
                module_installed = True
        # just in case we want to do something if we install a module. (like a refresh ...)
        return module_installed

    # will be deleted
    @api.model
    def retrieve_dashboard(self):
        # Aggregate the current user's open/closed tickets into the KPI
        # dictionary consumed by the dashboard client view.
        domain = [('user_id', '=', self.env.uid)]
        group_fields = ['priority', 'create_date', 'stage_id', 'close_hours']
        #TODO: remove SLA calculations if user_uses_sla is false.
        user_uses_sla = self.user_has_groups('helpdesk_sicepat.group_use_sla') and\
            bool(self.env['helpdesk.team'].search([('use_sla', '=', True), '|', ('member_ids', 'in', self._uid), ('member_ids', '=', False)]))
        if user_uses_sla:
            group_fields.insert(1, 'sla_fail')
        HelpdeskTicket = self.env['helpdesk.ticket']
        tickets = HelpdeskTicket.read_group(domain + [('stage_id.is_close', '=', False)], group_fields, group_fields, lazy=False)
        result = {
            'helpdesk_target_closed': self.env.user.helpdesk_target_closed,
            'helpdesk_target_rating': self.env.user.helpdesk_target_rating,
            'helpdesk_target_success': self.env.user.helpdesk_target_success,
            'today': {'count': 0, 'rating': 0, 'success': 0},
            '7days': {'count': 0, 'rating': 0, 'success': 0},
            'my_all': {'count': 0, 'hours': 0, 'failed': 0},
            'my_high': {'count': 0, 'hours': 0, 'failed': 0},
            'my_urgent': {'count': 0, 'hours': 0, 'failed': 0},
            'show_demo': not bool(HelpdeskTicket.search([], limit=1)),
            'rating_enable': False,
            'success_rate_enable': user_uses_sla
        }
        def add_to(ticket, key="my_all"):
            result[key]['count'] += ticket['__count']
            result[key]['hours'] += ticket['close_hours']
            if ticket.get('sla_fail'):
                result[key]['failed'] += ticket['__count']
        # NOTE(review): ('2') / ('3') are plain strings, so `in` is a
        # substring test, and TICKET_PRIORITY only defines '0'..'2' — the
        # '3' branch looks unreachable; confirm the intended priority scale.
        for ticket in tickets:
            add_to(ticket, 'my_all')
            if ticket['priority'] in ('2'):
                add_to(ticket, 'my_high')
            if ticket['priority'] in ('3'):
                add_to(ticket, 'my_urgent')
        dt = fields.Date.today()
        tickets = HelpdeskTicket.read_group(domain + [('stage_id.is_close', '=', True), ('close_date', '>=', dt)], group_fields, group_fields, lazy=False)
        for ticket in tickets:
            result['today']['count'] += ticket['__count']
            if not ticket.get('sla_fail'):
                result['today']['success'] += ticket['__count']
        dt = fields.Datetime.to_string((datetime.date.today() - relativedelta.relativedelta(days=6)))
        tickets = HelpdeskTicket.read_group(domain + [('stage_id.is_close', '=', True), ('close_date', '>=', dt)], group_fields, group_fields, lazy=False)
        for ticket in tickets:
            result['7days']['count'] += ticket['__count']
            if not ticket.get('sla_fail'):
                result['7days']['success'] += ticket['__count']
        # Convert raw counters to percentages / per-ticket averages.
        result['today']['success'] = (result['today']['success'] * 100) / (result['today']['count'] or 1)
        result['7days']['success'] = (result['7days']['success'] * 100) / (result['7days']['count'] or 1)
        result['my_all']['hours'] = result['my_all']['hours'] / (result['my_all']['count'] or 1)
        result['my_high']['hours'] = result['my_high']['hours'] / (result['my_high']['count'] or 1)
        result['my_urgent']['hours'] = result['my_urgent']['hours'] / (result['my_urgent']['count'] or 1)
        if self.env['helpdesk.team'].search([('use_rating', '=', True), '|', ('member_ids', 'in', self._uid), ('member_ids', '=', False)]):
            result['rating_enable'] = True
            # rating of today
            domain = [('user_id', '=', self.env.uid)]
            dt = fields.Date.today()
            tickets = self.env['helpdesk.ticket'].search(domain + [('stage_id.is_close', '=', True), ('close_date', '>=', dt)])
            activity = tickets.rating_get_grades()
            total_rating = self.compute_activity_avg(activity)
            total_activity_values = sum(activity.values())
            team_satisfaction = round((total_rating / total_activity_values if total_activity_values else 0), 2)
            if team_satisfaction:
                result['today']['rating'] = team_satisfaction
            # rating of last 7 days (6 days + today)
            dt = fields.Datetime.to_string((datetime.date.today() - relativedelta.relativedelta(days=6)))
            tickets = self.env['helpdesk.ticket'].search(domain + [('stage_id.is_close', '=', True), ('close_date', '>=', dt)])
            activity = tickets.rating_get_grades()
            total_rating = self.compute_activity_avg(activity)
            total_activity_values = sum(activity.values())
            team_satisfaction_7days = round((total_rating / total_activity_values if total_activity_values else 0), 2)
            if team_satisfaction_7days:
                result['7days']['rating'] = team_satisfaction_7days
        return result

    @api.multi
    def action_view_ticket_rating(self):
        """ return the action to see all the rating about the tickets of the Team """
        domain = [('team_id', 'in', self.ids)]
        if self.env.context.get('seven_days'):
            domain += [('close_date', '>=', fields.Datetime.to_string((datetime.date.today() - relativedelta.relativedelta(days=6))))]
        elif self.env.context.get('today'):
            domain += [('close_date', '>=', fields.Datetime.to_string(datetime.date.today()))]
        if self.env.context.get('helpdesk'):
            domain += [('user_id', '=', self._uid), ('stage_id.is_close', '=', True)]
        ticket_ids = self.env['helpdesk.ticket'].search(domain).ids
        domain = [('res_id', 'in', ticket_ids), ('rating', '!=', -1), ('res_model', '=', 'helpdesk.ticket')]
        action = self.env.ref('rating.action_view_rating').read()[0]
        action['domain'] = domain
        return action

    @api.model
    def helpdesk_rating_today(self):
        # call this method of on click "Customer Rating" button on dashbord for today rating of teams tickets
        return self.search(['|', ('member_ids', 'in', self._uid), ('member_ids', '=', False)]).with_context(helpdesk=True, today=True).action_view_ticket_rating()

    @api.model
    def helpdesk_rating_7days(self):
        # call this method of on click "Customer Rating" button on dashbord for last 7days rating of teams tickets
        return self.search(['|', ('member_ids', 'in', self._uid), ('member_ids', '=', False)]).with_context(helpdesk=True, seven_days=True).action_view_ticket_rating()

    @api.multi
    def action_view_all_rating(self):
        """ return the action to see all the rating about the all sort of activity of the team (tickets) """
        return self.action_view_ticket_rating()

    @api.multi
    def action_unhappy_rating_ticket(self):
        # Open the current user's tickets for this team that got a bad rating.
        self.ensure_one()
        action = self.env.ref('helpdesk_sicepat.helpdesk_ticket_action_main').read()[0]
        action['domain'] = [('team_id', '=', self.id), ('user_id', '=', self.env.uid), ('rating_ids.rating', '=', 1)]
        action['context'] = {'default_team_id': self.id}
        return action

    @api.model
    def compute_activity_avg(self, activity):
        # compute average base on all rating value
        # like: 5 great, 2 okey, 1 bad
        # great = 10, okey = 5, bad = 0
        # (5*10) + (2*5) + (1*0) = 60 / 8 (nuber of activity for rating)
        great = activity['great'] * 10.00
        okey = activity['okay'] * 5.00
        bad = activity['bad'] * 0.00
        return great + okey + bad

    @api.model
    def modify_target_helpdesk_team_dashboard(self, target_name, target_value):
        # Persist a dashboard KPI target on the current user's record.
        if target_name:
            self.env.user.sudo().write({target_name: target_value})
        else:
            raise ValidationError(_('This target does not exist.'))

    @api.multi
    def get_new_user(self):
        """Pick the next assignee for a new ticket per assign_method."""
        self.ensure_one()
        new_user = self.env['res.users']
        member_ids = sorted(self.member_ids.ids)
        if member_ids:
            if self.assign_method == 'randomly':
                # randomly means new ticketss get uniformly distributed
                previous_assigned_user = self.env['helpdesk.ticket'].search([('team_id', '=', self.id)], order='create_date desc', limit=1).user_id
                # handle the case where the previous_assigned_user has left the team (or there is none).
                if previous_assigned_user and previous_assigned_user.id in member_ids:
                    previous_index = member_ids.index(previous_assigned_user.id)
                    new_user = new_user.browse(member_ids[(previous_index + 1) % len(member_ids)])
                else:
                    new_user = new_user.browse(member_ids[0])
            elif self.assign_method == 'balanced':
                read_group_res = self.env['helpdesk.ticket'].read_group([('stage_id.is_close', '=', False), ('user_id', 'in', member_ids)], ['user_id'], ['user_id'])
                # add all the members in case a member has no more open tickets (and thus doesn't appear in the previous read_group)
                count_dict = dict((m_id, 0) for m_id in member_ids)
                count_dict.update((data['user_id'][0], data['user_id_count']) for data in read_group_res)
                new_user = new_user.browse(min(count_dict, key=count_dict.get))
        return new_user
class HelpdeskStage(models.Model):
    """Kanban stage a helpdesk ticket moves through; shared across teams."""
    _name = 'helpdesk.stage'
    _description = 'Stage'
    _order = 'sequence, id'

    def _get_default_team_ids(self):
        # When created from a team's context, link the new stage to that team.
        team_id = self.env.context.get('default_team_id')
        if team_id:
            return [(4, team_id, 0)]

    name = fields.Char(required=True)
    sequence = fields.Integer('Sequence', default=10)
    blocked = fields.Boolean(help='Tickets in this stage will automatically blocked.')
    is_close = fields.Boolean(
        'Closing Kanban Stage',
        help='Tickets in this stage are considered as done. This is used notably when '
             'computing SLAs and KPIs on tickets.')
    fold = fields.Boolean(
        'Folded', help='Folded in kanban view')
    team_ids = fields.Many2many(
        'helpdesk.team', relation='team_stage_rel', string='Team',
        default=_get_default_team_ids,
        help='Specific team that uses this stage. Other teams will not be able to see or use this stage.')
    template_id = fields.Many2one(
        'email.template', 'Automated Answer Email Template',
        domain="[('model', '=', 'helpdesk.ticket')]",
        help="Automated email sent to the ticket's customer when the ticket reaches this stage.")
class HelpdeskTicketSource(models.Model):
    """Origin of a ticket (e.g. email, phone); supports one level of nesting."""
    _name = 'helpdesk.ticket.source'
    _description = 'Ticket Source'
    _order = 'parent_id, name'
    name = fields.Char(required=True, translate=True)
    default = fields.Boolean(string='Set as default ticket source', default=False)
    sequence_id = fields.Many2one('ir.sequence', string='Default ticket number for this source')
    parent_id = fields.Many2one('helpdesk.ticket.source', 'Parent', domain="[('parent_id', '=', False)]", ondelete="restrict")
    _sql_constraints = [
        ('name_uniq', 'unique (name)', _("Source name already exists !")),
    ]

    @api.constrains('default')
    def _check_default(self):
        # Enforce a single default source by un-setting all the others.
        # NOTE(review): ensure_one() will raise on a multi-record constraint
        # check, and writing inside a constraint is a side effect — confirm.
        self.ensure_one()
        if self.default:
            Ticketsource = self.search([('id', '!=', self.id), ('default', '=', True)])
            Ticketsource.write({'default': False})

    @api.constrains('parent_id')
    def _check_parent_id(self):
        # Only one level of hierarchy: no cycles, and no children on a child.
        if not self._check_recursion():
            raise ValidationError(_('Error ! You cannot create a recursive Ticket Source.'))
        if self.parent_id:
            Children = self.search([('parent_id', '=', self.id)])
            if Children:
                raise ValidationError(_('Error ! You cannot choose parent for this Ticket Source.'))
class HelpdeskTicketType(models.Model):
    """Type of a helpdesk ticket (used for SLA matching and mail automation)."""
    _name = 'helpdesk.ticket.type'
    _description = 'Ticket Type'
    _order = 'sequence'

    name = fields.Char(required=True, translate=True)
    sequence = fields.Integer(default=10)
    # When set, stage templates are actually sent for tickets of this type
    # (checked in HelpdeskTicket.write).
    send_mail = fields.Boolean('Send Email Automatically')

    _sql_constraints = [
        ('name_uniq', 'unique (name)', _("Type name already exists !")),
    ]
class HelpdeskTicketCategory(models.Model):
    """Free-form category used to classify helpdesk tickets."""
    _name = 'helpdesk.ticket.category'
    _description = 'Ticket Category'
    _order = 'sequence'

    name = fields.Char(required=True, translate=True)
    sequence = fields.Integer(default=10)

    _sql_constraints = [
        ('name_uniq', 'unique (name)', _("Category name already exists !")),
    ]
class ConsignmentCondition(models.Model):
    """Condition of a consignment (referenced by helpdesk.ticket)."""
    _name = 'consignment.condition'
    _description = 'Consignment Condition'
    _order = 'sequence'

    name = fields.Char(required=True, translate=True)
    sequence = fields.Integer(default=10)

    _sql_constraints = [
        ('name_uniq', 'unique (name)', _("Condition name already exists !")),
    ]
class HelpdeskTag(models.Model):
    """Colored tag attachable to helpdesk tickets."""
    _name = 'helpdesk.tag'
    _description = 'Tags'
    _order = 'name'

    name = fields.Char(required=True)
    # Kanban color index.
    color = fields.Integer('Color')

    _sql_constraints = [
        ('name_uniq', 'unique (name)', _("Tag name already exists !")),
    ]
class HelpdeskSLA(models.Model):
    """SLA policy: a deadline (days/hours/minutes after ticket creation) by
    which a ticket of the matching team/type/priority must reach the target
    stage. Consumed by HelpdeskTicket._compute_sla / _compute_sla_fail.
    """
    _name = "helpdesk.sla"
    _order = "name"
    _description = "Helpdesk SLA Policies"

    name = fields.Char('SLA Policy Name', required=True, index=True)
    description = fields.Text('SLA Policy Description')
    active = fields.Boolean('Active', default=True)
    team_id = fields.Many2one('helpdesk.team', 'Team', required=True)
    # Team the ticket is moved to when the SLA escalates it (see _compute_sla).
    team_escalate_id = fields.Many2one('helpdesk.team', 'Escalate')
    ticket_type_id = fields.Many2one(
        'helpdesk.ticket.type', "Ticket Type",
        help="Only apply the SLA to a specific ticket type. If left empty it will apply to all types.")
    stage_id = fields.Many2one(
        'helpdesk.stage', 'Target Stage', required=True,
        help='Minimum stage a ticket needs to reach in order to satisfy this SLA.')
    priority = fields.Selection(
        TICKET_PRIORITY, string='Minimum Priority',
        default='0', required=True,
        help='Tickets under this priority will not be taken into account.')
    company_id = fields.Many2one('res.company', 'Company', related='team_id.company_id', readonly=True, store=True)
    time_days = fields.Integer('Days', default=0, required=True, help="Days to reach given stage based on ticket creation date")
    time_hours = fields.Integer('Hours', default=0, required=True, help="Hours to reach given stage based on ticket creation date")
    time_minutes = fields.Integer('Minutes', default=0, required=True, help="Minutes to reach given stage based on ticket creation date")

    @api.onchange('time_hours')
    def _onchange_time_hours(self):
        """Carry hours >= 24 over into days (UI convenience)."""
        if self.time_hours >= 24:
            # Explicit floor division: identical to '/' under Python 2 for
            # ints, and still correct if ever run under Python 3.
            self.time_days += self.time_hours // 24
            self.time_hours = self.time_hours % 24

    @api.onchange('time_minutes')
    def _onchange_time_minutes(self):
        """Carry minutes >= 60 over into hours (UI convenience)."""
        if self.time_minutes >= 60:
            self.time_hours += self.time_minutes // 60
            self.time_minutes = self.time_minutes % 60
class HelpdeskTicket(models.Model):
    """A customer support ticket.

    Inherits mail.thread (chatter / incoming mail), ir.needaction_mixin and
    rating.mixin. Mixes the old (cr/uid) and new Odoo 8/9 APIs: message_post
    is still old-style. SLA assignment, deadline computation and escalation
    happen in _compute_sla / _compute_sla_fail; assignation/closing dates are
    maintained in create()/write().
    """
    _name = 'helpdesk.ticket'
    _description = 'Ticket'
    _order = 'message_last_post desc, priority desc, id desc'
    _inherit = ['mail.thread', 'ir.needaction_mixin', 'rating.mixin']
    # Old-API tracking map: post the given message subtype when the watched
    # field changes and the lambda returns truthy.
    _track = {
        'user_id': {
            'helpdesk_sicepat.mt_ticket_assigned': lambda self, cr, uid, obj, ctx=None: obj.user_id and obj.user_id.id,
        },
        'stage_id': {
            # this is only an heuristics; depending on your particular stage configuration it may not match all 'new' stages
            'helpdesk_sicepat.mt_ticket_new': lambda self, cr, uid, obj, ctx=None: obj.stage_id and obj.stage_id.sequence < 1,
            'helpdesk_sicepat.mt_ticket_stage': lambda self, cr, uid, obj, ctx=None: obj.stage_id and obj.stage_id.sequence >= 1,
        },
    }

    @api.model
    def default_get(self, fields):
        # When a default team is known, also default user_id/stage_id from it
        # (unless the caller already supplied them).
        res = super(HelpdeskTicket, self).default_get(fields)
        if res.get('team_id'):
            update_vals = self._onchange_team_get_values(self.env['helpdesk.team'].browse(res['team_id']))
            if (not fields or 'user_id' in fields) and 'user_id' not in res:
                res['user_id'] = update_vals['user_id']
            if (not fields or 'stage_id' in fields) and 'stage_id' not in res:
                res['stage_id'] = update_vals['stage_id']
        return res

    def _default_team_id(self):
        # Preference order: context team, then a team the current user is a
        # member of, finally any team at all.
        team_id = self._context.get('default_team_id')
        if not team_id:
            team_id = self.env['helpdesk.team'].search([('member_ids', 'in', self.env.uid)], limit=1).id
        if not team_id:
            team_id = self.env['helpdesk.team'].search([], limit=1).id
        return team_id

    def _default_ticket_source_id(self):
        # The source flagged as default (at most one, see helpdesk.ticket.source).
        ticket_source = self.env['helpdesk.ticket.source'].search([('default', '=', True)], limit=1)
        return ticket_source and ticket_source.id or False

    @api.model
    def _resolve_team_id_from_context(self):
        """ Returns ID of team based on the value of 'default_team_id'
        context key, or None if it cannot be resolved to a single
        team.
        """
        # Python 2 only: relies on `long` and `basestring`.
        if type(self.env.context.get('default_team_id')) in (int, long):
            return self.env.context['default_team_id']
        if isinstance(self.env.context.get('default_team_id'), basestring):
            team_name = self.env.context['default_team_id']
            teams = self.env['helpdesk.team'].name_search(name=team_name)
            if len(teams) == 1:
                return int(teams[0][0])
        return None

    @api.model
    def _read_group_stage_ids(self, present_ids, domain, read_group_order=None, access_rights_uid=None):
        """Return every stage to show as a kanban column (old-API
        _group_by_full hook), plus each stage's folded flag."""
        # retrieve stage_id from the context and write the domain
        # - ('id', 'in', 'ids'): add columns that should be present
        # - OR ('team_ids', '=', team_id), ('fold', '=', False) if team_id: add team columns that are not folded
        access_rights_uid = access_rights_uid or self.env.uid
        Stage = self.env['helpdesk.stage']
        order = Stage._order
        # lame hack to allow reverting search, should just work in the trivial case
        if read_group_order == 'stage_id desc':
            order = "%s desc" % order
        search_domain = [('id', 'in', present_ids)]
        team_id = self._resolve_team_id_from_context()
        if team_id:
            search_domain = ['|', ('team_ids', '=', team_id)] + search_domain
        # perform search
        stage_ids = Stage._search(search_domain, order=order, access_rights_uid=access_rights_uid)
        stages = Stage.sudo(access_rights_uid).browse(stage_ids)
        result = stages.name_get()
        # restore order of the search (Python 2 cmp-style sort)
        result.sort(lambda x, y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0])))
        fold = {}
        for stage in stages:
            fold[stage.id] = stage.fold or False
        return result, fold

    # Old-API hook: show all stages as kanban columns when grouping by stage.
    _group_by_full = {
        'stage_id': _read_group_stage_ids,
    }

    name = fields.Char(string='Subject', required=True, index=True, track_visibility='onchange')
    # Reference number; only assigned once the ticket leaves the first stage
    # (see write()), pulled from the source's sequence.
    number = fields.Char(string='Number of ticket', copy=False, readonly=True, track_visibility='onchange')
    team_id = fields.Many2one('helpdesk.team', string='Helpdesk Team', default=_default_team_id, track_visibility='onchange', index=True)
    description = fields.Text(track_visibility='onchange')
    active = fields.Boolean(default=True)
    ticket_source_id = fields.Many2one('helpdesk.ticket.source', string="Ticket Source", default=_default_ticket_source_id, track_visibility='onchange', domain="[('parent_id', '!=', False)]")
    parent_ticket_source_id = fields.Many2one(related='ticket_source_id.parent_id', string='Source Category', store=True)
    ticket_type_id = fields.Many2one('helpdesk.ticket.type', string="Ticket Type", track_visibility='onchange')
    tag_ids = fields.Many2many('helpdesk.tag', string='Tags', track_visibility='onchange')
    company_id = fields.Many2one(related='team_id.company_id', string='Company', store=True, readonly=True)
    color = fields.Integer(string='Color Index')
    kanban_state = fields.Selection([
        ('normal', 'Normal'),
        ('blocked', 'Blocked'),
        ('done', 'Ready for next stage')], string='Kanban State',
        default='normal', required=True, track_visibility='onchange',
        help="A ticket's kanban state indicates special situations affecting it:\n"
             "* Normal is the default situation\n"
             "* Blocked indicates something is preventing the progress of this issue\n"
             "* Ready for next stage indicates the issue is ready to be pulled to the next stage")
    user_id = fields.Many2one('res.users', string='Assigned to', track_visibility='onchange', domain="[('team_ids', '=', team_id)]")
    partner_id = fields.Many2one('res.partner', string='Customer', track_visibility='onchange')
    partner_tickets = fields.Integer('Number of tickets from the same partner', compute='_compute_partner_tickets')
    # Used to submit tickets from a contact form
    partner_name = fields.Char(string='Customer Name', track_visibility='onchange')
    partner_email = fields.Char(string='Customer Email', track_visibility='onchange')
    partner_phone = fields.Char(string='Customer Phone', track_visibility='onchange')
    # Used in message_get_default_recipients, so if no partner is created, email is sent anyway
    email = fields.Char(related='partner_email', string='Email')
    partner_type = fields.Selection([
        ('recipient','Recipient'),
        ('customer','Customer'),
        ('internal','Internal'),
    ], string='Contact Type', default='customer', track_visibility='onchange')
    ticket_category_id = fields.Many2one('helpdesk.ticket.category', string='Category', track_visibility='onchange')
    fault_ticket = fields.Selection([
        ('origin','Origin'),
        ('destination','Destination'),
    ], string='Fault', default='origin', track_visibility='onchange')
    priority = fields.Selection(TICKET_PRIORITY, string='Priority', default='0')
    stage_id = fields.Many2one('helpdesk.stage', string='Stage', copy=False, index=True, track_visibility='onchange', domain="[('team_ids', '=', team_id)]")
    # next 4 fields are computed in write (or create)
    assign_date = fields.Datetime(string='First assignation date')
    assign_hours = fields.Integer(string='Time to first assignation (hours)', compute='_compute_assign_hours', store=True)
    close_date = fields.Datetime(string='Close date')
    close_hours = fields.Integer(string='Open Time (hours)', compute='_compute_close_hours', store=True)
    sla_id = fields.Many2one('helpdesk.sla', string='SLA Policy', compute='_compute_sla', store=True)
    sla_name = fields.Char(string='SLA Policy name', compute='_compute_sla', store=True)  # care if related -> crash on creation with a team.
    deadline = fields.Datetime(string='Deadline', compute='_compute_sla', store=True)
    sla_active = fields.Boolean(string='SLA active', compute='_compute_sla_fail', store=True)
    sla_fail = fields.Boolean(string='Failed SLA Policy', compute='_compute_sla_fail', store=True)
    # Consignment / replacement specific fields (SiCepat business flow).
    ticket_solution = fields.Selection([
        ('replacement', 'Dengan Penggantian'),
        ('no_replacement', 'Tanpa Penggantian')
    ], string='Resolution', default='no_replacement', required=True, track_visibility='onchange')
    transfer = fields.Boolean('Bank Transfer', track_visibility='onchange')
    goods = fields.Char('Goods of Consignment', track_visibility='onchange')
    goods_value = fields.Float('Goods Value', track_visibility='onchange')
    goods_replacement_amt = fields.Float('Replacement Amount', track_visibility='onchange')
    replacement_by = fields.Selection([
        ('ho', 'Head Office'),
        ('branch', 'Branch'),
        ('jne', 'JNE'),
    ], string='Replaced By', default='ho', track_visibility='onchange')
    notes = fields.Text('Notes', track_visibility='onchange')
    responsible_type = fields.Selection([
        ('internal', 'Internal'),
        ('external', 'External')
    ], string='Responsible Type', default='internal', required=True, track_visibility='onchange')
    responsible_id = fields.Many2one('hr.employee', string='User Responsible', track_visibility='onchange')
    responsible_user = fields.Char(string='User Responsible', track_visibility='onchange')
    goods_return = fields.Boolean('Goods Returned?', track_visibility='onchange')
    consignment_cond_id = fields.Many2one('consignment.condition', string='Condition of Consignment', track_visibility='onchange')
    status_process = fields.Selection([
        ('refund', 'Refund'),
        ('return', 'Return'),
    ], string='Status Process', default='refund', track_visibility='onchange')
    refund = fields.Boolean('Refund?', track_visibility='onchange')

    def _onchange_team_get_values(self, team):
        # Defaults derived from the selected team: round-robin assignee and
        # the team's first stage by sequence.
        return {
            'user_id': team.get_new_user().id,
            'stage_id': self.env['helpdesk.stage'].search([('team_ids', 'in', team.id)], order='sequence', limit=1).id
        }

    @api.onchange('team_id')
    def _onchange_team_id(self):
        if self.team_id:
            values = self._onchange_team_get_values(self.team_id)
            self.update(values)

    @api.onchange('partner_id')
    def _onchange_partner_id(self):
        # Mirror the partner's name/email into the contact-form fields.
        if self.partner_id:
            self.partner_name = self.partner_id.name
            self.partner_email = self.partner_id.email

    @api.depends('partner_id')
    def _compute_partner_tickets(self):
        # Count of other OPEN tickets from the same partner.
        # self.ensure_one()
        ticket_data = False
        for ticket in self:
            ticket_data = ticket.read_group([
                ('partner_id', '=', ticket.partner_id.id),
                ('stage_id.is_close', '=', False)
            ], ['partner_id'], ['partner_id'])
            if ticket_data:
                ticket.partner_tickets = ticket_data[0]['partner_id_count']

    @api.depends('assign_date')
    def _compute_assign_hours(self):
        # NOTE(review): measures now() - create_date, not assign_date -
        # create_date; presumably intentional since it is set to 0 on
        # assignment in create() — confirm with the module author.
        for ticket in self:
            time_difference = datetime.datetime.now() - fields.Datetime.from_string(ticket.create_date)
            ticket.assign_hours = (time_difference.seconds) / 3600 + time_difference.days * 24

    @api.depends('close_date')
    def _compute_close_hours(self):
        # Elapsed whole hours between creation and closing.
        for ticket in self:
            if not ticket.close_date:
                continue;
            time_difference = fields.Datetime.from_string(ticket.close_date) - fields.Datetime.from_string(ticket.create_date)
            ticket.close_hours = (time_difference.seconds) / 3600 + time_difference.days * 24

    @api.depends('team_id', 'priority', 'ticket_type_id', 'create_date')
    def _compute_sla(self):
        """Match each ticket against the tightest applicable SLA policy and
        derive sla_id/sla_name/deadline; may also escalate the ticket to
        another team."""
        if not self.user_has_groups("helpdesk_sicepat.group_use_sla"):
            return
        # sla_info: team_id -> recordset of its SLAs, tightest deadline first.
        sla_info = {}
        ticket_dict = {}
        for sla in self.env['helpdesk.sla'].search([], order="time_days, time_hours, time_minutes"):
            key = sla.team_id.id
            if sla_info.get(key):
                sla_info[key] += sla
            else:
                sla_info[key] = sla
        # Group tickets by team -> priority -> type so each group is matched once.
        for ticket in self:
            team_id = ticket.team_id and ticket.team_id.id or 'team_id'
            priority = ticket.priority
            ticket_type = ticket.ticket_type_id and ticket.ticket_type_id or 'ticket_type'
            if ticket_dict.get(team_id):
                if ticket_dict[team_id].get(priority):
                    if ticket_dict[team_id][priority].get(ticket_type):
                        ticket_dict[team_id][priority][ticket_type] += ticket
                    else:
                        ticket_dict[team_id][priority][ticket_type] = ticket
                else:
                    ticket_dict[team_id].setdefault(priority, {ticket_type: ticket})
            else:
                ticket_dict.setdefault(team_id, {priority: {ticket_type: ticket}})
        for team_id, t_team_dict in ticket_dict.items():
            if sla_info.get(team_id):
                for priority, t_type_dict in t_team_dict.items():
                    for ticket_type, tickets in t_type_dict.items():
                        # SLA applies if its minimum priority is met and its type
                        # matches (or is unset). sla[0] is the tightest match.
                        sla = [rec for rec in sla_info[team_id] if rec.priority <= priority and (rec.ticket_type_id == ticket_type or not rec.ticket_type_id)]
                        for ticket in tickets:
                            if sla and ticket.active and ticket.create_date:
                                ticket.sla_id = sla[0].id
                                ticket.sla_name = sla[0].name
                                ticket.deadline = fields.Datetime.from_string(ticket.create_date) + relativedelta.relativedelta(days=sla[0].time_days, hours=sla[0].time_hours, minutes=sla[0].time_minutes)
                                # additional
                                if sla[0].escalate_team_method == 'team':
                                    ticket.team_id = sla[0].team_escalate_id.id
                                    ticket._onchange_team_id()

    @api.depends('deadline', 'stage_id')
    def _compute_sla_fail(self):
        # sla_active goes False once the target stage is reached (or no
        # deadline applies); sla_fail is set when it was reached too late.
        if not self.user_has_groups("helpdesk_sicepat.group_use_sla"):
            return
        for ticket in self:
            ticket.sla_active = True
            if not ticket.deadline:
                ticket.sla_active = False
            elif ticket.sla_id.stage_id.sequence <= ticket.stage_id.sequence:
                ticket.sla_active = False
                if fields.Datetime.now() > ticket.deadline:
                    ticket.sla_fail = True

    @api.model
    def create(self, vals):
        # Complete missing values from the team's defaults.
        if vals.get('team_id'):
            vals.update(item for item in self._onchange_team_get_values(self.env['helpdesk.team'].browse(vals['team_id'])).items() if item[0] not in vals)
        # HACK: when created by the mail fetch cron, impersonate the assignee
        # so tracking/notifications are attributed to him.
        if self.env.context.get('fetchmail_cron_running') and vals.get('user_id'):
            self.env.uid = vals['user_id']
        # context: no_log, because subtype already handle this
        ticket = super(HelpdeskTicket, self.with_context(mail_create_nolog=True)).create(vals)
        # Backdate create_date to the email date via raw SQL (the ORM refuses
        # to write magic columns).
        if ticket.env.context.get('fetchmail_cron_running') and ticket.env.context.get('default_date'):
            ticket.env.cr.execute('''UPDATE helpdesk_ticket SET create_date=%s WHERE id=%s''', (ticket.env.context['default_date'], ticket.id))
        if ticket.partner_id:
            ticket.message_subscribe(partner_ids=ticket.partner_id.ids)
            ticket._onchange_partner_id()
        if ticket.user_id:
            ticket.assign_date = ticket.create_date
            ticket.assign_hours = 0
        return ticket

    @api.multi
    def write(self, vals):
        # we set the assignation date (assign_date) to now for tickets that are being assigned for the first time
        # same thing for the closing date
        if vals.get('stage_id') and any(ticket.kanban_state == 'blocked' for ticket in self):
            raise ValidationError(_("You must change Kanban State to 'Normal' or 'Ready for next stage'."))
        for ticket in self:
            if vals.get('stage_id'):
                # First move out of the initial stage: assign the reference
                # number from the ticket source's sequence.
                if not ticket.number and self.env['helpdesk.stage'].browse(vals['stage_id']).sequence >= 1:
                    ticket.number = self.env['ir.sequence'].get_id(ticket.ticket_source_id.sequence_id.id)
                    # vals.update(dict(number=self.env['ir.sequence'].get_id(ticket.ticket_source_id.sequence_id.id)))
        if vals.get('stage_id') and self.env['helpdesk.stage'].browse(vals['stage_id']).blocked:
            vals.update(dict(kanban_state='blocked'))
        assigned_tickets = closed_tickets = self.browse()
        if vals.get('user_id'):
            assigned_tickets = self.filtered(lambda ticket: not ticket.assign_date)
        if vals.get('stage_id') and self.env['helpdesk.stage'].browse(vals.get('stage_id')).is_close:
            closed_tickets = self.filtered(lambda ticket: not ticket.close_date)
        now = datetime.datetime.now()
        # Split the recordset so each subset gets the right extra date values
        # in a single super().write call.
        res = super(HelpdeskTicket, self - assigned_tickets - closed_tickets).write(vals)
        res &= super(HelpdeskTicket, assigned_tickets - closed_tickets).write(dict(vals, **{
            'assign_date': now,
        }))
        res &= super(HelpdeskTicket, closed_tickets - assigned_tickets).write(dict(vals, **{
            'close_date': now,
        }))
        res &= super(HelpdeskTicket, assigned_tickets & closed_tickets).write(dict(vals, **{
            'assign_date': now,
            'close_date': now,
        }))
        # Stage auto-reply template, only for ticket types flagged send_mail.
        for ticket in self:
            if vals.get('stage_id') and self.env['helpdesk.stage'].browse(vals['stage_id']).template_id and ticket.ticket_type_id.send_mail:
                ctx = dict(default_model=self._name, mark_notification_sent=True)
                self.env['helpdesk.stage'].browse(vals['stage_id']).template_id.with_context(ctx).send_mail(ticket.id, force_send=True, raise_exception=True)
        if vals.get('partner_id'):
            self.message_subscribe([vals['partner_id']])
        return res

    @api.multi
    def name_get(self):
        # "Subject (#NUMBER)" once a number has been assigned.
        result = []
        for ticket in self:
            new_name = "%s" % (ticket.name)
            if ticket.number:
                new_name = "%s (#%s)" % (new_name, ticket.number)
            result.append((ticket.id, new_name))
        return result

    # Method to called by CRON to update SLA & statistics
    @api.model
    def recompute_all(self):
        tickets = self.search([('stage_id.is_close', '=', False)])
        tickets._compute_sla()
        tickets._compute_close_hours()
        return True

    @api.multi
    def assign_ticket_to_self(self):
        """Button: take ownership of the ticket."""
        self.ensure_one()
        self.user_id = self.env.user

    @api.multi
    def open_customer_tickets(self):
        """Button: open the list of this customer's open tickets."""
        return {
            'type': 'ir.actions.act_window',
            'name': _('Customer Tickets'),
            'res_model': 'helpdesk.ticket',
            'view_mode': 'kanban,tree,form,graph',
            'context': {'search_default_is_open': True, 'search_default_partner_id': self.partner_id.id}
        }

    @api.multi
    def ticket_escalate(self):
        """Button: open the escalation wizard action."""
        self.ensure_one()
        action = self.env.ref('helpdesk_sicepat.action_ticket_escalate_team', False)
        return action.read()[0]

    #DVE FIXME: if partner gets created when sending the message it should be set as partner_id of the ticket.
    @api.multi
    def message_get_suggested_recipients(self):
        recipients = super(HelpdeskTicket, self).message_get_suggested_recipients()
        try:
            for ticket in self:
                if ticket.partner_id:
                    ticket._message_add_suggested_recipient(recipients, ticket, partner=ticket.partner_id, reason=_('Customer'))
                elif ticket.partner_email:
                    ticket._message_add_suggested_recipient(recipients, ticket, email=ticket.partner_email, reason=_('Customer Email'))
        except AccessError:  # no read access rights -> just ignore suggested recipients because this implies modifying followers
            pass
        return recipients

    @api.model
    def message_new(self, msg, custom_values=None):
        """mail.thread override: build ticket values from an incoming email."""
        values = dict(
            custom_values or {},
            name=msg.get('subject') or _("No Subject"),
            partner_name=email_get(msg.get('from'), email_address=False) and email_get(msg.get('from'), email_address=False)[0] or False,
            partner_email=email_get(msg.get('from'))[0],
            partner_id=msg.get('author_id'),
        )
        # Propagate the email date so create() can backdate create_date.
        if msg.get('date'):
            self = self.with_context(default_date=msg['date'])
        return super(HelpdeskTicket, self).message_new(msg, custom_values=values)

    # Old-API override: checks posting rights and injects per-ticket mail
    # context (dedicated mail server, no auto-delete) before delegating.
    @api.cr_uid_ids_context
    def message_post(self, cr, uid, thread_id, body='', subject=None, type='notification',
                     subtype=None, parent_id=False, attachments=None, context=None,
                     content_subtype='html', **kwargs):
        self.check_post_access(cr, uid, type=type, subtype=subtype, context=context)
        context = dict(context)
        context.update(self.ticket_default(cr, uid, thread_id, context=context))
        # get the right format address of sender if there any
        email_from = kwargs.get('email_from')
        if email_from:
            kwargs.update({'email_from': formataddr(getaddresses([email_from])[0])})
        message_id = super(HelpdeskTicket, self).message_post(cr, uid, thread_id, body=body, subject=subject, type=type,
                                                              subtype=subtype, parent_id=parent_id, attachments=attachments, context=context,
                                                              content_subtype=content_subtype, **kwargs)
        self._message_post_after_hook(cr, uid, message_id)
        return message_id

    @api.model
    def check_post_access(self, type='notification', subtype=None):
        # Only members of group_send_email may post outgoing comments.
        # (Called old-style with cr/uid; the @api.model decorator bridges.)
        if type == 'comment' and subtype and not self.user_has_groups("helpdesk_sicepat.group_send_email"):
            raise AccessError(_("You do not have access to send message."))
        return True

    @api.model
    def ticket_default(self, thread_id):
        # Context values injected into message_post for a given ticket.
        ticket = thread_id and self.browse(thread_id) or False
        if ticket:
            mail_server = ticket.team_id.company_id.mail_server_id
            return {
                'ticket': ticket,
                'ticket_mail_server_id': mail_server.id,
                'mail_auto_delete': False,
            }
        return {}

    @api.model
    def _message2im(self, message):
        """Render a mail.message as the list of IM text lines used by
        _do_notify_im (links to the inbox and to the source record)."""
        inbox_action = self.env.ref('mail.mail_inboxfeeds').id
        inbox_url = '#menu_id=%s' % inbox_action
        url = None
        if message.res_id:
            url = '#id=%s&model=%s&view_type=form' % (
                message.res_id,
                message.model
            )
        author = message.author_id and message.author_id.name_get()
        author = author and author[0][1] or message.email_from
        mtype = {'email': _('Email'),
                 'comment': _('Comment'),
                 'notification': _('System notification'),
                 }.get(message.type, '')
        about = message.subject or message.record_name or 'UNDEFINED'
        about = '[ABOUT] %s' % about
        if url:
            about = '<a href="%s">%s</a>' % (url, about)
        im_text = [
            '_____________________',
            '<a href="%s">_____[open_inbox]_____</a>' % inbox_url,
            '%s [FROM] %s' % (mtype, author),
            about,
        ]
        return im_text

    #TODO: separate this to new module
    @api.model
    def _do_notify_im(self, im_uids, message):
        """Push the rendered message to each user in im_uids through im_chat,
        sent from the dedicated notification user."""
        im_text = self._message2im(message)
        user_from = self.env.ref('helpdesk_sicepat.notif_user').id
        ChatSession = self.env['im_chat.session']
        message_type = 'message'
        for user_to in im_uids:
            session = ChatSession.sudo(user_from).session_get(user_to)
            uuid = session.get('uuid')
            message_content = '\n'.join(im_text)
            self.env['im_chat.message'].sudo().post(user_from, uuid, message_type, message_content)
        return True

    @api.model
    def _message_post_after_hook(self, message_id):
        message = self.env['mail.message'].browse(message_id)
        model, res_id, type = message.model, message.res_id, message.type
        ticket = self.env[model].browse(res_id)
        if ticket.partner_email and not ticket.partner_id:
            # we consider that posting a message with a specified recipient (not a follower, a specific one)
            # on a document without customer means that it was created through the chatter using
            # suggested recipients. This heuristic allows to avoid ugly hacks in JS.
            new_partner = message.partner_ids.filtered(lambda partner: partner.email == ticket.partner_email)
            if new_partner:
                self.search([
                    ('partner_id', '=', False),
                    ('partner_email', '=', new_partner.email),
                    ('stage_id.fold', '=', False)]).write({'partner_id': new_partner.id})
        # IM-notify the assignee, except for his own comments.
        if type == 'email':
            self._do_notify_im([ticket.user_id.id], message)
        else:
            if message.author_id and message.author_id.id != ticket.user_id.partner_id.id:
                self._do_notify_im([ticket.user_id.id], message)

    @api.multi
    def message_get_reply_to(self, default=None):
        # Reply-to preference: team alias, then the company mail server's
        # SMTP user, finally the standard mail.thread behaviour.
        res = {}
        default = default or (self.env.context.get('email_from') and self.env.context['email_from'] or None)
        for ticket in self:
            if ticket.team_id.alias_name and ticket.team_id.alias_domain:
                res[ticket.id] = ticket.team_id.alias_name + '@' + ticket.team_id.alias_domain
            elif ticket.team_id.company_id.mail_server_id:
                res[ticket.id] = ticket.team_id.company_id.mail_server_id.smtp_user
            else:
                res[ticket.id] = super(HelpdeskTicket, self).message_get_reply_to(default=default)[ticket.id]
        return res
|
sumihai-tekindo/helpdesk_sicepat
|
helpdesk_sicepat/models/helpdesk.py
|
Python
|
gpl-3.0
| 51,837
|
#!/usr/bin/python
# Regression test for issue #437 (qgis-versioning): exercises the
# checkout / commit / update cycle on two working copies of the same
# PostGIS database and checks conflict detection and resolution.
# Python 2 script; requires a local PostgreSQL with createdb/psql in PATH.
import versioning_base
from pyspatialite import dbapi2
import psycopg2
import os
import shutil

test_data_dir = os.path.dirname(os.path.realpath(__file__))
tmp_dir = "/tmp"

# create the test database
os.system("dropdb epanet_test_db")
os.system("createdb epanet_test_db")
os.system("psql epanet_test_db -c 'CREATE EXTENSION postgis'")
os.system("psql epanet_test_db -f "+test_data_dir+"/issue437_test_db.sql")

# try the update
# Two sqlite working copies of the same versioned tables.
wc = [tmp_dir+"/issue437_wc0.sqlite", tmp_dir+"/issue437_wc1.sqlite"]
for f in wc:
    if os.path.isfile(f): os.remove(f)
    versioning_base.checkout("dbname=epanet_test_db", ['epanet_trunk_rev_head.junctions', 'epanet_trunk_rev_head.pipes'], f)

# One spatialite cursor per working copy.
scur = []
for f in wc: scur.append(versioning_base.Db( dbapi2.connect( f ) ))

# wc0: insert two pipes and commit them (revision 1), then pull into wc1.
scur[0].execute("INSERT INTO pipes_view(id, start_node, end_node, GEOMETRY) VALUES ('2','1','2',GeomFromText('LINESTRING(1 1,0 1)',2154))")
scur[0].execute("INSERT INTO pipes_view(id, start_node, end_node, GEOMETRY) VALUES ('3','1','2',GeomFromText('LINESTRING(1 -1,0 1)',2154))")
scur[0].commit()

versioning_base.commit( wc[0], 'commit 1 wc0', "dbname=epanet_test_db")
versioning_base.update( wc[1], "dbname=epanet_test_db" )

# Diverging edits: wc0 sets length=1 and commits; wc1 edits the same rows
# twice without pulling first, to provoke conflicts on its next update.
scur[0].execute("UPDATE pipes_view SET length = 1")
scur[0].commit()
scur[1].execute("UPDATE pipes_view SET length = 2")
scur[1].execute("UPDATE pipes_view SET length = 3")
scur[1].commit()

versioning_base.commit( wc[0], "commit 2 wc0", "dbname=epanet_test_db" )

scur[0].execute("SELECT OGC_FID,length,trunk_rev_begin,trunk_rev_end,trunk_parent,trunk_child FROM pipes")
print '################'
for r in scur[0].fetchall():
    print r

# wc0: one more update plus a delete, committed as revision 3.
scur[0].execute("UPDATE pipes_view SET length = 2")
scur[0].execute("DELETE FROM pipes_view WHERE OGC_FID = 6")
scur[0].commit()

versioning_base.commit( wc[0], "commit 3 wc0", "dbname=epanet_test_db" )

scur[0].execute("SELECT OGC_FID,length,trunk_rev_begin,trunk_rev_end,trunk_parent,trunk_child FROM pipes")
print '################'
for r in scur[0].fetchall():
    print r

# wc1 now pulls the concurrent history: conflicts must be materialised in
# the pipes_conflicts table (one 'mine'/'theirs' pair per conflicting row).
versioning_base.update( wc[1], "dbname=epanet_test_db" )
scur[1].execute("SELECT OGC_FID,length,trunk_rev_begin,trunk_rev_end,trunk_parent,trunk_child FROM pipes_diff")
print '################ diff'
for r in scur[1].fetchall():
    print r

scur[1].execute("SELECT conflict_id FROM pipes_conflicts")
assert( len(scur[1].fetchall()) == 6 ) # there must be conflicts

scur[1].execute("SELECT conflict_id,origin,action,OGC_FID,trunk_parent,trunk_child FROM pipes_conflicts")
print '################'
for r in scur[1].fetchall():
    print r

# Resolving one side of a conflict must drop the whole mine/theirs pair.
scur[1].execute("DELETE FROM pipes_conflicts WHERE origin='theirs' AND conflict_id=1")
scur[1].commit()
scur[1].execute("SELECT conflict_id FROM pipes_conflicts")
assert( len(scur[1].fetchall()) == 4 ) # there must be two removed entries

scur[1].execute("SELECT conflict_id,origin,action,OGC_FID,trunk_parent,trunk_child FROM pipes_conflicts")
print '################'
for r in scur[1].fetchall():
    print r

# Resolve the remaining conflicts (one by keeping 'theirs', the rest 'mine').
scur[1].execute("DELETE FROM pipes_conflicts WHERE origin='mine' AND OGC_FID = 11")
scur[1].execute("DELETE FROM pipes_conflicts WHERE origin='theirs'")
scur[1].commit()
scur[1].execute("SELECT conflict_id FROM pipes_conflicts")
assert( len(scur[1].fetchall()) == 0 ) # there must be no conflict

scur[1].execute("SELECT OGC_FID,length,trunk_rev_begin,trunk_rev_end,trunk_parent,trunk_child FROM pipes")
print '################'
for r in scur[1].fetchall():
    print r
|
enricofer/qgis-versioning
|
test/issue437_test.py
|
Python
|
gpl-2.0
| 3,445
|
import os
import sys
import copy
import logging
from checker import *
from .ofp import register_ofp_creators
from .ofp import OfpBase
from .utils import get_attrs_without_len
# YAML:
# get_async_reply:
# packet_in_mask:
# - 0x0
# - 0x0
# port_status_mask:
# - 0x0
# - 0x0
# flow_removed_mask:
# - 0x0
# - 0x0
SCE_GET_ASYNC_REPLY = "get_async_reply"


@register_ofp_creators(SCE_GET_ASYNC_REPLY)
class OfpGetAsyncReplyCreator(OfpBase):
    """Scenario creator for OFPGetAsyncReply messages.

    Registered under the ``get_async_reply`` scenario key; builds the reply
    from the YAML-provided mask parameters (see the key layout above).
    """

    @classmethod
    def create(cls, test_case_obj, dp, ofproto, ofp_parser, params):
        """Build an OFPGetAsyncReply for *dp* from the scenario *params*.

        The params dict is deep-copied so the caller's scenario data is
        never mutated, then expanded as keyword arguments.
        """
        reply_kwargs = copy.deepcopy(params)
        reply = ofp_parser.OFPGetAsyncReply(dp, **reply_kwargs)
        # Record which attributes the checker should compare on this message.
        reply._set_targets(get_attrs_without_len(reply))
        return reply
|
lagopus/lagopus
|
test/integration_test/tools/lib/ofp/ofp_get_async_reply.py
|
Python
|
apache-2.0
| 774
|
from flask import current_app, request, Response, get_flashed_messages
from flask.ext.login import current_user
class NotModified(Exception):
    """Raised to short-circuit a request when the client's cached copy is
    still valid; handled by handle_not_modified with an HTTP 304."""
    pass
def handle_not_modified(exc):
    """Flask error handler for NotModified: reply with an empty 304."""
    not_modified = Response(status=304)
    return not_modified
def setup_caching():
    """Initialise per-request HTTP cache state (run as a before-request hook).

    Copies the app-level CACHE switch onto the request and clears any ETag
    from a previous use of the request object.
    """
    # The two flags are independent; order does not matter.
    request._http_etag = None
    request._http_cache = current_app.config.get('CACHE')
def disable_cache():
    """Turn HTTP caching off for the current request only."""
    request._http_cache = False
def cache_response(resp):
    """After-request hook that would set cache headers on *resp*.

    NOTE(review): the whole caching policy below is deliberately commented
    out, so this hook is currently a no-op that returns the response
    unchanged. The disabled code is kept as a reference for the intended
    behaviour (no-cache for unsafe/streamed/flash responses, 6h max-age,
    private for authenticated users, ETag from etag_cache_keygen).
    """
    # if not hasattr(request, '_http_cache') \
    #         or not getattr(request, "_http_cache", True) \
    #         or request.method not in ['GET', 'HEAD', 'OPTIONS'] \
    #         or resp.status_code > 399 \
    #         or resp.is_streamed \
    #         or len(get_flashed_messages()):
    #     resp.cache_control.no_cache = True
    #     return resp
    # resp.cache_control.max_age = 3600 * 6
    # # resp.cache_control.must_revalidate = True
    # if current_user.is_authenticated():
    #     resp.cache_control.private = True
    # else:
    #     resp.cache_control.public = True
    # if request._http_etag is None:
    #     etag_cache_keygen()
    # resp.set_etag(request._http_etag)
    return resp
def etag_cache_keygen(*keys):
    """Compute the request etag; currently a no-op because caching is disabled.

    The original implementation (kept below, commented out) hashed the sorted
    query args, the current user and *keys*, and raised NotModified when the
    client's If-None-Match matched.
    """
    # if not request._http_cache:
    #     return
    # args = sorted(set(request.args.items()))
    # # jquery where is your god now?!?
    # args = filter(lambda (k, v): k != '_', args)
    # request._http_etag = cache_hash(args, current_user,
    #                                 keys)
    # if request.if_none_match == request._http_etag:
    #     raise NotModified()
    return
|
nathanhilbert/flaskboiler
|
flaskboiler/views/cache.py
|
Python
|
agpl-3.0
| 1,563
|
"""DEPRECATED UI FUNCTIONALITY"""
# """
# from nailgun import entities
# from robottelo import manifests
# from robottelo.api.utils import call_entity_method_with_timeout
# from robottelo.config import settings
# from robottelo.constants import (
# DISTRO_RHEL7,
# ENVIRONMENT,
# REAL_RHEL7_0_0_PACKAGE,
# REAL_RHEL7_0_1_PACKAGE,
# REAL_RHEL7_0_ERRATA_ID,
# REPOS,
# REPOSET,
# PRDS,
# )
# from robottelo.cli.factory import (
# setup_cdn_and_custom_repositories,
# setup_virtual_machine,
# )
# from robottelo.decorators import (
# run_in_one_thread,
# skip_if_not_set,
# stubbed,
# tier1,
# tier2,
# )
# from robottelo.test import UITestCase
# from robottelo.ui.factory import set_context
# from robottelo.ui.locators import common_locators, locators, tab_locators
# from robottelo.ui.session import Session
# from robottelo.vm import VirtualMachine
# org_environment_full_message = (
# 'Access to repositories is unrestricted in this organization. Hosts can'
# ' consume all repositories available in the Content View they are '
# 'registered to, regardless of subscription status.'
# )
# @run_in_one_thread
# class ContentAccessTestCase(UITestCase):
# """Implements Content Access (Golden Ticket) tests in UI"""
# @classmethod
# def set_session_org(cls):
# """Create an organization for tests, which will be selected
# automatically
# This method should set `session_org` to a new Org or reuse existing
# org that has Golden ticket enabled
# """
# cls.session_org = entities.Organization().create()
# @classmethod
# @skip_if_not_set('clients', 'fake_manifest')
# def setUpClass(cls):
# """Setup must ensure the current `session_org` has Golden Ticket
# enabled.
# Option 1) SQL::
# UPDATE
# cp_owner
# SET
# content_access_mode = 'org_environment',
# content_access_mode_list='entitlement,org_environment'
# WHERE account='{org.label}';
# Option 2) manifest::
# Change manifest file as it looks like:
# Consumer:
# Name: ExampleCorp
# UUID: c319a1d8-4b30-44cd-b2cf-2ccba4b9a8db
# Content Access Mode: org_environment
# Type: satellite
# :steps:
# 1. Create a Product and CV for current session_org.
# 2. Use either option 1 or option 2 (described above) to activate
# the Golden Ticket.
# 3. Add a repository pointing to a real repo which requires a
# RedHat subscription to access.
# 4. Create Content Host and assign that gated repos to it.
# 5. Sync the gated repository.
# """
# super(ContentAccessTestCase, cls).setUpClass()
# # upload organization manifest with org environment access enabled
# manifests.upload_manifest_locked(
# cls.session_org.id,
# manifests.clone(org_environment_access=True)
# )
# # Create repositories
# cls.repos = [
# # Red Hat Enterprise Linux 7
# {
# 'product': PRDS['rhel'],
# 'repository-set': REPOSET['rhel7'],
# 'repository': REPOS['rhel7']['name'],
# 'repository-id': REPOS['rhel7']['id'],
# 'releasever': REPOS['rhel7']['releasever'],
# 'arch': REPOS['rhel7']['arch'],
# 'cdn': True,
# },
# # Red Hat Satellite Tools
# {
# 'product': PRDS['rhel'],
# 'repository-set': REPOSET['rhst7'],
# 'repository': REPOS['rhst7']['name'],
# 'repository-id': REPOS['rhst7']['id'],
# 'url': settings.sattools_repo['rhel7'],
# 'cdn': bool(
# settings.cdn or not settings.sattools_repo['rhel7']),
# },
# ]
# cls.custom_product, cls.repos_info = setup_cdn_and_custom_repositories(
# cls.session_org.id, cls.repos)
# # Create a content view
# content_view = entities.ContentView(
# organization=cls.session_org,
# repository=[entities.Repository(id=repo_info['id'])
# for repo_info in cls.repos_info],
# ).create()
# # Publish the content view
# call_entity_method_with_timeout(content_view.publish, timeout=1500)
# cls.content_view = content_view.read()
# # create an activation only for testing org environment info message
# # displayed tests
# cls.activation_key = entities.ActivationKey(
# organization=cls.session_org).create()
# def _setup_virtual_machine(self, vm):
# """Make the initial virtual machine setup
# :param VirtualMachine vm: The virtual machine setup
# """
# setup_virtual_machine(
# vm,
# self.session_org.label,
# rh_repos_id=[
# repo['repository-id'] for repo in self.repos if repo['cdn']
# ],
# product_label=self.custom_product['label'],
# repos_label=[
# repo['label'] for repo in self.repos_info
# if repo['red-hat-repository'] == 'no'
# ],
# lce=ENVIRONMENT,
# patch_os_release_distro=DISTRO_RHEL7,
# install_katello_agent=True,
# )
# @tier2
# def test_positive_list_installable_updates(self):
# """Access content hosts and assert all updates are listed on
# packages tab updates and not only those for attached subscriptions.
# :id: 30783c91-c665-4c39-8b3b-b7456bde76f2
# :steps:
# 1. Access Content-Host listing page.
# :CaseAutomation: notautomated
# :expectedresults:
# 1. All updates are available independent of subscription because
# Golden Ticket is enabled.
# """
# with VirtualMachine(distro=DISTRO_RHEL7) as vm:
# self._setup_virtual_machine(vm)
# # install a the packages that has updates with errata
# result = vm.run(
# 'yum install -y {0}'.format(REAL_RHEL7_0_0_PACKAGE))
# self.assertEqual(result.return_code, 0)
# result = vm.run('rpm -q {0}'.format(REAL_RHEL7_0_0_PACKAGE))
# self.assertEqual(result.return_code, 0)
# # check that package errata is applicable
# with Session(self) as session:
# set_context(session, org=self.session_org.name)
# self.assertIsNotNone(
# session.contenthost.errata_search(
# vm.hostname, REAL_RHEL7_0_ERRATA_ID)
# )
# @tier2
# def test_positive_list_available_packages(self):
# """Access content hosts and assert all packages are listed on
# installable updates and not only those for attached subscriptions.
# :id: 37383e25-7b1d-433e-9e05-faaa8ec70ee8
# :steps:
# 1. Access Content-Host Packages tab.
# :CaseAutomation: notautomated
# :expectedresults:
# 1. All packages are available independent
# of subscription because Golden Ticket is enabled.
# """
# with VirtualMachine(distro=DISTRO_RHEL7) as vm:
# self._setup_virtual_machine(vm)
# # install a the packages that has updates with errata
# result = vm.run(
# 'yum install -y {0}'.format(REAL_RHEL7_0_0_PACKAGE))
# self.assertEqual(result.return_code, 0)
# result = vm.run('rpm -q {0}'.format(REAL_RHEL7_0_0_PACKAGE))
# self.assertEqual(result.return_code, 0)
# # force host to generate/refresh errata applicability
# host = entities.Host(
# name=vm.hostname,
# organization=self.session_org
# ).search()[0].read()
# call_entity_method_with_timeout(
# host.errata_applicability, timeout=600)
# # check that package errata is applicable
# with Session(self) as session:
# set_context(session, org=self.session_org.name)
# self.assertIsNotNone(
# session.contenthost.package_search(
# vm.hostname,
# REAL_RHEL7_0_1_PACKAGE,
# package_tab='applicable'
# )
# )
# @tier1
# def test_positive_visual_indicator_on_hosts_subscription(self):
# """Access content hosts subscription tab and assert a visual indicator
# is present highlighting that organization hosts have unrestricted
# access to repository content.
# :id: f8fc0bd2-c92f-4706-9921-4e331762170d
# :steps:
# 1. Access Content-Host Subscription tab.
# :CaseAutomation: automated
# :expectedresults:
# 1. A visual alert is present at the top of the subscription tab
# saying: "Access to repositories is unrestricted in
# this organization. Hosts can consume all repositories available
# in the Content View they are registered to, regardless of
# subscription status".
# :CaseImportance: Critical
# """
# with VirtualMachine(distro=DISTRO_RHEL7) as client:
# client.install_katello_ca()
# client.register_contenthost(
# self.session_org.label, lce=ENVIRONMENT)
# self.assertTrue(client.subscribed)
# with Session(self) as session:
# set_context(session, org=self.session_org.name)
# session.contenthost.search_and_click(client.hostname)
# session.contenthost.click(
# tab_locators['contenthost.tab_subscriptions'])
# session.contenthost.click(
# tab_locators[
# 'contenthost.tab_subscriptions_subscriptions'])
# info_element = session.subscriptions.wait_until_element(
# common_locators['org_environment_info'])
# self.assertIsNotNone(info_element)
# self.assertIn(
# org_environment_full_message,
# info_element.text
# )
# @tier1
# def test_positive_visual_indicator_on_activation_key_details(self):
# """Access AK details subscription tab and assert a visual indicator
# is present highlighting that organization hosts have unrestricted
# access to repository content.
# :id: 94ba1113-11cb-43b2-882e-bf45b5355d9b
# :steps:
# 1. Access Ak details Subscription tab.
# :CaseAutomation: automated
# :expectedresults:
# 1. A visual alert is present at the top of the subscription tab
# saying: "Access to repositories is unrestricted in this
# organization. Hosts can consume all repositories available in
# the Content View they are registered to, regardless of
# subscription status".
# :CaseImportance: Critical
# """
# with Session(self) as session:
# set_context(session, org=self.session_org.name)
# session.activationkey.search_and_click(self.activation_key.name)
# session.activationkey.click(tab_locators['ak.subscriptions'])
# info_element = session.subscriptions.wait_until_element(
# common_locators['org_environment_info'])
# self.assertIsNotNone(info_element)
# self.assertIn(
# org_environment_full_message,
# info_element.text
# )
# @tier1
# def test_positive_visual_indicator_on_manifest(self):
# """Access org manifest page and assert a visual indicator
# is present highlighting that organization hosts have unrestricted
# access to repository content.
# :id: a9c2d2b7-17ab-441b-978d-24dc80f35a4b
# :steps:
# 1. Access org manifest page.
# :CaseAutomation: automated
# :expectedresults:
# 1. A visual alert is present at the top of the
# subscription tab saying: "Access to repositories is unrestricted
# in this organization. Hosts can consume all repositories
# available in the Content View they are registered to, regardless
# of subscription status".
# :CaseImportance: Critical
# """
# with Session(self) as session:
# set_context(session, org=self.session_org.name)
# session.subscriptions.navigate_to_entity()
# if not session.subscriptions.wait_until_element(
# locators.subs.upload, timeout=1):
# session.subscriptions.click(locators.subs.manage_manifest)
# info_element = session.subscriptions.wait_until_element(
# common_locators['org_environment_info'])
# self.assertIsNotNone(info_element)
# self.assertIn(
# org_environment_full_message,
# info_element.text
# )
# @tier1
# def test_negative_visual_indicator_with_restricted_subscription(self):
# """Access AK details subscription tab and assert a visual indicator
# is NOT present if organization has no Golden Ticket Enabled.
# :id: ce5f3017-a449-45e6-8709-7d4f7b5f7a4d
# :steps:
# 1. Change to a restricted organization (with no GT enabled).
# 2. Access Ak details Subscription tab.
# :CaseAutomation: automated
# :expectedresults:
# 1. Assert GoldenTicket visual alert is NOT present.
# :CaseImportance: Critical
# """
# org = entities.Organization().create()
# self.upload_manifest(org.id, manifests.clone())
# activation_key = entities.ActivationKey(organization=org).create()
# with Session(self) as session:
# set_context(session, org=org.name, force_context=True)
# session.activationkey.search_and_click(activation_key.name)
# session.activationkey.click(tab_locators['ak.subscriptions'])
# self.assertIsNone(
# session.subscriptions.wait_until_element(
# common_locators['org_environment_info'])
# )
# @tier2
# @stubbed()
# def test_negative_list_available_packages(self):
# """Access content hosts and assert restricted packages are not listed
# on installable updates but only those for attached subscriptions.
# :id: 87a502ff-bb3c-4da4-ab88-b49a4fcdf3fb
# :steps:
# 1. Change to a restricted organization (with no GT enabled).
# 2. Access Content-Host Packages tab.
# :CaseAutomation: notautomated
# :expectedresults:
# 1. Restricted packages are NOT available but only
# those for atached subscriptions because Golden Ticket is NOT
# enabled.
# """
|
ldjebran/robottelo
|
tests/foreman/ui_deprecated/test_contentaccess.py
|
Python
|
gpl-3.0
| 15,538
|
#!/usr/bin/env python
"""
Listens to github hooks at /poetroid and runs literatti and sends results to s3
"""
from flask import Flask, request
import json
from s3 import build
import os
app = Flask(__name__)


@app.route('/poetroid', methods=['POST'])
def gh_hook():
    """Handle the GitHub push webhook: run a site build and echo when it ran.

    Returns the literal string "Done" so GitHub records a 200 delivery.
    """
    # TODO(review): validate that the request really came from GitHub
    # (e.g. verify the X-Hub-Signature header) before triggering a build.
    payload = request.form.get('payload')
    data = json.loads(payload)  # parsed as a basic sanity check on the hook
    deploy_time = build()
    # BUG FIX: the original echoed deploy_time *before* it was assigned,
    # raising NameError on every request. Echo once, after the build.
    os.system('echo "%s"' % (deploy_time))
    return "Done"
# Run the Flask development server when executed directly.
if __name__ == '__main__':
    app.run()
|
facjure/poetroid-prototype
|
scripts/ghhook.py
|
Python
|
epl-1.0
| 553
|
from prismriver.plugin.common import Plugin
from prismriver.struct import Song
class SnakiePlugin(Plugin):
    """Lyrics provider for the Snakie's Obsession site (lyrics.snakeroot.ru)."""

    ID = 'snakie'

    def __init__(self, config):
        super(SnakiePlugin, self).__init__('Snakie\'s Obsession', config)

    def search_song(self, artist, title):
        """Fetch and parse the lyrics page for (artist, title); None if absent."""
        strip_chars = ['.', ',', '!', '?', '(', ')', '~', '/', "'", '"']
        space_chars = [' ']
        link = 'http://lyrics.snakeroot.ru/{}/{}/{}_{}.html'.format(
            self.prepare_url_parameter(artist[0].upper()),
            self.prepare_url_parameter(artist, to_delete=strip_chars,
                                       to_replace=space_chars, delimiter='_'),
            self.prepare_url_parameter(artist, to_delete=strip_chars,
                                       to_replace=space_chars,
                                       delimiter='_').lower(),
            self.prepare_url_parameter(title, to_delete=strip_chars,
                                       to_replace=space_chars,
                                       delimiter='_').lower())
        # The site answers 404 when the song is unknown, so page is falsy then.
        page = self.download_webpage_text(link)
        if not page:
            return
        soup = self.prepare_soup(page)
        content = soup.find('div', {'id': 'content'})
        header = content.find('h2', recursive=False)
        found_artist = header.a.text
        found_title = header.a.next_sibling[3:]
        # The lyrics live in the last non-empty paragraph of the content pane.
        for block in reversed(content.findAll('p')):
            if block.text.strip():
                verse = self.parse_verse_block(block)
                return Song(found_artist, found_title,
                            self.sanitize_lyrics([verse]))
|
anlar/prismriver
|
prismriver/plugin/snakie.py
|
Python
|
mit
| 1,536
|
#!/usr/bin/env python
import sys, getopt, requests, datetime, urllib, grequests
def call_reqs(i, len_all_urls, inner_urls, file_name):
reqs = [ua[0] for ua in inner_urls]
artists = [ua[1] for ua in inner_urls]
sys.stdout.write(datetime.datetime.now().strftime('%d-%m-%Y-%H:%M:%S') + ' - ' + str(i) + '/' + str(len_all_urls) + ' ')
resps = grequests.map(reqs)
print '!'
for j, resp in enumerate(resps):
if not resp:
continue
if resp.status_code == requests.codes.ok:
if resp.text == '# Empty NT\n':
continue
with open(file_name, 'a+') as f:
result = resp.json()['results']['bindings']
if result:
artist = result[0]['artist']
if artist['type'] == 'uri':
f.write('"' + unicode(artists[j], "utf-8").encode("utf-8").lower())
f.write('":"' + artist['value'] + '",')
f.flush()
f.close()
else:
print unicode(artists[j], 'utf-8') + ' data could not be retrieved...\n\n' + resp.text
def main(argv=None):
query_url = 'http://linkedbrainz.org/sparql?default-graph-uri=&query=select+%3Fartist%0D%0Awhere%7B%0D%0A++%3Fartist+a+%3Chttp%3A%2F%2Fpurl.org%2Fontology%2Fmo%2FMusicArtist%3E+.+%0D%0A++%3Fartist+%3Chttp%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2Fname%3E+%3FARTIST_NAME+.%0D%0A++filter+%28lcase%28%3FARTIST_NAME%29+%3D+lcase%28%22{0}%22%29%29%0D%0A%7D%0D%0A&format=application%2Fsparql-results%2Bjson&timeout=0&debug=on'
file_name = 'result-artists-with-uri' + datetime.datetime.now().strftime('%d-%m-%Y-%H:%M:%S') + '.txt'
all_urls = []
inner_urls = []
with open('artists.txt', 'r') as input_file:
artists = input_file.readlines()
i = 0
len_all_urls = (len(artists) / 100) + 1
print 'We are going to make parallel requests ' + str(len_all_urls) + ' times'
for artist in artists:
artist = artist.replace('\n', '').strip()
url = query_url.replace('{0}', unicode(urllib.quote(artist, ''), 'utf-8'))
inner_urls.append( (grequests.get(url), artist) )
if len(inner_urls) == 100:
i += 1
call_reqs(i, len_all_urls, inner_urls, file_name)
inner_urls = []
if inner_urls:
i += 1
call_reqs(i, len_all_urls, inner_urls, file_name)
inner_urls = []
# Script entry point; propagate main()'s return value as the exit status.
if __name__ == "__main__":
    sys.exit(main())
|
tdopires/musicmap
|
scripts/query-artists-uris-parallel.py
|
Python
|
apache-2.0
| 2,575
|
# -*- coding: utf-8 -*-
__author__ = """Chris Tabor (dxdstudio@gmail.com)"""
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from MOAL.helpers.display import Section
from MOAL.helpers.display import print_h3
from MOAL.helpers.display import print_h4
from time import sleep
from math import factorial
# True only when run as a script; gates the print() calls in
# BaseDataType.increment/decrement.
DEBUG = True if __name__ == '__main__' else False
class BaseDataType(object):
    """The irony here is that these data types take up significantly
    more actual memory as a class representation in a bytecode interpreted
    language. The classing is merely for intuitive understanding and
    representing the hierarchy/operations."""
    @staticmethod
    def _alter(old_bit, new_bit, value):
        """An interesting visualization of flipping bits --
        not very robust; primarily meant as a basic learning exercise
        -- see other relevant modules for more specific implementations."""
        if not isinstance(value, str):
            raise TypeError('Requires a string version to prevent coercion.')
        curr_index = len(value) - 1
        # Convert the binary values to a list for operating on
        new = list(value)
        # Store a copy string representation for visual comparisons
        old = ''.join(new)
        # Keep moving right until we get to the next on/off bit
        # (depending on whether we're incrementing or decrementing)
        # NOTE(review): if every character equals `old_bit` this walks below
        # index 0 and Python's negative indexing wraps around -- confirm that
        # behaviour is intended for such edge inputs.
        while new[curr_index] == old_bit:
            curr_index -= 1
        new[curr_index + 1] = new_bit
        try:
            new[curr_index] = old_bit
        except IndexError:
            # curr_index may have walked off the left edge; ignore silently.
            pass
        new = ''.join(new)
        # NOTE(review): dead assignment -- `value` is a local and is never
        # read again before the return below.
        value = new
        return old, new
    @staticmethod
    def decrement(value):
        # Delegates to _alter with old_bit='0', new_bit='1'.
        old, new = BaseDataType._alter('0', '1', value)
        if DEBUG:
            print('\ndecrement: \n{}\n{}'.format(old, new))
        return new
    @staticmethod
    def increment(value):
        # Delegates to _alter with old_bit='1', new_bit='0'.
        old, new = BaseDataType._alter('1', '0', value)
        if DEBUG:
            print('\nincrement: \n{}\n{}'.format(old, new))
        return new
    @staticmethod
    def add(binval, amount):
        # Addition expressed as `amount` repeated increments.
        for n in range(amount):
            binval = BaseDataType.increment(binval)
        return binval
    @staticmethod
    def subtract(binval, amount):
        # Subtraction expressed as `amount` repeated decrements.
        for n in range(amount):
            binval = BaseDataType.decrement(binval)
        return binval
    @staticmethod
    def multiply(binval, amount):
        # Just defer to increment,
        # e.g. 10 * 2 = 20, |10 - 20| = 10 ... inc. 10
        # NOTE(review): int(binval) parses the bit-string as base-10, not
        # base-2 -- verify this is the intended teaching illustration.
        product = int(binval) * amount
        diff = abs(int(binval) - product)
        for n in range(diff):
            binval = BaseDataType.increment(binval)
        return binval
    @staticmethod
    def divide(binval, amount):
        # Just defer to decrement,
        # e.g. 10 / 2 = 5, 10 - 5 = 5 ... dec. 5
        # NOTE(review): same base-10 interpretation caveat as multiply().
        quotient = int(binval) // amount
        diff = abs(int(binval) - quotient)
        for n in range(diff):
            binval = BaseDataType.decrement(binval)
        return binval
    def get_least_significant(self):
        # Rightmost character of the bit string.
        return self.value[-1]
    def get_most_significant(self):
        # Leftmost character of the bit string.
        return self.value[0]
    def get_max_binvals(self):
        # Returns factorial(len(value)) for multi-bit values (2 for a single
        # bit) -- i.e. permutations of positions, not 2**n distinct values.
        if not hasattr(self, 'value'):
            raise TypeError
        else:
            val = len(self.value)
            return 2 if val == 1 else factorial(val)
    def __str__(self):
        if self.value:
            return '[{}]'.format(self.value)
        else:
            return ''
class Bit(BaseDataType):
    """A single binary digit; renders each character as a bracketed cell."""

    def __init__(self, value):
        self.bits = []
        self.value = value

    def __str__(self):
        return '[{}]'.format('] ['.join(self.value))
class Nibble(Bit):
    """Four bits -- half a byte; wraps each character in its own Bit."""

    def __init__(self, value):
        self.length = 4
        super(Nibble, self).__init__(value)
        for digit in value:
            self.bits.append(Bit(digit))
class Byte(Nibble):
    """Also sometimes referred to as an `Octet`"""
    def __init__(self, value):
        # 8 bits = one byte.
        self.length = 8
        super(Byte, self).__init__(value)
class Halfword(Nibble):
    """16 bits -- two bytes."""
    def __init__(self, value):
        self.length = 16
        super(Halfword, self).__init__(value)
class Word(Nibble):
    """32 bits -- four bytes."""
    def __init__(self, value):
        self.length = 32
        super(Word, self).__init__(value)
def update_animation(steps, func, instance):
    """Apply *func* to instance.value *steps* times, pausing briefly per step.

    The pause makes successive bit states readable when printed as a crude
    terminal animation.
    """
    for _step in range(steps):
        sleep(0.05)
        instance.value = func(instance.value)
# Short module-level aliases for the static bit-flipping helpers.
bin_inc = BaseDataType.increment
bin_dec = BaseDataType.decrement
# Demo / sanity checks for each width, printed in sections when run directly.
if __name__ == '__main__':
    with Section('Computer organization - Data types'):
        print_h4('Bit', desc='The simplest unit of information.')
        bit = Bit('0')
        assert bit.get_max_binvals() == 2
        print_h3('Nibble', desc='4 bits = 1/2 byte.')
        nibble = Nibble('0000')
        print(nibble)
        assert nibble.get_max_binvals() == 24
        print_h3('Byte', desc='8 bits = one byte.')
        byte = Byte('00000000')
        print(byte)
        assert byte.get_max_binvals() == 40320
        orig = byte.value
        # Flip bits then reverse and check the value to test things
        # are working correctly.
        update_animation(32, byte.increment, byte)
        update_animation(32, byte.decrement, byte)
        assert byte.value == orig
|
christabor/MoAL
|
MOAL/computer_organization/data_types.py
|
Python
|
apache-2.0
| 5,327
|
#!/usr/bin/env python
#
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Writes a build_config file.
The build_config file for a target is a json file containing information about
how to build that target based on the target's dependencies. This includes
things like: the javac classpath, the list of android resources dependencies,
etc. It also includes the information needed to create the build_config for
other targets that depend on that one.
Android build scripts should not refer to the build_config directly, and the
build specification should instead pass information in using the special
file-arg syntax (see build_utils.py:ExpandFileArgs). That syntax allows passing
of values in a json dict in a file and looks like this:
--python-arg=@FileArg(build_config_path:javac:classpath)
Note: If paths to input files are passed in this way, it is important that:
1. inputs/deps of the action ensure that the files are available the first
time the action runs.
2. Either (a) or (b)
a. inputs/deps ensure that the action runs whenever one of the files changes
b. the files are added to the action's depfile
"""
import optparse
import os
import sys
import xml.dom.minidom
from util import build_utils
import write_ordered_libraries
class AndroidManifest(object):
  """Thin read-only wrapper around an AndroidManifest.xml file."""

  def __init__(self, path):
    self.path = path
    document = xml.dom.minidom.parse(path)
    manifest_nodes = document.getElementsByTagName('manifest')
    assert len(manifest_nodes) == 1
    self.manifest = manifest_nodes[0]

  def GetInstrumentation(self):
    """Return the single <instrumentation> element, or None when absent."""
    elements = self.manifest.getElementsByTagName('instrumentation')
    if not elements:
      return None
    if len(elements) != 1:
      raise Exception(
          'More than one <instrumentation> element found in %s' % self.path)
    return elements[0]

  def CheckInstrumentation(self, expected_package):
    """Raise unless the manifest instruments exactly *expected_package*."""
    instrumentation = self.GetInstrumentation()
    if not instrumentation:
      raise Exception('No <instrumentation> elements found in %s' % self.path)
    target = instrumentation.getAttributeNS(
        'http://schemas.android.com/apk/res/android', 'targetPackage')
    if target != expected_package:
      raise Exception(
          'Wrong instrumented package. Expected %s, got %s'
          % (expected_package, target))

  def GetPackageName(self):
    """Return the manifest's `package` attribute."""
    return self.manifest.getAttribute('package')
# Memo of parsed build_config 'deps_info' sections, keyed by file path.
dep_config_cache = {}


def GetDepConfig(path):
  """Load (and memoize) the 'deps_info' section of a build_config file."""
  if path not in dep_config_cache:
    dep_config_cache[path] = build_utils.ReadJson(path)['deps_info']
  return dep_config_cache[path]
def DepsOfType(wanted_type, configs):
  """Return only the configs whose 'type' field equals *wanted_type*."""
  return [cfg for cfg in configs if cfg['type'] == wanted_type]
def GetAllDepsConfigsInOrder(deps_config_paths):
  """Return *deps_config_paths* plus all transitive deps, dependency-sorted."""
  def _DirectDeps(path):
    # Each config lists its own direct dependency configs under 'deps_configs'.
    return set(GetDepConfig(path)['deps_configs'])
  return build_utils.GetSortedTransitiveDependencies(
      deps_config_paths, _DirectDeps)
class Deps(object):
  """Direct and transitive build_config dependencies of a target.

  Configs are stored in dependency order (see GetAllDepsConfigsInOrder).
  """

  def __init__(self, direct_deps_config_paths):
    self.all_deps_config_paths = GetAllDepsConfigsInOrder(
        direct_deps_config_paths)
    self.direct_deps_configs = [
        GetDepConfig(p) for p in direct_deps_config_paths]
    self.all_deps_configs = [
        GetDepConfig(p) for p in self.all_deps_config_paths]

  def All(self, wanted_type=None):
    """Return all transitive dep configs, optionally filtered by type.

    BUG FIX: the original tested `type is None` -- `type` is the builtin and
    is never None, so All() with no argument filtered on wanted_type=None
    and always returned []. Test the parameter instead, mirroring Direct().
    """
    if wanted_type is None:
      return self.all_deps_configs
    return [c for c in self.all_deps_configs if c['type'] == wanted_type]

  def Direct(self, wanted_type=None):
    """Return the direct dep configs, optionally filtered by type."""
    if wanted_type is None:
      return self.direct_deps_configs
    return [c for c in self.direct_deps_configs if c['type'] == wanted_type]

  def AllConfigPaths(self):
    """Paths of every transitive build_config, in dependency order."""
    return self.all_deps_config_paths
def main(argv):
  """Parse target options, assemble the build_config dict, and write it.

  Flow: declare options -> validate per-type requirements -> resolve dep
  configs -> populate per-type config sections -> write JSON (+ depfile).
  """
  parser = optparse.OptionParser()
  build_utils.AddDepfileOption(parser)
  parser.add_option('--build-config', help='Path to build_config output.')
  parser.add_option(
      '--type',
      help='Type of this target (e.g. android_library).')
  parser.add_option(
      '--possible-deps-configs',
      help='List of paths for dependency\'s build_config files. Some '
      'dependencies may not write build_config files. Missing build_config '
      'files are handled differently based on the type of this target.')
  # android_resources options
  parser.add_option('--srcjar', help='Path to target\'s resources srcjar.')
  parser.add_option('--resources-zip', help='Path to target\'s resources zip.')
  parser.add_option('--r-text', help='Path to target\'s R.txt file.')
  parser.add_option('--package-name',
      help='Java package name for these resources.')
  parser.add_option('--android-manifest', help='Path to android manifest.')
  # java library options
  parser.add_option('--jar-path', help='Path to target\'s jar output.')
  parser.add_option('--supports-android', action='store_true',
      help='Whether this library supports running on the Android platform.')
  parser.add_option('--requires-android', action='store_true',
      help='Whether this library requires running on the Android platform.')
  parser.add_option('--bypass-platform-checks', action='store_true',
      help='Bypass checks for support/require Android platform.')
  # android library options
  parser.add_option('--dex-path', help='Path to target\'s dex output.')
  # native library options
  parser.add_option('--native-libs', help='List of top-level native libs.')
  parser.add_option('--readelf-path', help='Path to toolchain\'s readelf.')
  parser.add_option('--tested-apk-config',
      help='Path to the build config of the tested apk (for an instrumentation '
      'test apk).')
  options, args = parser.parse_args(argv)
  if args:
    parser.error('No positional arguments should be given.')
  # Validate the target type and its type-specific required options.
  if not options.type in [
      'java_library', 'android_resources', 'android_apk', 'deps_dex']:
    raise Exception('Unknown type: <%s>' % options.type)
  required_options = ['build_config'] + {
      'java_library': ['jar_path'],
      'android_resources': ['resources_zip'],
      'android_apk': ['jar_path', 'dex_path', 'resources_zip'],
      'deps_dex': ['dex_path']
    }[options.type]
  if options.native_libs:
    required_options.append('readelf_path')
  build_utils.CheckOptions(options, parser, required_options)
  if options.type == 'java_library':
    if options.supports_android and not options.dex_path:
      raise Exception('java_library that supports Android requires a dex path.')
    if options.requires_android and not options.supports_android:
      raise Exception(
          '--supports-android is required when using --requires-android')
  # Resolve dependency build_config paths; apks tolerate missing ones.
  possible_deps_config_paths = build_utils.ParseGypList(
      options.possible_deps_configs)
  allow_unknown_deps = options.type == 'android_apk'
  unknown_deps = [
      c for c in possible_deps_config_paths if not os.path.exists(c)]
  if unknown_deps and not allow_unknown_deps:
    raise Exception('Unknown deps: ' + str(unknown_deps))
  direct_deps_config_paths = [
      c for c in possible_deps_config_paths if not c in unknown_deps]
  deps = Deps(direct_deps_config_paths)
  direct_library_deps = deps.Direct('java_library')
  all_library_deps = deps.All('java_library')
  direct_resources_deps = deps.Direct('android_resources')
  all_resources_deps = deps.All('android_resources')
  # Resources should be ordered with the highest-level dependency first so that
  # overrides are done correctly.
  all_resources_deps.reverse()
  if options.type == 'android_apk' and options.tested_apk_config:
    tested_apk_deps = Deps([options.tested_apk_config])
    tested_apk_resources_deps = tested_apk_deps.All('android_resources')
    all_resources_deps = [
        d for d in all_resources_deps if not d in tested_apk_resources_deps]
  # Initialize some common config.
  config = {
    'deps_info': {
      'name': os.path.basename(options.build_config),
      'path': options.build_config,
      'type': options.type,
      'deps_configs': direct_deps_config_paths,
    }
  }
  deps_info = config['deps_info']
  # Enforce platform support/requirement consistency across library deps.
  if options.type == 'java_library' and not options.bypass_platform_checks:
    deps_info['requires_android'] = options.requires_android
    deps_info['supports_android'] = options.supports_android
    deps_require_android = (all_resources_deps +
        [d['name'] for d in all_library_deps if d['requires_android']])
    deps_not_support_android = (
        [d['name'] for d in all_library_deps if not d['supports_android']])
    if deps_require_android and not options.requires_android:
      raise Exception('Some deps require building for the Android platform: ' +
          str(deps_require_android))
    if deps_not_support_android and options.supports_android:
      raise Exception('Not all deps support the Android platform: ' +
          str(deps_not_support_android))
  # Classpaths for javac (direct deps) and full runtime (all deps).
  if options.type in ['java_library', 'android_apk']:
    javac_classpath = [c['jar_path'] for c in direct_library_deps]
    java_full_classpath = [c['jar_path'] for c in all_library_deps]
    deps_info['resources_deps'] = [c['path'] for c in all_resources_deps]
    deps_info['jar_path'] = options.jar_path
    if options.type == 'android_apk' or options.supports_android:
      deps_info['dex_path'] = options.dex_path
    config['javac'] = {
      'classpath': javac_classpath,
    }
    config['java'] = {
      'full_classpath': java_full_classpath
    }
  if options.type == 'java_library':
    # Only resources might have srcjars (normal srcjar targets are listed in
    # srcjar_deps). A resource's srcjar contains the R.java file for those
    # resources, and (like Android's default build system) we allow a library to
    # refer to the resources in any of its dependents.
    config['javac']['srcjars'] = [
        c['srcjar'] for c in direct_resources_deps if 'srcjar' in c]
  if options.type == 'android_apk':
    # Apks will get their resources srcjar explicitly passed to the java step.
    config['javac']['srcjars'] = []
  if options.type == 'android_resources':
    deps_info['resources_zip'] = options.resources_zip
    if options.srcjar:
      deps_info['srcjar'] = options.srcjar
    if options.android_manifest:
      manifest = AndroidManifest(options.android_manifest)
      deps_info['package_name'] = manifest.GetPackageName()
    if options.package_name:
      deps_info['package_name'] = options.package_name
    if options.r_text:
      deps_info['r_text'] = options.r_text
  if options.type == 'android_resources' or options.type == 'android_apk':
    config['resources'] = {}
    config['resources']['dependency_zips'] = [
        c['resources_zip'] for c in all_resources_deps]
    config['resources']['extra_package_names'] = []
    config['resources']['extra_r_text_files'] = []
  if options.type == 'android_apk':
    config['resources']['extra_package_names'] = [
        c['package_name'] for c in all_resources_deps if 'package_name' in c]
    config['resources']['extra_r_text_files'] = [
        c['r_text'] for c in all_resources_deps if 'r_text' in c]
  if options.type in ['android_apk', 'deps_dex']:
    deps_dex_files = [c['dex_path'] for c in all_library_deps]
  # An instrumentation test apk should exclude the dex files that are in the apk
  # under test.
  if options.type == 'android_apk' and options.tested_apk_config:
    tested_apk_deps = Deps([options.tested_apk_config])
    tested_apk_library_deps = tested_apk_deps.All('java_library')
    tested_apk_deps_dex_files = [c['dex_path'] for c in tested_apk_library_deps]
    deps_dex_files = [
        p for p in deps_dex_files if not p in tested_apk_deps_dex_files]
    tested_apk_config = GetDepConfig(options.tested_apk_config)
    expected_tested_package = tested_apk_config['package_name']
    AndroidManifest(options.android_manifest).CheckInstrumentation(
        expected_tested_package)
  # Dependencies for the final dex file of an apk or a 'deps_dex'.
  if options.type in ['android_apk', 'deps_dex']:
    config['final_dex'] = {}
    dex_config = config['final_dex']
    # TODO(cjhopman): proguard version
    dex_config['dependency_dex_files'] = deps_dex_files
  if options.type == 'android_apk':
    config['dist_jar'] = {
      'dependency_jars': [
        c['jar_path'] for c in all_library_deps
      ]
    }
    manifest = AndroidManifest(options.android_manifest)
    deps_info['package_name'] = manifest.GetPackageName()
    if not options.tested_apk_config and manifest.GetInstrumentation():
      # This must then have instrumentation only for itself.
      manifest.CheckInstrumentation(manifest.GetPackageName())
    library_paths = []
    java_libraries_list = []
    if options.native_libs:
      libraries = build_utils.ParseGypList(options.native_libs)
      if libraries:
        libraries_dir = os.path.dirname(libraries[0])
        write_ordered_libraries.SetReadelfPath(options.readelf_path)
        write_ordered_libraries.SetLibraryDirs([libraries_dir])
        all_native_library_deps = (
            write_ordered_libraries.GetSortedTransitiveDependenciesForBinaries(
                libraries))
        # Create a java literal array with the "base" library names:
        # e.g. libfoo.so -> foo
        java_libraries_list = '{%s}' % ','.join(
            ['"%s"' % s[3:-3] for s in all_native_library_deps])
        library_paths = map(
            write_ordered_libraries.FullLibraryPath, all_native_library_deps)
      config['native'] = {
        'libraries': library_paths,
        'java_libraries_list': java_libraries_list
      }
  build_utils.WriteJson(config, options.build_config, only_if_changed=True)
  if options.depfile:
    build_utils.WriteDepfile(
        options.depfile,
        deps.AllConfigPaths() + build_utils.GetPythonDependencies())
# Script entry point: forward the CLI arguments (minus the program name) to
# main() and propagate its return value as the process exit status.
if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
|
collinjackson/mojo
|
build/android/gyp/write_build_config.py
|
Python
|
bsd-3-clause
| 13,804
|
"""IPython extension to reload modules before executing user code.
``autoreload`` reloads modules automatically before entering the execution of
code typed at the IPython prompt.
This makes for example the following workflow possible:
.. sourcecode:: ipython
In [1]: %load_ext autoreload
In [2]: %autoreload 2
In [3]: from foo import some_function
In [4]: some_function()
Out[4]: 42
In [5]: # open foo.py in an editor and change some_function to return 43
In [6]: some_function()
Out[6]: 43
The module was reloaded without reloading it explicitly, and the object
imported with ``from foo import ...`` was also updated.
Usage
=====
The following magic commands are provided:
``%autoreload``
Reload all modules (except those excluded by ``%aimport``)
automatically now.
``%autoreload 0``
Disable automatic reloading.
``%autoreload 1``
Reload all modules imported with ``%aimport`` every time before
executing the Python code typed.
``%autoreload 2``
Reload all modules (except those excluded by ``%aimport``) every
time before executing the Python code typed.
``%aimport``
List modules which are to be automatically imported or not to be imported.
``%aimport foo``
Import module 'foo' and mark it to be autoreloaded for ``%autoreload 1``
``%aimport foo, bar``
Import modules 'foo', 'bar' and mark them to be autoreloaded for ``%autoreload 1``
``%aimport -foo``
Mark module 'foo' to not be autoreloaded.
Caveats
=======
Reloading Python modules in a reliable way is in general difficult,
and unexpected things may occur. ``%autoreload`` tries to work around
common pitfalls by replacing function code objects and parts of
classes previously in the module with new versions. This makes the
following things to work:
- Functions and classes imported via 'from xxx import foo' are upgraded
to new versions when 'xxx' is reloaded.
- Methods and properties of classes are upgraded on reload, so that
calling 'c.foo()' on an object 'c' created before the reload causes
the new code for 'foo' to be executed.
Some of the known remaining caveats are:
- Replacing code objects does not always succeed: changing a @property
in a class to an ordinary method or a method to a member variable
can cause problems (but in old objects only).
- Functions that are removed (eg. via monkey-patching) from a module
before it is reloaded are not upgraded.
- C extension modules cannot be reloaded, and so cannot be autoreloaded.
"""
skip_doctest = True
#-----------------------------------------------------------------------------
# Copyright (C) 2000 Thomas Heller
# Copyright (C) 2008 Pauli Virtanen <pav@iki.fi>
# Copyright (C) 2012 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#
# This IPython module is written by Pauli Virtanen, based on the autoreload
# code by Thomas Heller.
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import os
import sys
import traceback
import types
import weakref
from importlib import import_module
from importlib.util import source_from_cache
from imp import reload
#------------------------------------------------------------------------------
# Autoreload functionality
#------------------------------------------------------------------------------
class ModuleReloader(object):
    """Track source-file modification times of imported modules and reload
    the ones whose source has changed since the last check."""

    enabled = False
    """Whether this reloader is enabled"""

    check_all = True
    """Autoreload all modules, not just those listed in 'modules'"""

    def __init__(self):
        # Modules that failed to reload: {module: mtime-on-failed-reload, ...}
        self.failed = {}
        # Modules specially marked as autoreloadable.
        self.modules = {}
        # Modules specially marked as not autoreloadable.
        self.skip_modules = {}
        # (module-name, name) -> weakref, for replacing old code objects
        self.old_objects = {}
        # Module modification timestamps
        self.modules_mtimes = {}

        # Cache module modification times
        self.check(check_all=True, do_reload=False)

    def mark_module_skipped(self, module_name):
        """Skip reloading the named module in the future"""
        try:
            del self.modules[module_name]
        except KeyError:
            pass
        self.skip_modules[module_name] = True

    def mark_module_reloadable(self, module_name):
        """Reload the named module in the future (if it is imported)"""
        try:
            del self.skip_modules[module_name]
        except KeyError:
            pass
        self.modules[module_name] = True

    def aimport_module(self, module_name):
        """Import a module, and mark it reloadable

        Returns
        -------
        top_module : module
          The imported module if it is top-level, or the top-level
        top_name : module
          Name of top_module

        """
        self.mark_module_reloadable(module_name)

        import_module(module_name)
        # Importing "a.b.c" registers "a" in sys.modules; return that
        # top-level package so it can be pushed into the user namespace.
        top_name = module_name.split('.')[0]
        top_module = sys.modules[top_name]
        return top_module, top_name

    def filename_and_mtime(self, module):
        """Return (source_filename, mtime) for *module*, or (None, None)
        when the module cannot be watched (no file, special name, or the
        source/stat lookup fails)."""
        if not hasattr(module, '__file__') or module.__file__ is None:
            return None, None

        if getattr(module, '__name__', None) in [None, '__mp_main__', '__main__']:
            # we cannot reload(__main__) or reload(__mp_main__)
            return None, None

        filename = module.__file__
        path, ext = os.path.splitext(filename)

        if ext.lower() == '.py':
            py_filename = filename
        else:
            try:
                # Map a cached bytecode file back to its .py source file.
                py_filename = source_from_cache(filename)
            except ValueError:
                return None, None

        try:
            pymtime = os.stat(py_filename).st_mtime
        except OSError:
            return None, None

        return py_filename, pymtime

    def check(self, check_all=False, do_reload=True):
        """Check whether some modules need to be reloaded."""

        if not self.enabled and not check_all:
            return

        if check_all or self.check_all:
            modules = list(sys.modules.keys())
        else:
            modules = list(self.modules.keys())

        for modname in modules:
            m = sys.modules.get(modname, None)

            if modname in self.skip_modules:
                continue

            py_filename, pymtime = self.filename_and_mtime(m)
            if py_filename is None:
                continue

            try:
                # Source unchanged since the recorded timestamp: nothing to do.
                if pymtime <= self.modules_mtimes[modname]:
                    continue
            except KeyError:
                # First time we see this module: record its mtime only.
                self.modules_mtimes[modname] = pymtime
                continue
            else:
                # Don't keep retrying a module that already failed to
                # reload at this exact mtime.
                if self.failed.get(py_filename, None) == pymtime:
                    continue

            self.modules_mtimes[modname] = pymtime

            # If we've reached this point, we should try to reload the module
            if do_reload:
                try:
                    superreload(m, reload, self.old_objects)
                    if py_filename in self.failed:
                        del self.failed[py_filename]
                except:
                    print("[autoreload of %s failed: %s]" % (
                        modname, traceback.format_exc(10)), file=sys.stderr)
                    self.failed[py_filename] = pymtime
#------------------------------------------------------------------------------
# superreload
#------------------------------------------------------------------------------
# Function attributes copied across when a function object is "upgraded".
func_attrs = ['__code__', '__defaults__', '__doc__',
              '__closure__', '__globals__', '__dict__']


def update_function(old, new):
    """Patch *old* in place so that it behaves like *new*.

    Copies the code object plus related function attributes; attributes
    that cannot be read or (re)assigned are silently left untouched.
    """
    for attr in func_attrs:
        try:
            setattr(old, attr, getattr(new, attr))
        except (AttributeError, TypeError):
            # Read-only or missing attribute -- keep the old value.
            continue
def update_class(old, new):
    """Replace stuff in the __dict__ of a class, and upgrade
    method code objects, and add new methods, if any"""
    for key in list(old.__dict__.keys()):
        old_obj = getattr(old, key)

        try:
            new_obj = getattr(new, key)
            # explicitly checking that comparison returns True to handle
            # cases where `==` doesn't return a boolean.
            if (old_obj == new_obj) is True:
                continue
        except AttributeError:
            # obsolete attribute: remove it
            try:
                delattr(old, key)
            except (AttributeError, TypeError):
                pass
            continue

        # Try an in-place upgrade first (functions, properties, methods,
        # nested classes); on success there is nothing left to do.
        if update_generic(old_obj, new_obj): continue

        # Fall back to plain attribute replacement.
        try:
            setattr(old, key, getattr(new, key))
        except (AttributeError, TypeError):
            pass # skip non-writable attributes

    # Attributes present only on the new class are added to the old one.
    for key in list(new.__dict__.keys()):
        if key not in list(old.__dict__.keys()):
            try:
                setattr(old, key, getattr(new, key))
            except (AttributeError, TypeError):
                pass # skip non-writable attributes
def update_property(old, new):
    """Upgrade the deleter, getter and setter functions of a property."""
    # Same order as before: fdel, fget, fset.
    for accessor in ('fdel', 'fget', 'fset'):
        update_generic(getattr(old, accessor), getattr(new, accessor))
def isinstance2(a, b, typ):
    """Return True when both *a* and *b* are instances of *typ*."""
    return all(isinstance(obj, typ) for obj in (a, b))
# Dispatch table mapping a (old, new) type predicate to the updater used for
# values of that type.  Rules are tried in order by update_generic().
UPDATE_RULES = [
    (lambda a, b: isinstance2(a, b, type),
     update_class),
    (lambda a, b: isinstance2(a, b, types.FunctionType),
     update_function),
    (lambda a, b: isinstance2(a, b, property),
     update_property),
]

# Bound/unbound methods are upgraded by patching their underlying function.
UPDATE_RULES.extend([(lambda a, b: isinstance2(a, b, types.MethodType),
                      lambda a, b: update_function(a.__func__, b.__func__)),
                    ])
def update_generic(a, b):
    """Upgrade *a* in place from *b* using the first matching rule.

    Returns True when a rule applied, False when no rule matched.
    """
    for type_check, apply_update in UPDATE_RULES:
        if not type_check(a, b):
            continue
        apply_update(a, b)
        return True
    return False
class StrongRef(object):
    """A weakref.ref look-alike that keeps a *hard* reference to the object.

    Calling the instance returns the wrapped object, mirroring how a weak
    reference is dereferenced.
    """
    def __init__(self, obj):
        # Held directly, so the object cannot be garbage collected while
        # this reference exists.
        self.obj = obj

    def __call__(self):
        return self.obj
def superreload(module, reload=reload, old_objects=None):
    """Enhanced version of the builtin reload function.

    superreload remembers objects previously in the module, and

    - upgrades the class dictionary of every old class in the module
    - upgrades the code object of every old function and method
    - clears the module's namespace before reloading

    """
    if old_objects is None:
        old_objects = {}

    # collect old objects in the module
    for name, obj in list(module.__dict__.items()):
        # Only track objects actually defined in this module.
        if not hasattr(obj, '__module__') or obj.__module__ != module.__name__:
            continue
        key = (module.__name__, name)
        try:
            # Weak references let dead objects drop out of the map instead
            # of being kept alive by the reloader itself.
            old_objects.setdefault(key, []).append(weakref.ref(obj))
        except TypeError:
            # Object type is not weak-referenceable; skip it.
            pass

    # reload module
    try:
        # clear namespace first from old cruft
        old_dict = module.__dict__.copy()
        old_name = module.__name__
        module.__dict__.clear()
        # __name__ and __loader__ must survive for the reload machinery.
        module.__dict__['__name__'] = old_name
        module.__dict__['__loader__'] = old_dict['__loader__']
    except (TypeError, AttributeError, KeyError):
        pass

    try:
        module = reload(module)
    except:
        # restore module dictionary on failed reload
        module.__dict__.update(old_dict)
        raise

    # iterate over all objects and update functions & classes
    for name, new_obj in list(module.__dict__.items()):
        key = (module.__name__, name)
        if key not in old_objects: continue

        new_refs = []
        for old_ref in old_objects[key]:
            old_obj = old_ref()
            # Referent already garbage collected: drop the stale weakref.
            if old_obj is None: continue
            new_refs.append(old_ref)
            # Patch the still-live old object in place to match the new one.
            update_generic(old_obj, new_obj)

        if new_refs:
            old_objects[key] = new_refs
        else:
            del old_objects[key]

    return module
#------------------------------------------------------------------------------
# IPython connectivity
#------------------------------------------------------------------------------
from IPython.core.magic import Magics, magics_class, line_magic
@magics_class
class AutoreloadMagics(Magics):
    """Implements the ``%autoreload`` and ``%aimport`` line magics."""

    def __init__(self, *a, **kw):
        super(AutoreloadMagics, self).__init__(*a, **kw)
        self._reloader = ModuleReloader()
        # Default to watching only modules registered via %aimport.
        self._reloader.check_all = False
        # Snapshot of modules present at startup; post_execute_hook uses it
        # to spot modules imported during a cell's execution.
        self.loaded_modules = set(sys.modules)

    @line_magic
    def autoreload(self, parameter_s=''):
        r"""%autoreload => Reload modules automatically

        %autoreload
        Reload all modules (except those excluded by %aimport) automatically
        now.

        %autoreload 0
        Disable automatic reloading.

        %autoreload 1
        Reload all modules imported with %aimport every time before executing
        the Python code typed.

        %autoreload 2
        Reload all modules (except those excluded by %aimport) every time
        before executing the Python code typed.

        Reloading Python modules in a reliable way is in general
        difficult, and unexpected things may occur. %autoreload tries to
        work around common pitfalls by replacing function code objects and
        parts of classes previously in the module with new versions. This
        makes the following things to work:

        - Functions and classes imported via 'from xxx import foo' are upgraded
          to new versions when 'xxx' is reloaded.

        - Methods and properties of classes are upgraded on reload, so that
          calling 'c.foo()' on an object 'c' created before the reload causes
          the new code for 'foo' to be executed.

        Some of the known remaining caveats are:

        - Replacing code objects does not always succeed: changing a @property
          in a class to an ordinary method or a method to a member variable
          can cause problems (but in old objects only).

        - Functions that are removed (eg. via monkey-patching) from a module
          before it is reloaded are not upgraded.

        - C extension modules cannot be reloaded, and so cannot be
          autoreloaded.
        """
        if parameter_s == '':
            # No argument: perform one reload pass right now.
            self._reloader.check(True)
        elif parameter_s == '0':
            self._reloader.enabled = False
        elif parameter_s == '1':
            # Watch only modules registered through %aimport.
            self._reloader.check_all = False
            self._reloader.enabled = True
        elif parameter_s == '2':
            # Watch everything except explicitly skipped modules.
            self._reloader.check_all = True
            self._reloader.enabled = True

    @line_magic
    def aimport(self, parameter_s='', stream=None):
        """%aimport => Import modules for automatic reloading.

        %aimport
        List modules to automatically import and not to import.

        %aimport foo
        Import module 'foo' and mark it to be autoreloaded for %autoreload 1

        %aimport foo, bar
        Import modules 'foo', 'bar' and mark them to be autoreloaded for %autoreload 1

        %aimport -foo
        Mark module 'foo' to not be autoreloaded for %autoreload 1
        """
        modname = parameter_s
        if not modname:
            # No argument: report the current reload/skip configuration.
            to_reload = sorted(self._reloader.modules.keys())
            to_skip = sorted(self._reloader.skip_modules.keys())
            if stream is None:
                stream = sys.stdout
            if self._reloader.check_all:
                stream.write("Modules to reload:\nall-except-skipped\n")
            else:
                stream.write("Modules to reload:\n%s\n" % ' '.join(to_reload))
            stream.write("\nModules to skip:\n%s\n" % ' '.join(to_skip))
        elif modname.startswith('-'):
            # "-foo": mark the module as not-to-be-autoreloaded.
            modname = modname[1:]
            self._reloader.mark_module_skipped(modname)
        else:
            # Comma-separated module names: import each and mark reloadable.
            for _module in ([_.strip() for _ in modname.split(',')]):
                top_module, top_name = self._reloader.aimport_module(_module)

                # Inject module to user namespace
                self.shell.push({top_name: top_module})

    def pre_run_cell(self):
        if self._reloader.enabled:
            try:
                self._reloader.check()
            except:
                # Reload errors must never prevent the user's code from
                # running.
                pass

    def post_execute_hook(self):
        """Cache the modification times of any modules imported in this execution
        """
        newly_loaded_modules = set(sys.modules) - self.loaded_modules
        for modname in newly_loaded_modules:
            _, pymtime = self._reloader.filename_and_mtime(sys.modules[modname])
            if pymtime is not None:
                self._reloader.modules_mtimes[modname] = pymtime

        self.loaded_modules.update(newly_loaded_modules)
def load_ipython_extension(ip):
    """Register the autoreload magics and event hooks with shell *ip*."""
    magics = AutoreloadMagics(ip)
    ip.register_magics(magics)
    # Check for modified modules before each cell runs, and record the
    # mtimes of newly imported modules after each execution.
    for event, handler in (('pre_run_cell', magics.pre_run_cell),
                           ('post_execute', magics.post_execute_hook)):
        ip.events.register(event, handler)
|
lmregus/Portfolio
|
python/design_patterns/env/lib/python3.7/site-packages/IPython/extensions/autoreload.py
|
Python
|
mit
| 17,198
|
# Relative path (within the qap package data) of the test subject's session.
test_sub_dir = "test_data/1019436/session_1"


def test_workflow_anatomical_reorient():
    ''' unit test for the anatomical reorient workflow BUILDER '''

    # FIX: removed unused, Python-2-only `import commands`, which raises
    # ImportError under Python 3 and was never referenced.
    import os
    import pkg_resources as p

    from qap.anatomical_preproc import run_anatomical_reorient
    from qap.workflow_utils import build_test_case

    # Input anatomical scan shipped with the package's test data.
    anat_scan = p.resource_filename("qap", os.path.join(test_sub_dir,
                                                        "anat_1",
                                                        "anatomical_scan",
                                                        "mprage.nii.gz"))

    # Reference workflow graph and inputs the built workflow must match.
    ref_graph = p.resource_filename("qap", os.path.join("test_data",
                                                        "workflow_reference",
                                                        "anatomical_reorient",
                                                        "graph_anatomical_reorient.dot"))

    ref_inputs = p.resource_filename("qap", os.path.join("test_data",
                                                         "workflow_reference",
                                                         "anatomical_reorient",
                                                         "wf_inputs.txt"))

    # build the workflow and return it
    wf, base_dir = run_anatomical_reorient(anat_scan, False)

    # get the workflow inputs of the workflow being tested, normalized so
    # machine-specific paths compare equal to the reference
    wf_inputs_string = str(wf.inputs).replace("\n", "")
    wf_inputs_string = wf_inputs_string.replace(base_dir,
                                                "BASE_DIRECTORY_HERE")
    wf_inputs_string = wf_inputs_string.replace(anat_scan, "IN_FILE_HERE")

    flag, err = build_test_case(wf, ref_inputs, ref_graph, wf_inputs_string)

    assert flag == 2, err
def test_workflow_anatomical_skullstrip():
    ''' unit test for the anatomical skullstrip workflow BUILDER '''

    # FIX: removed unused, Python-2-only `import commands`, which raises
    # ImportError under Python 3 and was never referenced.
    import os
    import pkg_resources as p

    from qap.anatomical_preproc import run_anatomical_skullstrip
    from qap.workflow_utils import build_test_case

    anat_reorient = p.resource_filename("qap", os.path.join(test_sub_dir,
                                                            "anat_1",
                                                            "anatomical_reorient",
                                                            "mprage_resample.nii.gz"))

    ref_graph = p.resource_filename("qap", os.path.join("test_data",
                                                        "workflow_reference",
                                                        "anatomical_skullstrip",
                                                        "graph_anatomical_skullstrip.dot"))

    ref_inputs = p.resource_filename("qap", os.path.join("test_data",
                                                         "workflow_reference",
                                                         "anatomical_skullstrip",
                                                         "wf_inputs.txt"))

    # build the workflow and return it
    wf, base_dir = run_anatomical_skullstrip(anat_reorient, False)

    # get the workflow inputs of the workflow being tested; replace
    # machine-specific paths so they compare equal to the reference
    wf_inputs_string = str(wf.inputs).replace("\n", "")
    wf_inputs_string = wf_inputs_string.replace(base_dir,
                                                "base_directory_here")
    # the reorient file appears twice in the inputs (in_file and in_file_a):
    # replace only the first occurrence, then the remaining one
    wf_inputs_string = wf_inputs_string.replace(anat_reorient, "in_file_here", 1)
    wf_inputs_string = wf_inputs_string.replace(anat_reorient, "in_file_a_here")

    flag, err = build_test_case(wf, ref_inputs, ref_graph, wf_inputs_string)

    assert flag == 2, err
def test_workflow_flirt_anatomical_linear_registration():
    ''' unit test for the anatomical reorient workflow BUILDER '''

    import os
    import pkg_resources as p

    from qap.anatomical_preproc import run_flirt_anatomical_linear_registration
    from qap.workflow_utils import build_test_case

    # All reference files for this workflow live under one directory.
    ref_base = os.path.join("test_data", "workflow_reference",
                            "flirt_anatomical_linear_registration")

    anat_brain = p.resource_filename(
        "qap", os.path.join(test_sub_dir, "anat_1", "anatomical_brain",
                            "mprage_resample_calc.nii.gz"))

    template_brain = p.resource_filename(
        "qap", os.path.join("test_data", "MNI152_T1_2mm_brain.nii.gz"))

    ref_graph = p.resource_filename(
        "qap", os.path.join(ref_base,
                            "graph_flirt_anatomical_linear_registration.dot"))

    ref_inputs = p.resource_filename(
        "qap", os.path.join(ref_base, "wf_inputs.txt"))

    # build the workflow and return it
    wf, base_dir = run_flirt_anatomical_linear_registration(
        anat_brain, template_brain, False)

    # normalize machine-specific paths in the workflow inputs before
    # comparing them against the reference
    wf_inputs_string = str(wf.inputs).replace("\n", "")
    wf_inputs_string = wf_inputs_string.replace(base_dir,
                                                "base_directory_here")
    wf_inputs_string = wf_inputs_string.replace(anat_brain, "in_file_here")
    wf_inputs_string = wf_inputs_string.replace(template_brain,
                                                "reference_here")

    flag, err = build_test_case(wf, ref_inputs, ref_graph, wf_inputs_string)

    assert flag == 2, err
def test_workflow_segmentation():
    ''' unit test for the segmentation workflow BUILDER '''

    # FIX: removed unused, Python-2-only `import commands`, which raises
    # ImportError under Python 3 and was never referenced.
    import os
    import pkg_resources as p

    from qap.anatomical_preproc import run_segmentation_workflow
    from qap.workflow_utils import build_test_case

    anat_brain = p.resource_filename("qap", os.path.join(test_sub_dir,
                                                         "anat_1",
                                                         "anatomical_brain",
                                                         "mprage_resample_calc.nii.gz"))

    ref_graph = p.resource_filename("qap", os.path.join("test_data",
                                                        "workflow_reference",
                                                        "segmentation",
                                                        "graph_segmentation.dot"))

    ref_inputs = p.resource_filename("qap", os.path.join("test_data",
                                                         "workflow_reference",
                                                         "segmentation",
                                                         "wf_inputs.txt"))

    # build the workflow and return it
    wf, base_dir = run_segmentation_workflow(anat_brain, False)

    # get the workflow inputs of the workflow being tested, normalized so
    # machine-specific paths compare equal to the reference
    wf_inputs_string = str(wf.inputs).replace("\n", "")
    wf_inputs_string = wf_inputs_string.replace(base_dir,
                                                "base_directory_here")

    # the anatomical brain appears as a single-element list in the inputs
    list_input = "['" + anat_brain + "']"
    wf_inputs_string = wf_inputs_string.replace(list_input, "in_files_here")

    flag, err = build_test_case(wf, ref_inputs, ref_graph, wf_inputs_string)

    assert flag == 2, err
def run_all_tests_anatomical_preproc():
    """Run every anatomical preprocessing workflow builder test, in order."""
    for workflow_test in (test_workflow_anatomical_reorient,
                          test_workflow_anatomical_skullstrip,
                          test_workflow_flirt_anatomical_linear_registration,
                          test_workflow_segmentation):
        workflow_test()
|
wangkangcheng/ccc
|
qap/test_anatomical_preproc.py
|
Python
|
bsd-3-clause
| 7,387
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
import evennia.accounts.manager
import django.core.validators
class Migration(migrations.Migration):
    """Drop the Default* proxy models and realign AccountDB's auth fields.

    Removes DefaultGuest/DefaultAccount, attaches the custom
    AccountDBManager as ``objects``, and alters the email, groups,
    last_login and username field definitions.
    """

    dependencies = [("accounts", "0003_auto_20150209_2234")]

    operations = [
        migrations.DeleteModel(name="DefaultGuest"),
        migrations.DeleteModel(name="DefaultAccount"),
        # Register the custom account manager as the default `objects`.
        migrations.AlterModelManagers(
            name="accountdb", managers=[("objects", evennia.accounts.manager.AccountDBManager())]
        ),
        # Widen the email field to 254 characters and allow it to be blank.
        migrations.AlterField(
            model_name="accountdb",
            name="email",
            field=models.EmailField(max_length=254, verbose_name="email address", blank=True),
        ),
        migrations.AlterField(
            model_name="accountdb",
            name="groups",
            field=models.ManyToManyField(
                related_query_name="user",
                related_name="user_set",
                to="auth.Group",
                blank=True,
                help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
                verbose_name="groups",
            ),
        ),
        # last_login becomes nullable (NULL until the first login).
        migrations.AlterField(
            model_name="accountdb",
            name="last_login",
            field=models.DateTimeField(null=True, verbose_name="last login", blank=True),
        ),
        # Username: unique, max 30 chars, restricted to word chars and @/./+/-.
        migrations.AlterField(
            model_name="accountdb",
            name="username",
            field=models.CharField(
                error_messages={"unique": "A user with that username already exists."},
                max_length=30,
                validators=[
                    django.core.validators.RegexValidator(
                        "^[\\w.@+-]+$",
                        "Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.",
                        "invalid",
                    )
                ],
                help_text="Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.",
                unique=True,
                verbose_name="username",
            ),
        ),
    ]
|
jamesbeebop/evennia
|
evennia/accounts/migrations/0004_auto_20150403_2339.py
|
Python
|
bsd-3-clause
| 2,157
|
# this is based in the audio recording example provided by omz (Ole Zorn)
from objc_util import *
import os
def main():
    """Record audio with AVAudioRecorder until the script is stopped, then
    open a quick-look preview of the resulting file (Pythonista on iOS)."""
    AVAudioSession = ObjCClass('AVAudioSession')
    NSURL = ObjCClass('NSURL')
    AVAudioRecorder = ObjCClass('AVAudioRecorder')
    shared_session = AVAudioSession.sharedInstance()
    # NOTE(review): the return value is never checked, so a failure to set
    # the audio session category goes unnoticed -- confirm intended.
    category_set = shared_session.setCategory_error_(ns('AVAudioSessionCategoryPlayAndRecord'), None)
    # AAC format (1633772320 == fourcc 'aac '), 44.1 kHz, stereo.
    settings = {ns('AVFormatIDKey'): ns(1633772320), ns('AVSampleRateKey'):ns(44100.00), ns('AVNumberOfChannelsKey'):ns(2)}
    output_path = os.path.abspath('Recording.m4a')
    out_url = NSURL.fileURLWithPath_(ns(output_path))
    recorder = AVAudioRecorder.alloc().initWithURL_settings_error_(out_url, settings, None)
    started_recording = recorder.record()
    if started_recording:
        print('Recording started, press the "stop script" button to end recording...')
        try:
            # Busy-wait until the user interrupts the script.
            while True:
                pass
        except KeyboardInterrupt:
            print('Stopping...')
            recorder.stop()
            recorder.release()
            print('Stopped recording.')
            import console
            console.quicklook(os.path.abspath('Recording.m4a'))

# Script entry point.
if __name__ == '__main__':
    main()
|
shaun-h/pythonista-objc-utils
|
Audio Recording.py
|
Python
|
mit
| 1,104
|
"""
=========================
PLS Partial Least Squares
=========================
Simple usage of various PLS flavor:
- PLSCanonical
- PLSRegression, with multivariate response, a.k.a. PLS2
- PLSRegression, with univariate response, a.k.a. PLS1
- CCA
Given 2 multivariate covarying two-dimensional datasets, X, and Y,
PLS extracts the 'directions of covariance', i.e. the components of each
datasets that explain the most shared variance between both datasets.
This is apparent on the **scatterplot matrix** display: components 1 in
dataset X and dataset Y are maximally correlated (points lie around the
first diagonal). This is also true for components 2 in both dataset,
however, the correlation across datasets for different components is
weak: the point cloud is very spherical.
"""
print __doc__

import numpy as np
import pylab as pl
from sklearn.pls import PLSCanonical, PLSRegression, CCA

###############################################################################
# Dataset based latent variables model

n = 500
# 2 latents vars:
l1 = np.random.normal(size=n)
l2 = np.random.normal(size=n)

# X and Y share the same two latent variables (each duplicated once) plus
# independent Gaussian noise.
latents = np.array([l1, l1, l2, l2]).T
X = latents + np.random.normal(size=4 * n).reshape((n, 4))
Y = latents + np.random.normal(size=4 * n).reshape((n, 4))

# First half of the samples for training, second half held out for testing.
X_train = X[:n / 2]
Y_train = Y[:n / 2]
X_test = X[n / 2:]
Y_test = Y[n / 2:]

print "Corr(X)"
print np.round(np.corrcoef(X.T), 2)
print "Corr(Y)"
print np.round(np.corrcoef(Y.T), 2)
###############################################################################
# Canonical (symmetric) PLS

# Transform data
# ~~~~~~~~~~~~~~
plsca = PLSCanonical(n_components=2)
plsca.fit(X_train, Y_train)
X_train_r, Y_train_r = plsca.transform(X_train, Y_train)
X_test_r, Y_test_r = plsca.transform(X_test, Y_test)

# Scatter plot of scores
# ~~~~~~~~~~~~~~~~~~~~~~
# 1) on diagonal plot X vs Y scores on each components
pl.subplot(221)
pl.plot(X_train_r[:, 0], Y_train_r[:, 0], "ob", label="train")
pl.plot(X_test_r[:, 0], Y_test_r[:, 0], "or", label="test")
pl.xlabel("y")
pl.ylabel("x")
# BUG FIX: the title must report the correlation between the X and Y test
# scores; the original computed corrcoef(X_test_r, X_test_r), which is
# identically 1 by construction.
pl.title('Comp. 1, corr = %.2f' %
         np.corrcoef(X_test_r[:, 0], Y_test_r[:, 0])[0, 1])
pl.legend()

pl.subplot(224)
pl.plot(X_train_r[:, 1], Y_train_r[:, 1], "ob", label="train")
pl.plot(X_test_r[:, 1], Y_test_r[:, 1], "or", label="test")
pl.xlabel("y")
pl.ylabel("x")
# BUG FIX: same self-correlation bug as above, for the second component.
pl.title('Comp. 2, corr = %.2f' %
         np.corrcoef(X_test_r[:, 1], Y_test_r[:, 1])[0, 1])
pl.legend()

# 2) Off diagonal plot components 1 vs 2 for X and Y
pl.subplot(222)
pl.plot(X_train_r[:, 0], X_train_r[:, 1], "*b", label="train")
pl.plot(X_test_r[:, 0], X_test_r[:, 1], "*r", label="test")
pl.xlabel("X comp. 1")
pl.ylabel("X comp. 2")
pl.title('X, corr = %.2f' % np.corrcoef(X_test_r[:, 0], X_test_r[:, 1])[0, 1])
pl.legend()

pl.subplot(223)
pl.plot(Y_train_r[:, 0], Y_train_r[:, 1], "*b", label="train")
pl.plot(Y_test_r[:, 0], Y_test_r[:, 1], "*r", label="test")
pl.xlabel("Y comp. 1")
pl.ylabel("Y comp. 2")
pl.title('Y, corr = %.2f' % np.corrcoef(Y_test_r[:, 0], Y_test_r[:, 1])[0, 1])
pl.legend()
pl.show()
###############################################################################
# PLS regression, with multivariate response, a.k.a. PLS2

n = 1000
q = 3
p = 10
X = np.random.normal(size=n * p).reshape((n, p))
# True coefficients: every response depends only on the first two features.
B = np.array([[1, 2] + [0] * (p - 2)] * q).T
# each Yj = 1*X1 + 2*X2 + noise
Y = np.dot(X, B) + np.random.normal(size=n * q).reshape((n, q)) + 5

pls2 = PLSRegression(n_components=3)
pls2.fit(X, Y)
print "True B (such that: Y = XB + Err)"
print B
# compare pls2.coefs with B
print "Estimated B"
print np.round(pls2.coefs, 1)
pls2.predict(X)
###############################################################################
# PLS regression, with univariate response, a.k.a. PLS1

n = 1000
p = 10
X = np.random.normal(size=n * p).reshape((n, p))
# Univariate target depending on the first two features plus noise.
y = X[:, 0] + 2 * X[:, 1] + np.random.normal(size=n * 1) + 5

pls1 = PLSRegression(n_components=3)
pls1.fit(X, y)
# note that the number of components exceeds 1 (the dimension of y)
print "Estimated betas"
print np.round(pls1.coefs, 1)
###############################################################################
# CCA (PLS mode B with symmetric deflation)

cca = CCA(n_components=2)
cca.fit(X_train, Y_train)
# BUG FIX: transform with the freshly fitted CCA model; the original
# mistakenly reused the earlier PLSCanonical model (plsca) here, so the
# CCA fit had no effect on the transformed scores.
X_train_r, Y_train_r = cca.transform(X_train, Y_train)
X_test_r, Y_test_r = cca.transform(X_test, Y_test)
|
cdegroc/scikit-learn
|
examples/plot_pls.py
|
Python
|
bsd-3-clause
| 4,317
|
from build_template import *
from optparse import OptionParser
import json
import os
def generate_enum_classfile(json_file, model):
    """Render a PHP enum class file from a JSON enum definition.

    Parameters
    ----------
    json_file : str
        Path to a ``<EnumName>.json`` file.  The enum class name is taken
        from the file name; the output ``<EnumName>.php`` is written into
        the parent of the JSON file's directory.
    model : dict
        Parsed JSON content, passed to the template as the enum's array.
    """
    # FIX: removed a leftover debug print("$") and a stray empty
    # triple-quoted string that served no purpose.
    filename = os.path.basename(json_file)
    parent_dir = os.path.dirname(os.path.dirname(json_file))
    enum_name = filename[:-5]  # strip the ".json" suffix
    enum_file = os.path.join(parent_dir, enum_name + '.php')
    template_context = {
        'enum_name': enum_name,
        'array': model
    }
    create_file_from_template('enum.tpl.php', template_context, enum_file)
# Script entry point: read the JSON enum definition named on the command
# line, generate the PHP class file, and echo the (JSON-encoded) result.
if __name__ == '__main__':
    # sys.argv = ['build_enum.py', '--json=D:\Projects\Badmin\www\defines\const\A.json']
    parser = OptionParser()
    parser.add_option("-j", "--json", action="store",
                      dest="json_file", help="Provide JSON file name")
    options, args = parser.parse_args()
    json_file = options.json_file

    with open(json_file, 'r', encoding='utf-8') as f:
        content = f.read()
        model = json.loads(content)
        # NOTE(review): generate_enum_classfile returns None, so this
        # prints "null" -- confirm whether a real result was intended.
        r = generate_enum_classfile(json_file, model)
        print(json.dumps(r))
|
healerkx/AdminBuildr
|
scripts/build_enum.py
|
Python
|
apache-2.0
| 1,007
|
"""Runs the Treadmill container cleanup job.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import click
from treadmill import appenv
from treadmill import cleanup
from treadmill import cli
from treadmill import utils
def init():
    """Top level command handler."""

    @click.group(name='cleanup')
    def cleanup_grp():
        """Cleanup click group."""

    @cleanup_grp.command('watcher')
    @click.option('--approot', type=click.Path(exists=True),
                  envvar='TREADMILL_APPROOT', required=True)
    def cleanup_watcher(approot):
        """Start cleanup watcher."""
        tm_env = appenv.AppEnvironment(root=approot)
        cleaner = cleanup.Cleanup(tm_env)
        cleaner.run()

    @cleanup_grp.command('instance')
    @click.option('--approot', type=click.Path(exists=True),
                  envvar='TREADMILL_APPROOT', required=True)
    @click.option('--runtime', envvar='TREADMILL_RUNTIME', required=True)
    @click.option('--runtime-param', type=cli.LIST, required=False)
    @click.argument('instance', nargs=1)
    def cleanup_instance(approot, runtime, instance, runtime_param):
        """Actually do the cleanup of the instance.
        """
        # "--runtime-param a=1 b=2" style options become a dict.
        param = utils.equals_list2dict(runtime_param or [])
        tm_env = appenv.AppEnvironment(root=approot)
        cleaner = cleanup.Cleanup(tm_env)
        cleaner.invoke(runtime, instance, param)

    # The local names are only needed so the decorators register the
    # subcommands; presumably deleted to avoid unused-variable warnings.
    del cleanup_watcher
    del cleanup_instance

    return cleanup_grp
|
Morgan-Stanley/treadmill
|
lib/python/treadmill/sproc/cleanup.py
|
Python
|
apache-2.0
| 1,571
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
A Django management command class that handles locking so it doesn't
trigger multiple times when used inside of an external script, such as a
cron job.
If a lock already exists, the command will exit silently.
"""
import abc
from contextlib import contextmanager
from django.core.cache import cache
from django.core.management.base import BaseCommand
@contextmanager
def cache_lock(lock_key):
    """
    A context manager that attempts to create a lock in cache.

    Yields True if the lock was established, or False if a lock already
    existed.
    """
    # FIX: use cache.add, which only sets the key if it is absent and
    # reports whether it did so.  The previous get-then-set sequence was
    # not atomic: two processes could both see no lock and both proceed,
    # defeating the lock entirely.  The 60-second timeout still guarantees
    # the lock expires even if the holder dies without cleaning up.
    if cache.add(lock_key, True, 60):
        try:
            yield True
        finally:
            cache.delete(lock_key)
    else:
        yield False
class CronCommand(BaseCommand):
    """A management command that refuses to run concurrently with itself.

    Subclasses implement ``handle_safe`` instead of ``handle``.  If another
    process currently holds this command's cache lock, the command exits
    silently.
    """

    # Subclasses should override this with a unique, command-specific key.
    lock_key = 'cron-command'

    def get_lock_key(self):
        # Namespaced so it cannot collide with unrelated cache entries.
        return 'command-lock:' + self.lock_key

    def get_lock(self):
        return cache_lock(self.get_lock_key())

    def handle(self, *args, **options):
        with self.get_lock() as safe:
            # NOTE(review): positional *args are discarded when delegating
            # to handle_safe -- confirm this is intended.
            if safe:
                self.handle_safe(**options)

    @abc.abstractmethod
    def handle_safe(self, *args, **options):
        """
        The handle method, but with the assurance that it isn't being executed
        by another process.
        """
|
ericawright/bedrock
|
bedrock/utils/management/cron_command.py
|
Python
|
mpl-2.0
| 1,525
|
# Copyright (c) 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2008-2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
import os
import re
import sys
import convert
import jobfile
from attrdict import attrdict, multiattrdict, optiondict
from code_formatter import code_formatter
from multidict import multidict
from orderdict import orderdict
from smartdict import SmartDict
from sorteddict import SortedDict
# panic() should be called when something happens that should never
# ever happen regardless of what the user does (i.e., an acutal m5
# bug).
def panic(fmt, *args):
    """Print a 'panic:' message to stderr and terminate with status 1.
    For conditions that indicate an internal m5 bug, never user error."""
    print >>sys.stderr, 'panic:', fmt % args
    sys.exit(1)
# fatal() should be called when the simulation cannot continue due to
# some condition that is the user's fault (bad configuration, invalid
# arguments, etc.) and not a simulator bug.
def fatal(fmt, *args):
    """Print a 'fatal:' message to stderr and terminate with status 1.
    For unrecoverable user errors (bad configuration, invalid arguments)."""
    print >>sys.stderr, 'fatal:', fmt % args
    sys.exit(1)
# warn() should be called when the user should be warned about some condition
# that may or may not be the user's fault, but that they should be made aware
# of as it may affect the simulation or results.
def warn(fmt, *args):
    """Print a non-fatal 'warn:' message to stderr and continue."""
    print >>sys.stderr, 'warn:', fmt % args
# inform() should be called when the user should be informed about some
# condition that they may be interested in.
def inform(fmt, *args):
    """Print an informational 'info:' message.
    NOTE(review): unlike the helpers above this targets sys.stdout, which
    appears intentional for info-level output -- confirm."""
    print >>sys.stdout, 'info:', fmt % args
class Singleton(type):
    """Metaclass caching the first instance of each class that uses it;
    every later construction returns that same instance."""
    def __call__(cls, *args, **kwargs):
        try:
            return cls._instance
        except AttributeError:
            cls._instance = super(Singleton, cls).__call__(*args, **kwargs)
            return cls._instance
def addToPath(path):
    """Prepend given directory to system module search path. We may not
    need this anymore if we can structure our config library more like a
    Python package."""
    script_dir = sys.path[0]
    # Relative paths are interpreted relative to the running script's
    # directory when that directory is known.
    if not os.path.isabs(path) and script_dir:
        path = os.path.join(script_dir, path)
    # Insert right after sys.path[0], which is the script's own directory.
    sys.path.insert(1, os.path.realpath(path))
# Apply method to object.
# applyMethod(obj, 'meth', <args>) is equivalent to obj.meth(<args>)
def applyMethod(obj, meth, *args, **kwargs):
    """Invoke the method named *meth* on *obj* with the given arguments."""
    bound = getattr(obj, meth)
    return bound(*args, **kwargs)
# If the first argument is an (non-sequence) object, apply the named
# method with the given arguments. If the first argument is a
# sequence, apply the method to each element of the sequence (a la
# 'map').
def applyOrMap(objOrSeq, meth, *args, **kwargs):
    """Apply the named method to a single object, or to every element of a
    list/tuple of objects (a la 'map'), returning a list in the latter case."""
    call = lambda target: getattr(target, meth)(*args, **kwargs)
    if isinstance(objOrSeq, (list, tuple)):
        return [call(element) for element in objOrSeq]
    return call(objOrSeq)
def compareVersions(v1, v2):
    """helper function: compare arrays or strings of version numbers.
    E.g., compare_version((1,3,25), (1,4,1)')
    returns -1, 0, 1 if v1 is <, ==, > v2
    """
    def make_version_list(v):
        # Normalize either accepted form to a real list of ints.
        if isinstance(v, (list,tuple)):
            return v
        elif isinstance(v, str):
            # A list comprehension (not a bare map()) is required so len()
            # below also works under Python 3, where map() is lazy.
            # Raw string avoids the invalid-escape warning for '\d'.
            return [int(re.match(r'\d+', x).group()) for x in v.split('.')]
        else:
            raise TypeError
    v1 = make_version_list(v1)
    v2 = make_version_list(v2)
    # Compare corresponding elements of lists
    for n1,n2 in zip(v1, v2):
        if n1 < n2: return -1
        if n1 > n2: return 1
    # all corresponding values are equal... see if one has extra values
    if len(v1) < len(v2): return -1
    if len(v1) > len(v2): return 1
    return 0
def crossproduct(items):
    """Yield tuples forming the cartesian product of the given list of
    sequences, leftmost sequence varying slowest."""
    head, rest = items[0], items[1:]
    if not rest:
        for value in head:
            yield (value,)
    else:
        for value in head:
            for tail in crossproduct(rest):
                yield (value,) + tail
def flatten(items):
    """Generator yielding the leaf elements of an arbitrarily nested
    list/tuple structure.  NOTE: destructively consumes (empties) *items*."""
    while items:
        head = items.pop(0)
        if not isinstance(head, (list, tuple)):
            yield head
        else:
            # Splice the nested sequence back onto the front of the queue.
            items[0:0] = head
# force scalars to one-element lists for uniformity
def makeList(objOrList):
    """Coerce a scalar into a one-element list; pass lists through as-is."""
    return objOrList if isinstance(objOrList, list) else [objOrList]
def printList(items, indent=4):
    """Print a comma-separated list of strings, word-wrapped at roughly
    76 columns, every line indented by *indent* spaces."""
    line = ' ' * indent
    for i,item in enumerate(items):
        if len(line) + len(item) > 76:
            # The next item would overflow: flush the current line first.
            print line
            line = ' ' * indent
        if i < len(items) - 1:
            line += '%s, ' % item
        else:
            # Last item gets no trailing separator.
            line += item
    print line
def readCommand(cmd, **kwargs):
    """run the command cmd, read the results and return them
    this is sorta like `cmd` in shell"""
    from subprocess import Popen, PIPE, STDOUT
    if isinstance(cmd, str):
        cmd = cmd.split()
    # If the caller supplied an 'exception' keyword, failure to spawn the
    # command returns that value instead of propagating the exception.
    no_exception = 'exception' in kwargs
    exception = kwargs.pop('exception', None)
    # Defaults: no shell, capture stdout, merge stderr into stdout, and
    # close inherited descriptors in the child.
    kwargs.setdefault('shell', False)
    kwargs.setdefault('stdout', PIPE)
    kwargs.setdefault('stderr', STDOUT)
    kwargs.setdefault('close_fds', True)
    try:
        subp = Popen(cmd, **kwargs)
    except Exception, e:
        if no_exception:
            return exception
        raise
    # Block until the command exits; return its combined stdout+stderr.
    return subp.communicate()[0]
def makeDir(path):
    """Make a directory if it doesn't exist. If the path does exist,
    ensure that it is a directory"""
    if os.path.exists(path):
        if not os.path.isdir(path):
            # Something else (a file, a link) already occupies the name.
            raise AttributeError, "%s exists but is not directory" % path
    else:
        os.mkdir(path)
def isInteractive():
    """Return True when the simulator's original stdin is attached to a
    terminal (interactive use), False in batch environments."""
    original_stdin = sys.__stdin__
    return original_stdin.isatty()
|
Weil0ng/gem5
|
src/python/m5/util/__init__.py
|
Python
|
bsd-3-clause
| 7,742
|
"""
homeassistant.components.notify.free_mobile
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Free Mobile SMS platform for notify component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.free_mobile/
"""
import logging
from homeassistant.components.notify import DOMAIN, BaseNotificationService
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_USERNAME
from homeassistant.helpers import validate_config
# Module-level logger for this notify platform.
_LOGGER = logging.getLogger(__name__)
# Python package Home Assistant installs on demand for this platform.
REQUIREMENTS = ['freesms==0.1.0']
def get_service(hass, config):
    """ Get the Free Mobile SMS notification service. """
    # Both the username and the API access token must be configured.
    config_ok = validate_config({DOMAIN: config},
                                {DOMAIN: [CONF_USERNAME, CONF_ACCESS_TOKEN]},
                                _LOGGER)
    if not config_ok:
        return None
    return FreeSMSNotificationService(config[CONF_USERNAME],
                                      config[CONF_ACCESS_TOKEN])
# pylint: disable=too-few-public-methods
class FreeSMSNotificationService(BaseNotificationService):
    """ Implements notification service for the Free Mobile SMS service. """
    def __init__(self, username, access_token):
        # Imported lazily so REQUIREMENTS can be installed first.
        from freesms import FreeClient
        self.free_client = FreeClient(username, access_token)
    def send_message(self, message="", **kwargs):
        """ Send a message to the Free Mobile user cell. """
        resp = self.free_client.send_sms(message)
        # Log a description for each documented Free Mobile error code.
        error_by_status = {
            400: "At least one parameter is missing",
            402: "Too much SMS send in a few time",
            403: "Wrong Username/Password",
            500: "Server error, try later",
        }
        if resp.status_code in error_by_status:
            _LOGGER.error(error_by_status[resp.status_code])
|
coteyr/home-assistant
|
homeassistant/components/notify/free_mobile.py
|
Python
|
mit
| 1,859
|
# Mesher configuration for the idealized flat-terrain example.
# Input DEM raster used to drive mesh generation.
dem_filename = '../data/ideal_flat.tif'
max_area= 9999999999999999**2 #Effectively unlimited upper area -- allow tolerance check to refine it further
max_tolerance = 5 # 5m max RMSE between triangle and underlying elevation set to -1 to skip tolerance checks
min_area = 5**2 #triangle area below which we will no longer refine, regardless of max_tolerance
# Constrain the mesh to follow the stream network shapefile.
constraints = { 'river_network' :
                {
                    'file': '../data/Stream.shp',
                    'simplify':1 # will be in original projection units
                }
              }
|
Chrismarsh/mesher
|
examples/flat_stream/flat_stream.py
|
Python
|
gpl-3.0
| 515
|
from django.shortcuts import get_object_or_404
from rest_framework import generics, permissions as drf_permissions
from rest_framework.exceptions import NotFound, ValidationError, PermissionDenied
from api.base.exceptions import Gone
from api.base import permissions as base_permissions
from api.base.views import JSONAPIBaseView
from api.comments.permissions import (
CommentDetailPermissions,
CommentReportsPermissions
)
from api.comments.serializers import (
CommentSerializer,
NodeCommentDetailSerializer,
RegistrationCommentDetailSerializer,
CommentReportSerializer,
CommentReportDetailSerializer,
CommentReport
)
from framework.auth.core import Auth
from framework.auth.oauth_scopes import CoreScopes
from framework.exceptions import PermissionsError
from osf.models import AbstractNode, Comment, BaseFileNode
from addons.wiki.models import NodeWikiPage
class CommentMixin(object):
    """Mixin with convenience methods for retrieving the current comment based on the
    current URL. By default, fetches the comment based on the comment_id kwarg.
    """
    serializer_class = CommentSerializer
    comment_lookup_url_kwarg = 'comment_id'
    def get_comment(self, check_permissions=True):
        # Look the comment up by the GUID in the URL; comments whose root
        # target is already NULL are excluded and 404 immediately.
        pk = self.kwargs[self.comment_lookup_url_kwarg]
        comment = get_object_or_404(Comment, guids___id=pk, root_target__isnull=False)
        # Deleted root targets still appear as tuples in the database and are included in
        # the above query, requiring an additional check
        if comment.root_target.referent.is_deleted:
            # Lazily null out the stale root target so future queries skip it.
            comment.root_target = None
            comment.save()
        if comment.root_target is None:
            raise NotFound
        if check_permissions:
            # May raise a permission denied
            self.check_object_permissions(self.request, comment)
        return comment
class CommentDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, CommentMixin):
    """The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/comments_read).
    """
    permission_classes = (
        drf_permissions.IsAuthenticatedOrReadOnly,
        CommentDetailPermissions,
        base_permissions.TokenHasScope,
    )
    required_read_scopes = [CoreScopes.NODE_COMMENTS_READ]
    required_write_scopes = [CoreScopes.NODE_COMMENTS_WRITE]
    serializer_class = NodeCommentDetailSerializer
    view_category = 'comments'
    view_name = 'comment-detail'
    # overrides RetrieveAPIView
    def get_object(self):
        comment = self.get_comment()
        referent = comment.target.referent
        # Resolve the node the comment lives on: the referent itself for
        # node comments, or the referent's node for wiki/file comments.
        if isinstance(referent, AbstractNode):
            target_node = referent
        elif isinstance(referent, (NodeWikiPage, BaseFileNode)):
            target_node = referent.node
        else:
            target_node = None
        # Comments on registrations render through the registration serializer.
        if target_node and target_node.is_registration:
            self.serializer_class = RegistrationCommentDetailSerializer
        return comment
    # overrides RetrieveUpdateDestroyAPIView
    def perform_destroy(self, instance):
        auth = Auth(self.request.user)
        if instance.is_deleted:
            raise ValidationError('Comment already deleted.')
        try:
            instance.delete(auth, save=True)
        except PermissionsError:
            raise PermissionDenied('Not authorized to delete this comment.')
class CommentReportsList(JSONAPIBaseView, generics.ListCreateAPIView, CommentMixin):
    """List of reports made for a comment. *Writeable*.
    Paginated list of reports for a comment. Each resource contains the full representation of the
    report, meaning additional requests to an individual comment's report detail view are not necessary.
    ###Permissions
    The comment reports endpoint can only be viewed by users with permission to comment on the node. Users
    are only shown comment reports that they have made.
    ##Attributes
    OSF comment report entities have the "comment_reports" `type`.
        name           type               description
        =====================================================================================
        category        string            the type of spam, must be one of the allowed values
        message         string            description of why the comment was reported
    ##Links
    See the [JSON-API spec regarding pagination](http://jsonapi.org/format/1.0/#fetching-pagination).
    ##Actions
    ###Create
        Method:        POST
        URL:           /links/self
        Query Params:  <none>
        Body (JSON):   {
                         "data": {
                           "type": "comment_reports",   # required
                           "attributes": {
                             "category":       {category},        # mandatory
                             "message":        {text},            # optional
                           }
                         }
                       }
        Success:       201 CREATED + comment report representation
    To create a report for this comment, issue a POST request against this endpoint. The `category` field is mandatory,
    and must be one of the following: "spam", "hate" or "violence" . The `message` field is optional. If the comment
    report creation is successful the API will return a 201 response with the representation of the new comment report
    in the body. For the new comment report's canonical URL, see the `/links/self` field of the response.
    ##Query Params
    *None*.
    #This Request/Response
    """
    permission_classes = (
        drf_permissions.IsAuthenticated,
        CommentReportsPermissions,
        base_permissions.TokenHasScope,
    )
    required_read_scopes = [CoreScopes.COMMENT_REPORTS_READ]
    required_write_scopes = [CoreScopes.COMMENT_REPORTS_WRITE]
    serializer_class = CommentReportSerializer
    view_category = 'comments'
    view_name = 'comment-reports'
    ordering = ('-modified',)
    def get_queryset(self):
        # Users are only ever shown their own report on a comment, so the
        # resulting list is at most one element long.
        user_id = self.request.user._id
        comment = self.get_comment()
        reports = comment.reports
        serialized_reports = []
        if user_id in reports:
            report = CommentReport(user_id, reports[user_id]['category'], reports[user_id]['text'])
            serialized_reports.append(report)
        return serialized_reports
class CommentReportDetail(JSONAPIBaseView, generics.RetrieveUpdateDestroyAPIView, CommentMixin):
    """Details about a specific comment report. *Writeable*.
    ###Permissions
    A comment report detail can only be viewed, edited and removed by the user who created the report.
    ##Attributes
    OSF comment report entities have the "comment_reports" `type`.
        name           type               description
        =====================================================================================
        category        string            the type of spam, must be one of the allowed values
        message         string            description of why the comment was reported
    ##Links
        self:  the canonical api endpoint of this comment report
    ##Actions
    ###Update
        Method:        PUT / PATCH
        URL:           /links/self
        Query Params:  <none>
        Body (JSON):   {
                         "data": {
                           "type": "comment_reports",   # required
                           "id":   {user_id},           # required
                           "attributes": {
                             "category":       {category},        # mandatory
                             "message":        {text},            # optional
                           }
                         }
                       }
        Success:       200 OK + comment report representation
    To update a report for this comment, issue a PUT/PATCH request against this endpoint. The `category` field is
    mandatory for a PUT request and must be one of the following: "spam", "hate" or "violence". The `message` field
    is optional. Non-string values will be accepted and stringified, but we make no promises about the stringification
    output. So don't do that.
    ###Delete
        Method:        DELETE
        URL:           /links/self
        Query Params:  <none>
        Success:       204 + No content
    To delete a comment report, issue a DELETE request against `/links/self`. A successful delete will return a
    204 No Content response.
    ##Query Params
    *None*.
    #This Request/Response
    """
    permission_classes = (
        drf_permissions.IsAuthenticated,
        CommentReportsPermissions,
        base_permissions.TokenHasScope,
    )
    required_read_scopes = [CoreScopes.COMMENT_REPORTS_READ]
    required_write_scopes = [CoreScopes.COMMENT_REPORTS_WRITE]
    serializer_class = CommentReportDetailSerializer
    view_category = 'comments'
    view_name = 'report-detail'
    # overrides RetrieveUpdateDestroyAPIView
    def get_object(self):
        comment = self.get_comment()
        reports = comment.reports
        user_id = self.request.user._id
        reporter_id = self.kwargs['user_id']
        # Only the reporter may view their own report.
        # NOTE(review): this message mentions commenting, but the check
        # guards report access -- consider rewording.
        if reporter_id != user_id:
            raise PermissionDenied('Not authorized to comment on this project.')
        if reporter_id in reports:
            return CommentReport(user_id, reports[user_id]['category'], reports[user_id]['text'])
        else:
            raise Gone(detail='The requested comment report is no longer available.')
    # overrides RetrieveUpdateDestroyAPIView
    def perform_destroy(self, instance):
        user = self.request.user
        comment = self.get_comment()
        try:
            comment.retract_report(user, save=True)
        except ValueError as error:
            # NOTE(review): 'error.message' is Python 2-only; str(error)
            # would be required under Python 3 -- confirm target runtime.
            raise ValidationError(error.message)
|
chennan47/osf.io
|
api/comments/views.py
|
Python
|
apache-2.0
| 9,881
|
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from resources.datatables import FactionStatus
from java.util import Vector
def addTemplate(core):
    """Build and register the imperial assassin-mission recruiter mobile."""
    recruiter = MobileTemplate()
    recruiter.setCreatureName('assassin_mission_recruiter_imperial')
    recruiter.setLevel(16)
    recruiter.setDifficulty(Difficulty.NORMAL)
    recruiter.setMinSpawnDistance(4)
    recruiter.setMaxSpawnDistance(8)
    recruiter.setDeathblow(False)
    recruiter.setScale(1)
    recruiter.setSocialGroup("imperial")
    recruiter.setAssistRange(6)
    recruiter.setStalker(False)
    recruiter.setFaction("imperial")
    recruiter.setFactionStatus(FactionStatus.Combatant)
    # Visual appearance template(s).
    model_templates = Vector()
    model_templates.add('object/mobile/shared_dressed_npe_imperial_officer.iff')
    recruiter.setTemplates(model_templates)
    # Armament: a single E-11 carbine.
    weapon_templates = Vector()
    e11_carbine = WeaponTemplate('object/weapon/ranged/carbine/shared_carbine_e11.iff', WeaponType.CARBINE, 1.0, 15, 'energy')
    weapon_templates.add(e11_carbine)
    recruiter.setWeaponTemplateVector(weapon_templates)
    # No special attacks; default ranged shot only.
    attack_list = Vector()
    recruiter.setDefaultAttack('rangedShot')
    recruiter.setAttacks(attack_list)
    core.spawnService.addMobileTemplate('imp_stealth_op', recruiter)
    return
|
agry/NGECore2
|
scripts/mobiles/generic/faction/imperial/imp_stealth_operative.py
|
Python
|
lgpl-3.0
| 1,435
|
# coding: utf-8
#
# basiclib.py
#
# Author: Huang Anbu
# Date: 2017.3
#
# Description: Layer architecture
#
# - HiddenLayer
# - LogisticRegression: output layer
# - DropoutHiddenLayer: HiddenLayer with dropout
# - MLPDropout: multilayer perceptron with dropout
# - LeNetConvPoolLayer: modified from http://www.deeplearning.net/tutorial/lenet.html
#
# Copyright©2017. All Rights Reserved.
# ===============================================================================================
from basiclib import *
class HiddenLayer(object):
    """Fully-connected layer computing output = activation(input . W + b)."""
    def __init__ (self, rng, input, n_input, n_output, activation=T.nnet.relu, W=None, b=None):
        self.input = input
        self.n_input = n_input
        self.n_output = n_output
        if W is None:
            if activation==T.nnet.relu:
                # Small-scale gaussian init, the usual choice for ReLU units.
                W = (0.01*rng.standard_normal(size=(n_input, n_output))).astype(theano.config.floatX)
            else:
                # Glorot-style uniform init in +/- sqrt(6/(fan_in+fan_out)).
                W = rng.uniform(
                    low = -numpy.sqrt(6.0/(n_input+n_output)),
                    high = numpy.sqrt(6.0/(n_input+n_output)),
                    size = (n_input, n_output)
                ).astype(theano.config.floatX)
                if activation==T.nnet.sigmoid:
                    # Classic 4x widening of the Glorot range for sigmoid.
                    W *= 4.0
            W = theano.shared(value=W, name='Hidden_W')
        self.W = W
        if b is None:
            b = numpy.zeros(shape=(n_output, )).astype(theano.config.floatX)
            b = theano.shared(value=b, name='Hidden_b')
        self.b = b
        # Trainable parameters in the order optimizers expect.
        self.params = [self.W, self.b]
        self.output = activation(T.dot(input, self.W) + self.b)
class LogisticRegression(object):
    """Softmax output layer with negative-log-likelihood loss."""
    def __init__(self, input, n_input, n_output, W=None, b=None):
        self.input = input
        self.n_input = n_input
        self.n_output = n_output
        if W is None:
            zero_W = numpy.zeros(shape=(n_input, n_output)).astype(theano.config.floatX)
            W = theano.shared(value=zero_W, name='Logistic_W')
        self.W = W
        if b is None:
            zero_b = numpy.zeros(shape=(n_output, )).astype(theano.config.floatX)
            b = theano.shared(value=zero_b, name='Logistic_b')
        self.b = b
        self.params = [self.W, self.b]
        # Class-membership probabilities and hard predictions.
        self.p_y_given_x = T.nnet.softmax(T.dot(input, self.W)+self.b)
        self.p_pred = T.argmax(self.p_y_given_x, axis=1)
    def cross_entropy(self, y):
        """Mean negative log-probability assigned to the true labels y."""
        log_prob = T.log(self.p_y_given_x)
        return -T.mean(log_prob[T.arange(y.shape[0]), y])
    def errors(self, y):
        """Mean 0/1 misclassification rate against labels y."""
        return T.mean(T.neq(self.p_pred, y))
def _dropout_from_layer(rng, layer, p):
    """Randomly zero units of *layer*, keeping each with probability p."""
    stream = RandomStreams(rng.randint(12345678))
    mask = stream.binomial(n=1, p=p)
    return (layer * mask).astype(theano.config.floatX)
class DropoutHiddenLayer(HiddenLayer):
    """HiddenLayer whose activations are masked by dropout."""
    def __init__ (self, rng, input, n_input, n_output, dropout_rate=0.8, activation=T.nnet.relu, W=None, b=None):
        # Build the ordinary hidden layer, then mask its output.
        HiddenLayer.__init__(
            self, rng=rng, input=input, n_input=n_input, n_output=n_output,
            W=W, b=b, activation=activation
        )
        self.output = _dropout_from_layer(rng, self.output, dropout_rate)
class MLPDropout(object):
    """MLP trained with dropout: a dropout stack and a weight-sharing
    'clean' stack are built in parallel; training losses come from the
    dropout stack, inference from the rescaled clean stack."""
    def __init__ (self, rng, input, n_input, n_hiddens, n_output, dropout_rates):
        layer_sizes = [n_input] + n_hiddens + [n_output]
        # Python 2: zip() returns a list, which is sliced/indexed below.
        self.weight_matrix_size = zip(layer_sizes[:-1], layer_sizes[1:])
        self.hidden_layers, self.dropout_hidden_layers = [], []
        for idx, (n_in, n_out) in enumerate(self.weight_matrix_size[:-1]):
            if idx == 0:
                # First layer feeds on the (optionally dropped-out) input.
                next_input, next_dropout_input = input, _dropout_from_layer(rng, input, dropout_rates[idx])
            else:
                next_input, next_dropout_input = self.hidden_layers[-1].output, self.dropout_hidden_layers[-1].output
            self.dropout_hidden_layers.append(DropoutHiddenLayer(rng, next_dropout_input, n_in, n_out, dropout_rates[idx+1], T.nnet.relu))
            # Clean stack reuses the dropout stack's weights, rescaled.
            # NOTE(review): scaling W by the whole 'dropout_rates' list looks
            # unintended -- the standard rescaling uses a single rate (e.g.
            # dropout_rates[idx+1]); confirm against training behavior.
            self.hidden_layers.append(HiddenLayer(
                rng, next_input, n_in, n_out, T.nnet.relu,
                self.dropout_hidden_layers[-1].W * dropout_rates,
                self.dropout_hidden_layers[-1].b
            )
            )
        n_in, n_out = self.weight_matrix_size[-1]
        self.dropout_output_layer = LogisticRegression(self.dropout_hidden_layers[-1].output, n_in, n_out)
        self.output_layer = LogisticRegression(self.hidden_layers[-1].output, n_in, n_out,
            self.dropout_output_layer.W*dropout_rates, self.dropout_output_layer.b
        )
        # Expose the losses/metrics of both stacks under stable names.
        self.cross_entropy = self.output_layer.cross_entropy
        self.errors = self.output_layer.errors
        self.dropout_cross_entropy = self.dropout_output_layer.cross_entropy
        self.dropout_errors = self.dropout_output_layer.errors
        # Only dropout-stack parameters are trained; the clean stack shares them.
        self.params = [param for layer in self.dropout_hidden_layers for param in layer.params]
        self.params.extend(self.dropout_output_layer.params)
class LeNetConvPoolLayer(object):
    """Convolution + max-pooling layer: conv -> ReLU -> pool_2d."""
    def __init__(self, rng, input, filter_shape, image_shape, poolsize=(2, 2)):
        self.input = input
        # fan-in/fan-out for the Glorot-style uniform weight range.
        fan_in = numpy.prod(filter_shape[1:])
        fan_out = (filter_shape[0] * numpy.prod(filter_shape[2:]) // numpy.prod(poolsize))
        W_bound = numpy.sqrt(6. / (fan_in + fan_out))
        self.W = theano.shared(
            value=numpy.asarray(rng.uniform(low=-W_bound, high=W_bound, size=filter_shape), dtype=theano.config.floatX),
            name='conv_w', borrow=True
        )
        b_values = numpy.zeros((filter_shape[0],), dtype=theano.config.floatX)
        self.b = theano.shared(value=b_values, name='conv_b', borrow=True)
        conv_out = conv2d(
            input=input,
            filters=self.W,
            filter_shape=filter_shape,
            input_shape=image_shape
        )
        # Bias is broadcast across the batch and spatial dimensions.
        self.conv_output = T.nnet.relu(conv_out+self.b.dimshuffle('x', 0, 'x', 'x'))
        pooled_out = pool.pool_2d(
            input=self.conv_output,
            ds=poolsize,
            ignore_border=True
        )
        #self.output = T.tanh(pooled_out + self.b.dimshuffle('x', 0, 'x', 'x'))
        self.output = pooled_out
        self.params = [self.W, self.b]
        self.input = input
|
innovation-cat/DeepLearningBook
|
sentiment analysis/cnn_model.py
|
Python
|
mit
| 5,410
|
from a10sdk.common.A10BaseClass import A10BaseClass
class AddressList(A10BaseClass):
    """This class does not support CRUD Operations please use parent.
    :param ipv4_address: {"type": "string", "description": "IP address", "format": "ipv4-address"}
    :param ipv4_netmask: {"type": "string", "description": "IP subnet mask", "format": "ipv4-netmask"}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
    """
    def __init__(self, **kwargs):
        # Fixed defaults for this helper object.
        self.ERROR_MSG = ""
        self.b_key = "address-list"
        self.DeviceProxy = ""
        self.ipv4_address = ""
        self.ipv4_netmask = ""
        # Any keyword argument overrides the attribute of the same name.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
class Ip(A10BaseClass):
    """Class Description::
    Global IP configuration subcommands.
    Class ip supports CRUD Operations and inherits from `common/A10BaseClass`.
    This class is the  `"PARENT"` class for this module.`
    :param address_list: {"minItems": 1, "items": {"type": "object"}, "uniqueItems": true, "type": "array", "array": [{"properties": {"ipv4-address": {"type": "string", "description": "IP address", "format": "ipv4-address"}, "optional": true, "ipv4-netmask": {"type": "string", "description": "IP subnet mask", "format": "ipv4-netmask"}}}]}
    :param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
    :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
    URL for this object::
    `https://<Hostname|Ip address>//axapi/v3/interface/loopback/{ifnum}/ip`.
    """
    def __init__(self, **kwargs):
        # Fixed defaults describing this resource and its REST endpoint.
        self.ERROR_MSG = ""
        self.required = []
        self.b_key = "ip"
        self.a10_url = "/axapi/v3/interface/loopback/{ifnum}/ip"
        self.DeviceProxy = ""
        self.address_list = []
        self.ospf = {}
        self.uuid = ""
        self.rip = {}
        self.router = {}
        # Any keyword argument overrides the attribute of the same name.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
|
amwelch/a10sdk-python
|
a10sdk/core/interface/interface_loopback_ip.py
|
Python
|
apache-2.0
| 2,191
|
import requests
import re
import os
import win32com.client
import time
import sys
from html.parser import HTMLParser
#class to parse the torrent sites and retrieve magnet links
class MyHTMLParser(HTMLParser):
    """Collects magnet links and links to known torrent sites from
    the <a href=...> tags of a fed HTML page."""
    # Hostname fragments of the trusted torrent sites whose links we collect.
    _TRACKER_HOSTS = ('1337x', 'kickass.to', 'thepiratebay.org',
                      'h33t.to', 'torrentreactor.net')
    # Class-level defaults kept for backward compatibility; the real state
    # is per-instance (set in __init__) so parsers no longer share one
    # mutable 'urls' list across instances.
    magnet = ""
    urls = []
    def __init__(self, *args, **kwargs):
        HTMLParser.__init__(self, *args, **kwargs)
        self.magnet = ""   # last magnet: URI seen
        self.urls = []     # trusted-site links seen, in document order
    #function to retrieve magnet link by checking the a href tags
    def handle_starttag(self, tag, attrs):
        if tag != 'a':
            return
        for name, value in attrs:
            # Guard against valueless href attributes (value is None).
            if name != 'href' or value is None:
                continue
            if value.startswith('magnet:'):
                self.magnet = value
            elif any(host in value for host in self._TRACKER_HOSTS):
                self.urls.append(value)
#function to get content from the websites as a text using 'requests'
def get_content(url):
    """Download *url* and return the response body as text."""
    return requests.get(url).text
#general function for matching a pattern and returning the first match
def match_pattern(pattern, content):
    """Return the text of the first regex match of *pattern* in *content*."""
    return re.search(pattern, content).group()
show_name = ""
parser = MyHTMLParser()
# Read the previously-saved show name; user_input.py writes it as
# "<label>: <name>", so everything after ': ' is the search term.
try:
    file = open("showname.txt", "r")
    file_content = file.readline()
    index = file_content.find(':')
    show_name = file_content[index+2:]
    file.close()
except FileNotFoundError:
    print('Please use user_input.py to specify the torrent you wish to download.')
    time.sleep(10)
    sys.exit()
search_url = "http://torrentz.in/search?f="+show_name
#get content of the torrent search results
main_content = get_content(search_url)
index = main_content.find('peers')
#all of the torrent links on the search results page for torrentz.in
#are in the form of '/40 alphanumerics' hence the regex to match
#and retrieve the first match as it is the one with highest peers
search_url = "http://torrentz.in/" + match_pattern("[a-zA-Z0-9]{40}",
             main_content[index:-1])
#the main_content will now contain the page for torrent specific links i.e.
#links for a particular torrent such as 1337x h33t kickass etc
main_content = get_content(search_url)
#used the parser to find out if torrent is available at trusted sites
parser.feed(main_content)
# NOTE(review): parser.urls may be empty when no trusted site carries the
# torrent; parser.urls[0] would then raise IndexError.
#the main content will now contain the actual webpage where the magnet is found
main_content = get_content(parser.urls[0])
#feed the parser the webpage to find the magnet
parser.feed(main_content)
#start application associated with magnet eg bitcomet,utorrent
os.startfile(parser.magnet)
shell = win32com.client.Dispatch('WScript.Shell')
time.sleep(7)
#send an Enter key to the bitcomet to start the download
shell.SendKeys("{Enter}", 0)
|
hrishikeshsathe/py-auto-downloader
|
py_scheduler.py
|
Python
|
mit
| 2,988
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    # Typing-only imports: seen by static checkers, skipped at runtime.
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
    T = TypeVar('T')
    # Shape of the optional response-transform callback accepted via 'cls'.
    ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VMHostOperations(object):
    """VMHostOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.elastic.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    def list(
        self,
        resource_group_name, # type: str
        monitor_name, # type: str
        **kwargs # type: Any
    ):
        # type: (...) -> Iterable["_models.VMHostListResponse"]
        """List the vm resources currently being monitored by the Elastic monitor resource.
        List the vm resources currently being monitored by the Elastic monitor resource.
        :param resource_group_name: The name of the resource group to which the Elastic resource
         belongs.
        :type resource_group_name: str
        :param monitor_name: Monitor resource name.
        :type monitor_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either VMHostListResponse or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.elastic.models.VMHostListResponse]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VMHostListResponse"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"
        # Builds the first-page POST request, or a GET for follow-up pages
        # addressed by the service-provided next_link.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'monitorName': self._serialize.url("monitor_name", monitor_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.post(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        # Deserializes one page into (next_link, iterator of items).
        def extract_data(pipeline_response):
            deserialized = self._deserialize('VMHostListResponse', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        # Fetches a single page, raising a typed error on non-200 responses.
        def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                error = self._deserialize.failsafe_deserialize(_models.ResourceProviderDefaultErrorResponse, response)
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Elastic/monitors/{monitorName}/listVMHost'}  # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/elastic/azure-mgmt-elastic/azure/mgmt/elastic/operations/_vm_host_operations.py
|
Python
|
mit
| 5,849
|
#!/usr/bin/env python
#coding=utf8
#负责模拟登陆
#
import pycurl
import StringIO
from urllib import urlencode
import get_login_code
def GetLoginInfo(post_data,cookie_file,proxy):
    """POST the login form to bjguahao.gov.cn via pycurl and return the raw
    response body (as returned by the server, typically GBK-encoded).

    post_data   -- dict of login form fields (truename, sfzhm, yzm)
    cookie_file -- path used both to load and to persist session cookies
    proxy       -- proxy URL string, or a falsy value for a direct connection
    """
    login_url = "http://www.bjguahao.gov.cn/comm/logon.php"
    #cookie_file = './Cookie/cookie.txt'
    ch = pycurl.Curl()
    buffer_con = StringIO.StringIO()
    # Headers mimic a browser XHR from the site's own login page.
    header = [
        "Content-Type: application/x-www-form-urlencoded; charset=UTF-8",
        "Accept: */*",
        "Cache-Control:max-age=0",
        "Connection:keep-alive",
        "Host:www.bjguahao.gov.cn",
        "User-Agent:Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.153 Safari/537.36",
        "X-Requested-With:XMLHttpRequest",
        "Referer:http://www.bjguahao.gov.cn/comm/index.html",
        "Origin:http://www.bjguahao.gov.cn",
    ]
    # Example payload (for reference):
    # post_data = {
    #     "truename":"...",
    #     "sfzhm":"...",
    #     "yzm":"4811"
    # }
    post_data = urlencode(post_data)
    ch.setopt(ch.URL, login_url)
    ch.setopt(ch.VERBOSE, 1) # dump HTTP transfer details to stderr
    ch.setopt(ch.FOLLOWLOCATION, 1)
    ch.setopt(ch.HTTPHEADER, header)
    ch.setopt(ch.WRITEFUNCTION, buffer_con.write)
    ch.setopt(ch.POSTFIELDS, post_data) # url-encoded form body to send
    ch.setopt(ch.COOKIEFILE, cookie_file)
    ch.setopt(ch.COOKIEJAR, cookie_file) # persist cookies back to the same file
    #ch.setopt(ch.PROXY, 'http://125.46.100.198:9999') # (example proxy, disabled)
    if proxy : ch.setopt(ch.PROXY, proxy) # optional proxy server
    ch.perform()
    html=buffer_con.getvalue()
    buffer_con.close()
    ch.close()
    return html
def Login(truename,id_card_num,cookie_file,code_img,proxy):
    """Log in with a real name and ID-card number; return the response body
    transcoded from GBK to UTF-8.

    The captcha is fetched first (saved to *code_img*), which also primes the
    session cookie stored in *cookie_file*.
    """
    post_data = {}
    post_data['truename'] = truename
    post_data['sfzhm'] = id_card_num
    # Fetch/solve the captcha before submitting the form.
    post_data['yzm'] = get_login_code.GetLoginCode(cookie_file,code_img,proxy)
    login_info = GetLoginInfo(post_data,cookie_file,proxy)
    # Server responds in GBK; re-encode as UTF-8 for downstream consumers.
    login_info = unicode(login_info,"gbk").encode('utf8')
    return login_info
if __name__ == '__main__':
    # Manual smoke test: log in with hard-coded credentials and dump the
    # server response to a file for inspection.
    #login_info = Login('张佳','640321199001020977','cookie.txt','login_code_img.gif','http://125.46.100.198:9999')
    login_info = Login('李晨辉','142726199305301214','cookie.txt','login_code_img.gif',False)
    fo = open('login_response.txt', 'w')
    fo.write(login_info)
    fo.close()
    print login_info
|
lichenhui/guahao
|
guahao_beijing/login.py
|
Python
|
apache-2.0
| 2,160
|
"""
Karl Persson, Mac OSX 10.8.4/Windows 8, Python 2.7.5, Pygame 1.9.2pre
Class handling menues and menu interactions
"""
import pygame
from pygame.locals import *
import PictureElement, TextElement
# Class handling all menu elements
class Menues():
    """Owns all menu screens (main, level won/lost, game won), slides them
    in/out of view and dispatches menu button clicks.

    self.state: 1 = opening (sliding in), -1 = closing (sliding out), 0 = idle.
    """

    def __init__(self, fileManager, levelManager, startLevel=0):
        # Saving references to file- and levelmanager etc.
        self.fileManager = fileManager
        self.levelManager = levelManager
        self.startLevel = startLevel
        self.WINDOW_SIZE = (800, 600)
        # Setting standard font for menu alternatives
        self.font = pygame.font.Font('neuropol.ttf', 40)
        # Set all active buttons to None
        self.__resetActiveElements()
        # 1 = Opening, -1 = Closing
        self.state = 0
        # Initializing generic menu position (X)
        self.menuCenter = self.WINDOW_SIZE[0]/2
        # Bug fix: give the hide animation a default speed. Previously
        # self.hideVariable was only assigned inside mouseClicked(), so a
        # call to close() followed by update() raised AttributeError in
        # __updatePosition().
        self.hideVariable = 1.0

    # Showing main menu
    def main(self):
        """Build and open the main menu (start button, logo, instructions)."""
        # Creating start-button
        self.buttons['start'] = TextElement.TextElement((self.WINDOW_SIZE[0]/2,280), 'Start', self.font)
        # Adding start button to active buttons (sprite group), for rendering etc.
        self.activeElements.add(self.buttons['start'])
        # Adding logo
        self.activeElements.add(PictureElement.PictureElement(self.fileManager.logoTexture, (self.WINDOW_SIZE[0]/2, 150)))
        # Adding instructions
        self.activeElements.add(PictureElement.PictureElement(self.fileManager.instructionsTexture, (self.WINDOW_SIZE[0]/2, 420)))
        # Adding music info (creative commons)
        self.activeElements.add(PictureElement.PictureElement(self.fileManager.ccMusicTexture, (self.WINDOW_SIZE[0]/2, self.WINDOW_SIZE[1]-self.fileManager.ccMusicTexture.get_height())))
        # Opening menu
        self.state = 1
        self.__setHidden()

    # Showing level won menu
    def levelWon(self):
        """Build and open the 'level won' menu with a next-level button."""
        # Creating next-button
        self.buttons['next'] = TextElement.TextElement((self.WINDOW_SIZE[0]/2, 280), 'Next level', self.font)
        self.activeElements.add(self.buttons['next'])
        # Resetting start level
        self.startLevel = 0
        # Opening menu
        self.state = 1
        self.__setHidden()

    # Showing level lost menu
    def levelLost(self):
        """Build and open the 'level lost' menu with a restart button."""
        # Creating restart-button
        self.buttons['restart'] = TextElement.TextElement((self.WINDOW_SIZE[0]/2, 280), 'Restart level', self.font)
        self.activeElements.add(self.buttons['restart'])
        # Opening menu
        self.state = 1
        self.__setHidden()

    # Showing game won menu
    def gameWon(self):
        """Build and open the 'game won' menu with a play-again button."""
        # Creating next-button
        self.buttons['start'] = TextElement.TextElement((self.WINDOW_SIZE[0]/2, 280), 'Play again', self.font)
        self.activeElements.add(self.buttons['start'])
        # Opening menu
        self.state = 1
        self.__setHidden()

    # Closing menu
    def close(self):
        """Start the closing (slide-out) animation."""
        self.state = -1

    # Updating and handling mouse-over (hovering)
    def update(self, pos=(-1, -1)):
        """Advance the slide animation and propagate hover state to elements."""
        self.__updatePosition()
        # Updating all active elements
        for element in self.activeElements:
            element.update(pos)

    # Handling mouse-clicks (choosing menu alternatives)
    def mouseClicked(self, pos):
        """Dispatch a click at *pos* to whichever menu button it hit."""
        # Checking all buttons for mouse clicks
        if(self.buttons['start'] != None and self.buttons['start'].mouseOver(pos)):
            # Setting starting level
            self.levelManager.setLevel(self.startLevel)
            # Removing menu
            self.state = -1
            # Variable to aid the hide animation
            self.hideVariable = 2
        elif(self.buttons['restart'] != None and self.buttons['restart'].mouseOver(pos)):
            # Restart
            self.levelManager.restartLevel()
            # Removing menu
            self.state = -1
            # Variable to aid the hide animation
            self.hideVariable = 2
        elif(self.buttons['next'] != None and self.buttons['next'].mouseOver(pos)):
            # Next
            self.levelManager.setNext()
            # Removing menu
            self.state = -1
            # Variable to aid the hide animation
            self.hideVariable = 1.0

    # Checking if any menu is active
    def menuActive(self):
        """Return True while any menu elements are on screen."""
        return len(self.activeElements) > 0

    # Updating position depending on state
    def __updatePosition(self):
        """Move the menu toward/away from the screen center per self.state."""
        # Move in
        if self.state == 1:
            # Checking if it's in position
            if self.__getTotalCenter() < self.menuCenter:
                for element in self.activeElements:
                    # Ease in: step is proportional to remaining distance.
                    element.posX += (self.menuCenter - element.posX) * 0.1
            else:
                self.state = 0
        # Move out
        elif self.state == -1:
            # Moving out if it's not already outside window. Otherwise resetting buttons.
            if ((self.__getTotalCenter() - self.__getTotalWidth()/2) < self.WINDOW_SIZE[0]):
                for element in self.activeElements:
                    # Accelerating slide to the right.
                    self.hideVariable *= 1.2
                    element.posX += self.hideVariable
            else:
                self.__resetActiveElements()

    # Get total center of the menu (X)
    def __getTotalCenter(self):
        """Return the average X center of all active elements (0 if none)."""
        tot = 0.0
        if(len(self.activeElements) > 0):
            for element in self.activeElements:
                tot += element.rect.centerx
            return int(tot/len(self.activeElements))
        else:
            return 0

    # Get total width of the menu
    def __getTotalWidth(self):
        """Return the width of the bounding span of all active elements."""
        absoluteLeft = 0
        absoluteRight = 0
        firstTest = True
        # Fetching extreme values
        for element in self.activeElements:
            # Controlling left
            if element.rect.left < absoluteLeft:
                absoluteLeft = element.rect.left
            # Controlling right
            if element.rect.right > absoluteRight:
                absoluteRight = element.rect.right
            if firstTest:
                # Seed the extremes from the first element regardless of the
                # comparisons above.
                absoluteLeft = element.rect.left
                absoluteRight = element.rect.right
                firstTest = False
        return absoluteRight-absoluteLeft

    # Hide menu on the left side of the screen
    def __setHidden(self):
        """Park the whole menu just off-screen left, ready to slide in."""
        # Calculating movement
        moveAmount = 0 - (self.__getTotalCenter()+self.__getTotalWidth())
        # Making movement
        for element in self.activeElements:
            element.posX += moveAmount
            element.refreshPosition()

    # Rendering elements to screen
    def render(self, screen):
        """Draw all active menu elements onto *screen*."""
        self.activeElements.draw(screen)

    # Resetting all active buttons to None
    def __resetActiveElements(self):
        """Clear all buttons and drop every active element."""
        self.buttons = {'start': None, 'restart': None, 'next': None}
        self.activeElements = pygame.sprite.RenderPlain()

    # Get all active rects
    def getRects(self):
        """Return the rects of all active elements (for dirty-rect drawing)."""
        rects = []
        for element in self.activeElements:
            rects.append(element.rect)
        return rects
|
KPRSN/Pulse
|
Pulse/Menues.py
|
Python
|
mit
| 7,244
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
def get_ki2_list(parser):
    """Return the sorted paths of every KI2 kifu file under the 2chkifu tree.

    Registers the ``-p/--path_2chkifu`` option on *parser*, parses the
    command line, then walks the five fixed sub-directories of the
    extracted 2chkifu archive and collects the files they contain.
    """
    parser.add_argument('-p', '--path_2chkifu',
                        default='~/data/shogi/2chkifu/',
                        help='2chkifu.zipを展開したディレクトリ')
    root = parser.parse_args().path_2chkifu

    ki2_paths = []
    # The archive always ships these five numbered sub-directories.
    for subdir in ('00001', '10000', '20000', '30000', '40000'):
        full_dir = os.path.expanduser(os.path.join(root, subdir))
        ki2_paths.extend(os.path.join(full_dir, name)
                         for name in os.listdir(full_dir))

    return sorted(ki2_paths)
|
tosh1ki/pyogi
|
doc/sample_code/get_ki2_list.py
|
Python
|
mit
| 741
|
# Copyright 2015 Allen Institute for Brain Science
# This file is part of Allen SDK.
#
# Allen SDK is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# Allen SDK is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Allen SDK. If not, see <http://www.gnu.org/licenses/>.
import json, logging
from allensdk.api.queries.rma_api import RmaApi
class GlifApi(RmaApi):
    """RMA API client for GLIF (generalized leaky integrate-and-fire)
    neuronal models: list models, fetch per-model metadata, and download
    the neuron configuration and NWB stimulus files.
    """

    def __init__(self, base_uri=None):
        # Bug fix: the original called super(RmaApi, self).__init__(...),
        # which resolves to RmaApi's *parent* and therefore skipped
        # RmaApi.__init__ entirely. Pass the current class instead.
        super(GlifApi, self).__init__(base_uri)

        # Populated by get_neuronal_model().
        self.neuronal_model = None
        self.ephys_sweeps = None
        self.stimulus_url = None
        self.neuron_config_url = None

    def list_neuronal_models(self):
        ''' Query the API for a list of all GLIF neuronal models.

        Returns
        -------
        list
            Meta data for all GLIF neuronal models.
        '''
        include = "specimen(ephys_result),neuronal_model_template[name$il'*LIF*']"
        return self.model_query('NeuronalModel', include=include, num_rows='all')

    def get_neuronal_model(self, neuronal_model_id):
        '''Query the current RMA endpoint with a neuronal_model id
        to get the corresponding well known files and meta data.

        Returns
        -------
        dict
            A dictionary containing the neuron config URL, stimulus URL,
            ephys sweeps and the raw neuronal model record.
        '''
        include = ( 'neuronal_model_template(well_known_files(well_known_file_type)),' +
                    'specimen(ephys_sweeps,ephys_result(well_known_files(well_known_file_type))),' +
                    'well_known_files(well_known_file_type)' )

        criteria = "[id$eq%d]" % neuronal_model_id

        self.neuronal_model = self.model_query('NeuronalModel',
                                               criteria=criteria,
                                               include=include,
                                               num_rows='all')[0]

        self.ephys_sweeps = None
        self.neuron_config_url = None
        self.stimulus_url = None

        # sweeps come from the specimen
        try:
            specimen = self.neuronal_model['specimen']
            self.ephys_sweeps = specimen['ephys_sweeps']
        except Exception as e:
            print(e)
            self.ephys_sweeps = None

        if self.ephys_sweeps is None:
            logging.warning("Could not find ephys_sweeps for this model (%d)" % self.neuronal_model['id'])

        # neuron config file comes from the neuronal model's well known files
        try:
            for wkf in self.neuronal_model['well_known_files']:
                if wkf['path'].endswith('neuron_config.json'):
                    self.neuron_config_url = wkf['download_link']
                    break
        except Exception as e:
            self.neuron_config_url = None

        if self.neuron_config_url is None:
            logging.warning("Could not find neuron config well_known_file for this model (%d)" % self.neuronal_model['id'])

        # NWB file comes from the ephys_result's well known files
        # (NameError here if the specimen lookup above failed is swallowed
        # by the broad except, matching the original behavior)
        try:
            ephys_result = specimen['ephys_result']
            for wkf in ephys_result['well_known_files']:
                if wkf['well_known_file_type']['name'] == 'NWB':
                    self.stimulus_url = wkf['download_link']
                    break
        except Exception as e:
            self.stimulus_url = None

        if self.stimulus_url is None:
            logging.warning("Could not find stimulus well_known_file for this model (%d)" % self.neuronal_model['id'])

        self.metadata = {
            'neuron_config_url': self.neuron_config_url,
            'stimulus_url': self.stimulus_url,
            'ephys_sweeps': self.ephys_sweeps,
            'neuronal_model': self.neuronal_model
        }

        return self.metadata

    def get_ephys_sweeps(self):
        ''' Retrieve ephys sweep information out of downloaded metadata for a neuronal model

        Returns
        -------
        list
            A list of sweeps metadata dictionaries
        '''
        return self.ephys_sweeps

    def get_neuron_config(self, output_file_name=None):
        ''' Retrieve a model configuration file from the API, optionally save it to disk, and
        return the contents of that file as a dictionary.

        Parameters
        ----------
        output_file_name: string
            File name to store the neuron configuration (optional).
        '''
        if self.neuron_config_url is None:
            raise Exception("URL for neuron config file is empty.")

        print(self.api_url + self.neuron_config_url)
        neuron_config = self.retrieve_parsed_json_over_http(self.api_url + self.neuron_config_url)

        if output_file_name:
            with open(output_file_name, 'wb') as f:
                f.write(json.dumps(neuron_config, indent=2))

        return neuron_config

    def cache_stimulus_file(self, output_file_name):
        ''' Download the NWB file for the current neuronal model and save it to a file.

        Parameters
        ----------
        output_file_name: string
            File name to store the NWB file.
        '''
        if self.stimulus_url is None:
            raise Exception("URL for stimulus file is empty.")

        self.retrieve_file_over_http(self.api_url + self.metadata['stimulus_url'], output_file_name)
|
wvangeit/AllenSDK
|
allensdk/api/queries/glif_api.py
|
Python
|
gpl-3.0
| 5,717
|
# -*- coding: utf-8 -*-
import inspect
# Grab the ``db`` (SQLAlchemy-style) instance from the module that imported
# us, by inspecting the caller's stack frame. Fragile by design: this module
# must only ever be imported by a module that defines ``db`` at top level.
db = inspect.getmodule(inspect.stack()[1][0]).db
class User(db.Model):
    """An application user, optionally linked to a Twitter account."""
    __tablename__ = 'user'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(40), unique=True)
    twitter_id = db.Column(db.String(255), unique=True)
    authority = db.Column(db.String(255))

    def as_dict(self):
        """Return a plain dict mapping every column name to its value."""
        return dict((column.name, getattr(self, column.name))
                    for column in self.__table__.columns)
class Client(db.Model):
    """An OAuth client application registered by a user.

    ``raw_redirect_uris`` and ``raw_default_scopes`` hold whitespace-
    separated lists; the properties expose them as Python lists.
    """
    __tablename__ = 'client'

    name = db.Column(db.String(255))
    client_id = db.Column(db.String(40), primary_key=True)
    client_secret = db.Column(db.String(50))
    user_id = db.Column(db.ForeignKey('user.id'))
    raw_redirect_uris = db.Column(db.Text)
    raw_default_scopes = db.Column(db.Text)

    @property
    def client_type(self):
        # Every client is treated as a public client.
        return 'public'

    @property
    def redirect_uris(self):
        """Registered redirect URIs as a list (empty when unset)."""
        raw = self.raw_redirect_uris
        return raw.split() if raw else []

    @property
    def default_redirect_uri(self):
        """The first registered redirect URI."""
        return self.redirect_uris[0]

    @property
    def default_scopes(self):
        """Default scopes as a list (empty when unset)."""
        raw = self.raw_default_scopes
        return raw.split() if raw else []

    def as_dict(self):
        """Return a plain dict mapping every column name to its value."""
        return dict((column.name, getattr(self, column.name))
                    for column in self.__table__.columns)
class Grant(db.Model):
    """A short-lived OAuth authorization-code grant tied to a user+client."""
    __tablename__ = 'grant'

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete='CASCADE'))
    user = db.relationship('User')
    client_id = db.Column(db.String(40), db.ForeignKey('client.client_id'), nullable=False)
    client = db.relationship('Client')
    code = db.Column(db.String(255), index=True, nullable=False)
    redirect_uri = db.Column(db.String(255))
    expires = db.Column(db.DateTime)
    raw_scopes = db.Column(db.Text)

    def delete(self):
        """Delete this grant and commit immediately; returns self."""
        db.session.delete(self)
        db.session.commit()
        return self

    @property
    def scopes(self):
        """Granted scopes as a list (empty when unset)."""
        raw = self.raw_scopes
        return raw.split() if raw else []
class Token(db.Model):
    """An issued OAuth bearer/refresh token pair for a user+client."""
    __tablename__ = 'token'

    id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete='CASCADE'))
    user = db.relationship('User')
    client_id = db.Column(db.String(40), db.ForeignKey('client.client_id'), nullable=False)
    client = db.relationship('Client')
    token_type = db.Column(db.String(40))
    access_token = db.Column(db.String(255), unique=True)
    refresh_token = db.Column(db.String(255), unique=True)
    expires = db.Column(db.DateTime)
    raw_scopes = db.Column(db.Text)

    @property
    def scopes(self):
        """Token scopes as a list (empty when unset)."""
        raw = self.raw_scopes
        return raw.split() if raw else []
|
kwzrhouse/api.kwzrhouse.net
|
models.py
|
Python
|
mit
| 2,803
|
# This file is part of Indico.
# Copyright (C) 2002 - 2015 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from flask import request, session, redirect
from MaKaC.webinterface.rh.base import RH
class RHChangeLang(RH):
    """Request handler that switches the session language and redirects back.

    Expects form fields ``lang`` (the language code) and ``next`` (a relative
    URL to return to).
    """

    def _process(self):
        language = request.form['lang']
        session.lang = language
        if session.user:
            # Persist the choice for logged-in users.
            session.user.settings.set('lang', language)
        next_url = request.form['next']
        # Reject absolute URLs to avoid an open redirect to an external site.
        # Fix: this was an ``assert``, which is silently stripped when Python
        # runs with -O; raise the same exception type explicitly instead.
        if '://' in next_url:
            raise AssertionError('redirect target must be a relative URL')
        return redirect(next_url)
|
XeCycle/indico
|
indico/MaKaC/webinterface/rh/lang.py
|
Python
|
gpl-3.0
| 1,152
|
'''
Created on 17 Aug 2017
Delete middle node: Implement an algorithm to delete a node in the middle,
(i.e. any node by the first and the last node, not exactly at the middle) of a singly linked list,
given only access to the node.
@author: igoroya
'''
from chapter2 import utils
def delete_node(node):
    """Delete *node* from its singly linked list, given only the node itself.

    Works by copying the next node's cargo into *node* and unlinking the next
    node. The tail node cannot be deleted this way (nothing to copy from).

    :param node: a list node exposing ``cargo`` and ``next_node`` attributes.
    :raises ValueError: if *node* is the last node of the list.
        (ValueError subclasses Exception, so existing broad handlers still work.)
    """
    if node.next_node is None:
        raise ValueError("Last node provided, not valid")
    node.cargo = node.next_node.cargo
    node.next_node = node.next_node.next_node
if __name__ == '__main__':
    # Demo: build a sample list, delete a middle node, show before/after.
    my_list = utils.make_sample_list()
    print(my_list)
    # get one node that is in the middle
    my_node = my_list.search('c')
    delete_node(my_node)
    print(my_list)
|
igoroya/igor-oya-solutions-cracking-coding-interview
|
crackingcointsolutions/chapter2/exercisethree.py
|
Python
|
mit
| 794
|
from django.conf.urls import url, patterns
from django.views.generic import TemplateView
# URL routes for the voucher app. Uses the string-based view references and
# the ``patterns()`` helper (deprecated in Django 1.8, removed in 1.10); the
# first argument is the common view-module prefix.
urlpatterns = patterns('librehatti.voucher.views',
    url(r'^voucher_generate/','voucher_generate'),
    url(r'^voucher_show/','voucher_show'),
    url(r'^voucher_print/','voucher_print'),
)
|
s-monisha/LibreHatti
|
src/librehatti/voucher/urls.py
|
Python
|
gpl-2.0
| 283
|
"""
CharacterDetector:
This class monitors the eve gamelog directory for new files,
as well as initializes PELD with the last day's worth of eve logs
and keeps track of what eve character belongs to which log.
When a new file enters the gamelog directory, CharacterDetector
either replaces an existing character with the new log file,
or adds a new character to the character menu.
LogReader:
This class does the actual reading of the logs. Each eve
character has it's own instance of this class. This class
contains the regex which process new log entries into a consumable
format.
"""
import re
import os
import datetime
import time
import platform
import tkinter as tk
from peld import settings
import logging
import data.oreVolume
_oreVolume = data.oreVolume._oreVolume
from tkinter import messagebox, IntVar, filedialog
from watchdog.events import FileSystemEventHandler
from watchdog.observers import Observer
# Empty per-category result returned when no character log is selected.
# Presumably one list per tracked stat category — TODO confirm against the
# consumers of readLog().
_emptyResult = [[] for x in range(0,9)]

# this holds the regex strings for all the different languages the eve game log can be in;
# outer key = client language, inner keys = event category, each value a
# regex whose first group captures the numeric amount (plus named groups
# for pilot/ship/weapon in 'pilotAndWeapon' and the ore name in 'mined').
_logLanguageRegex = {
    'english': {
        'character': "(?<=Listener: ).*",
        'sessionTime': "(?<=Session Started: ).*",
        'pilotAndWeapon': '(?:.*ffffffff>(?P<default_pilot>[^\(\)<>]*)(?:\[.*\((?P<default_ship>.*)\)<|<)/b.*> \-(?: (?P<default_weapon>.*?)(?: \-|<)|.*))',
        'damageOut': "\(combat\) <.*?><b>([0-9]+).*>to<",
        'damageIn': "\(combat\) <.*?><b>([0-9]+).*>from<",
        'armorRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> remote armor repaired to <",
        'hullRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> remote hull repaired to <",
        'shieldBoostedOut': "\(combat\) <.*?><b>([0-9]+).*> remote shield boosted to <",
        'armorRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> remote armor repaired by <",
        'hullRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> remote hull repaired by <",
        'shieldBoostedIn': "\(combat\) <.*?><b>([0-9]+).*> remote shield boosted by <",
        'capTransferedOut': "\(combat\) <.*?><b>([0-9]+).*> remote capacitor transmitted to <",
        'capNeutralizedOut': "\(combat\) <.*?ff7fffff><b>([0-9]+).*> energy neutralized <",
        'nosRecieved': "\(combat\) <.*?><b>\+([0-9]+).*> energy drained from <",
        'capTransferedIn': "\(combat\) <.*?><b>([0-9]+).*> remote capacitor transmitted by <",
        'capNeutralizedIn': "\(combat\) <.*?ffe57f7f><b>([0-9]+).*> energy neutralized <",
        'nosTaken': "\(combat\) <.*?><b>\-([0-9]+).*> energy drained to <",
        'mined': "\(mining\) .* <b><.*?><.*?>([0-9]+).*> units of .*<b>(.+)</b>"
    },
    'russian': {
        'character': "(?<=Слушатель: ).*",
        'sessionTime': "(?<=Сеанс начат: ).*",
        'pilotAndWeapon': '(?:.*ffffffff>(?:<localized .*?>)?(?P<default_pilot>[^\(\)<>]*)(?:\[.*\((?:<localized .*?>)?(?P<default_ship>.*)\)<|<)/b.*> \-(?: (?:<localized .*?>)?(?P<default_weapon>.*?)(?: \-|<)|.*))',
        'damageOut': "\(combat\) <.*?><b>([0-9]+).*>на<",
        'damageIn': "\(combat\) <.*?><b>([0-9]+).*>из<",
        'armorRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> единиц запаса прочности брони отремонтировано <",
        'hullRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> единиц запаса прочности корпуса отремонтировано <",
        'shieldBoostedOut': "\(combat\) <.*?><b>([0-9]+).*> единиц запаса прочности щитов накачано <",
        'armorRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> единиц запаса прочности брони получено дистанционным ремонтом от <",
        'hullRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> единиц запаса прочности корпуса получено дистанционным ремонтом от <",
        'shieldBoostedIn': "\(combat\) <.*?><b>([0-9]+).*> единиц запаса прочности щитов получено накачкой от <",
        'capTransferedOut': "\(combat\) <.*?><b>([0-9]+).*> единиц запаса энергии накопителя отправлено в <",
        'capNeutralizedOut': "\(combat\) <.*?ff7fffff><b>([0-9]+).*> энергии нейтрализовано <",
        'nosRecieved': "\(combat\) <.*?><b>\+([0-9]+).*> энергии извлечено из <",
        'capTransferedIn': "\(combat\) <.*?><b>([0-9]+).*> единиц запаса энергии накопителя получено от <",
        'capNeutralizedIn': "\(combat\) <.*?ffe57f7f><b>([0-9]+).*> энергии нейтрализовано <",
        'nosTaken': "\(combat\) <.*?><b>\-([0-9]+).*> энергии извлечено и передано <",
        'mined': "\(mining\) .* <b><.*?><.*?>([0-9]+).*<b>(?:<localized .*?>)?(.+)\*</b>"
    },
    'french': {
        'character': "(?<=Auditeur: ).*",
        'sessionTime': "(?<=Session commencée: ).*",
        'pilotAndWeapon': '(?:.*ffffffff>(?:<localized .*?>)?(?P<default_pilot>[^\(\)<>]*)(?:\[.*\((?:<localized .*?>)?(?P<default_ship>.*)\)<|<)/b.*> \-(?: (?:<localized .*?>)?(?P<default_weapon>.*?)(?: \-|<)|.*))',
        'damageOut': "\(combat\) <.*?><b>([0-9]+).*>à<",
        'damageIn': "\(combat\) <.*?><b>([0-9]+).*>de<",
        'armorRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> points de blindage transférés à distance à <",
        'hullRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> points de structure transférés à distance à <",
        'shieldBoostedOut': "\(combat\) <.*?><b>([0-9]+).*> points de boucliers transférés à distance à <",
        'armorRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> points de blindage réparés à distance par <",
        'hullRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> points de structure réparés à distance par <",
        'shieldBoostedIn': "\(combat\) <.*?><b>([0-9]+).*> points de boucliers transférés à distance par <",
        'capTransferedOut': "\(combat\) <.*?><b>([0-9]+).*> points de capaciteur transférés à distance à <",
        'capNeutralizedOut': "\(combat\) <.*?ff7fffff><b>([0-9]+).*> d'énergie neutralisée en faveur de <",
        'nosRecieved': "\(combat\) <.*?><b>([0-9]+).*> d'énergie siphonnée aux dépens de <",
        'capTransferedIn': "\(combat\) <.*?><b>([0-9]+).*> points de capaciteur transférés à distance par <",
        'capNeutralizedIn': "\(combat\) <.*?ffe57f7f><b>([0-9]+).*> d'énergie neutralisée aux dépens de <",
        'nosTaken': "\(combat\) <.*?><b>([0-9]+).*> d'énergie siphonnée en faveur de <",
        'mined': "\(mining\) .* <b><.*?><.*?>([0-9]+).*<b>(?:<localized .*?>)?(.+)\*</b>"
    },
    'german': {
        'character': "(?<=Empfänger: ).*",
        'sessionTime': "(?<=Sitzung gestartet: ).*",
        'pilotAndWeapon': '(?:.*ffffffff>(?:<localized .*?>)?(?P<default_pilot>[^\(\)<>]*)(?:\[.*\((?:<localized .*?>)?(?P<default_ship>.*)\)<|<)/b.*> \-(?: (?:<localized .*?>)?(?P<default_weapon>.*?)(?: \-|<)|.*))',
        'damageOut': "\(combat\) <.*?><b>([0-9]+).*>nach<",
        'damageIn': "\(combat\) <.*?><b>([0-9]+).*>von<",
        'armorRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> Panzerungs-Fernreparatur zu <",
        'hullRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> Rumpf-Fernreparatur zu <",
        'shieldBoostedOut': "\(combat\) <.*?><b>([0-9]+).*> Schildfernbooster aktiviert zu <",
        'armorRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> Panzerungs-Fernreparatur von <",
        'hullRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> Rumpf-Fernreparatur von <",
        'shieldBoostedIn': "\(combat\) <.*?><b>([0-9]+).*> Schildfernbooster aktiviert von <",
        'capTransferedOut': "\(combat\) <.*?><b>([0-9]+).*> Fernenergiespeicher übertragen zu <",
        'capNeutralizedOut': "\(combat\) <.*?ff7fffff><b>([0-9]+).*> Energie neutralisiert <",
        'nosRecieved': "\(combat\) <.*?><b>\+([0-9]+).*> Energie transferiert von <",
        'capTransferedIn': "\(combat\) <.*?><b>([0-9]+).*> Fernenergiespeicher übertragen von <",
        'capNeutralizedIn': "\(combat\) <.*?ffe57f7f><b>\-([0-9]+).*> Energie neutralisiert <",
        'nosTaken': "\(combat\) <.*?><b>\-([0-9]+).*> Energie transferiert zu <",
        'mined': "\(mining\) .* <b><.*?><.*?>([0-9]+).*<b>(?:<localized .*?>)?(.+)\*</b>"
    },
    'japanese': {
        'character': "(?<=傍聴者: ).*",
        'sessionTime': "(?<=セッション開始: ).*",
        'pilotAndWeapon': '(?:.*ffffffff>(?:<localized .*?>)?(?P<default_pilot>[^\(\)<>]*)(?:\[.*\((?:<localized .*?>)?(?P<default_ship>.*)\)<|<)/b.*> \-(?: (?:<localized .*?>)?(?P<default_weapon>.*?)(?: \-|<)|.*))',
        'damageOut': "\(combat\) <.*?><b>([0-9]+).*>対象:<",
        'damageIn': "\(combat\) <.*?><b>([0-9]+).*>攻撃者:<",
        'armorRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> remote armor repaired to <",
        'hullRepairedOut': "\(combat\) <.*?><b>([0-9]+).*> remote hull repaired to <",
        'shieldBoostedOut': "\(combat\) <.*?><b>([0-9]+).*> remote shield boosted to <",
        'armorRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> remote armor repaired by <",
        'hullRepairedIn': "\(combat\) <.*?><b>([0-9]+).*> remote hull repaired by <",
        'shieldBoostedIn': "\(combat\) <.*?><b>([0-9]+).*> remote shield boosted by <",
        'capTransferedOut': "\(combat\) <.*?><b>([0-9]+).*> remote capacitor transmitted to <",
        'capNeutralizedOut': "\(combat\) <.*?ff7fffff><b>([0-9]+).*> エネルギーニュートラライズ 対象:<",
        'nosRecieved': "\(combat\) <.*?><b>\+([0-9]+).*> エネルギードレイン 対象:<",
        'capTransferedIn': "\(combat\) <.*?><b>([0-9]+).*> remote capacitor transmitted by <",
        'capNeutralizedIn': "\(combat\) <.*?ffe57f7f><b>([0-9]+).*>のエネルギーが解放されました<",
        'nosTaken': "\(combat\) <.*?><b>\-([0-9]+).*> エネルギードレイン 攻撃者:<",
        'mined': "\(mining\) .* <b><.*?><.*?>([0-9]+).*<b>(?:<localized .*?>)?(.+)\*</b>"
    },
    'chinese':{
        'character': "(?<=收听者: ).*",
        'sessionTime': "(?<=进程开始: ).*",
        'pilotAndWeapon': '(?:.*ffffffff>(?:<localized .*?>)?(?P<default_pilot>[^\(\)<>]*)(?:\[.*\((?:<localized .*?>)?(?P<default_ship>.*)\)<|<)/b.*> \-(?: (?:<localized .*?>)?(?P<default_weapon>.*?)(?: \-|<)|.*))',
        'damageOut': "\(combat\) <.*?><b>([0-9]+).*>对<",
        'damageIn': "\(combat\) <.*?><b>([0-9]+).*>来自<",
        'armorRepairedOut': "\(combat\) <.*?><b>([0-9]+).*>远程装甲维修量至<",
        'hullRepairedOut': "\(combat\) <.*?><b>([0-9]+).*>远程结构维修量至<",
        'shieldBoostedOut': "\(combat\) <.*?><b>([0-9]+).*>远程护盾回充增量至<",
        'armorRepairedIn': "\(combat\) <.*?><b>([0-9]+).*>远程装甲维修量由<",
        'hullRepairedIn': "\(combat\) <.*?><b>([0-9]+).*>远程结构维修量由<",
        'shieldBoostedIn': "\(combat\) <.*?><b>([0-9]+).*>远程护盾回充增量由<",
        'capTransferedOut': "\(combat\) <.*?><b>([0-9]+).*>远程电容传输至<",
        'capNeutralizedOut': "\(combat\) <.*?ff7fffff><b>([0-9]+).*>能量中和<",
        'nosRecieved': "\(combat\) <.*?><b>\+([0-9]+).*>被从<",
        'capTransferedIn': "\(combat\) <.*?><b>([0-9]+).*>远程电容传输量由<",
        'capNeutralizedIn': "\(combat\) <.*?ffe57f7f><b>([0-9]+).*>能量中和<",
        'nosTaken': "\(combat\) <.*?><b>\-([0-9]+).*>被吸取到<",
        'mined': "\(mining\) .*<b><.*?><.*?>([0-9]+).*>个单位的.*<b>(.+)</b>"
    }
}

# Module-level list of LogReader instances; shared so CharacterDetector
# instances all see the same registry.
_logReaders = []
class CharacterDetector(FileSystemEventHandler):
    """Watches the EVE gamelog directory for new log files, seeds PELD with
    the last 24 hours of logs, and maps each EVE character to its log reader
    via the character menu.
    """

    def __init__(self, mainWindow, characterMenu):
        self.mainWindow = mainWindow
        self.characterMenu = characterMenu
        self.observer = Observer()

        # Locate the EVE Gamelogs directory for the current platform.
        if (platform.system() == "Windows"):
            import win32com.client
            oShell = win32com.client.Dispatch("Wscript.Shell")
            self.path = oShell.SpecialFolders("MyDocuments") + "\\EVE\\logs\\Gamelogs\\"
        else:
            self.path = os.environ['HOME'] + "/Documents/EVE/logs/Gamelogs/"
        # Character names shown in the menu, index-aligned with logReaders.
        self.menuEntries = []
        self.logReaders = _logReaders
        # Index of the currently selected character (tk radio variable).
        self.selectedIndex = IntVar()
        self.playbackLogReader = None

        try:
            # Seed with every log modified in the last 24 hours, oldest first.
            oneDayAgo = datetime.datetime.now() - datetime.timedelta(hours=24)
            fileList = sorted(os.listdir(self.path), key=lambda file: os.stat(os.path.join(self.path, file)).st_mtime)
            for filename in fileList:
                # Log names look like YYYYMMDD_HHMMSS[_charID].txt; strip the
                # trailing id+extension and parse the timestamp.
                timeString = re.sub(r'_[0-9]*\.txt$', '', filename)
                try:
                    fileTime = datetime.datetime.strptime(timeString, "%Y%m%d_%H%M%S")
                except ValueError:
                    # Not a gamelog-style filename; skip it.
                    continue
                if (fileTime >= oneDayAgo):
                    self.addLog(self.path + filename)
            self.selectedIndex.set(0)

            if len(self.menuEntries) == 0:
                self.characterMenu.menu.add_command(label='No character logs detected for past 24 hours', state=tk.DISABLED)

            # Start watching the directory for newly created logs.
            self.observer.schedule(self, self.path, recursive=False)
            self.observer.start()
        except FileNotFoundError:
            # No EVE installation: degrade gracefully, PELD keeps running.
            logging.error('EVE logs directory not found, path checked: ' + self.path)
            messagebox.showerror("Error", "Can't find the EVE logs directory. Do you have EVE installed? \n\n" +
                                 "Path checked: " + self.path + "\n\n" +
                                 "PELD will continue to run, but will not track EVE data.")
            self.characterMenu.menu.add_command(label='No EVE installation detected', state=tk.DISABLED)
            self.characterMenu.menu.add_separator()
            from settings.overviewSettings import OverviewSettingsWindow
            self.characterMenu.menu.add_command(label='Open overview settings', command=OverviewSettingsWindow)

    def on_created(self, event):
        """watchdog callback: a new file appeared in the gamelog directory."""
        self.addLog(event.src_path)

    def addLog(self, logPath):
        """Register *logPath*: replace the existing reader for its character,
        or add a new character entry to the menu."""
        logging.info('Processing log file: ' + logPath)
        log = open(logPath, 'r', encoding="utf8")
        log.readline()
        log.readline()
        # Third line names the listening character (and fixes the language).
        characterLine = log.readline()
        try:
            character, language = ProcessCharacterLine(characterLine)
        except BadLogException:
            logging.info("Log " + logPath + " is not a character log.")
            return
        log.close()
        # Drop the 'no logs detected' placeholder on first real entry.
        if len(self.menuEntries) == 0:
            self.characterMenu.menu.delete(0)
        # Known character: swap in a reader for the newer log file.
        for i in range(len(self.menuEntries)):
            if (character == self.menuEntries[i]):
                try:
                    newLogReader = LogReader(logPath, self.mainWindow)
                except BadLogException:
                    return
                self.logReaders[i] = newLogReader
                return
        # New character: create a reader and a menu radio button for it.
        try:
            newLogReader = LogReader(logPath, self.mainWindow)
        except BadLogException:
            return
        self.logReaders.append(newLogReader)
        self.characterMenu.menu.insert_radiobutton(0, label=character, variable=self.selectedIndex,
                                                   value=len(self.menuEntries), command=self.catchupLog)
        self.menuEntries.append(character)

    def stop(self):
        """Stop the directory observer."""
        self.observer.stop()

    def playbackLog(self, logPath):
        """Enter playback mode for an arbitrary log file."""
        try:
            self.mainWindow.animator.dataQueue = None
            self.playbackLogReader = PlaybackLogReader(logPath, self.mainWindow)
            self.mainWindow.addPlaybackFrame(self.playbackLogReader.startTimeLog, self.playbackLogReader.endTimeLog)
        except BadLogException:
            self.playbackLogReader = None

    def stopPlayback(self):
        """Leave playback mode and restore the live UI."""
        self.playbackLogReader = None
        self.mainWindow.removePlaybackFrame()

    def readLog(self):
        """Read new entries from the playback reader if active, else from the
        selected character's reader; empty result when neither exists."""
        if (self.playbackLogReader):
            return self.playbackLogReader.readLog()
        elif (len(self.menuEntries) > 0):
            return self.logReaders[self.selectedIndex.get()].readLog()
        else:
            return _emptyResult

    def catchupLog(self):
        """Skip the selected reader past old entries (on character switch)."""
        self.mainWindow.animator.catchup()
        try:
            self.logReaders[self.selectedIndex.get()].catchup()
        except IndexError:
            pass
class BaseLogReader():
    """Shared regex construction and parsing for live and playback log readers.

    Subclasses are expected to set self.language and self.character before
    calling compileRegex(); readLog(logData) then parses a chunk of raw log
    text into per-category lists of event dictionaries.
    """
    def __init__(self, logPath, mainWindow):
        self.mainWindow = mainWindow
    def createOverviewRegex(self, overviewSettings):
        """Build the pilot/ship/weapon capture regex from overview settings.

        Returns a regex string containing named groups 'pilot', 'ship' and
        'weapon', or None when no settings are given or they cannot be parsed
        — callers then fall back to the built-in per-language pattern.
        """
        if overviewSettings:
            def safeGetIndex(elem, _list):
                # Labels missing from the configured order sort last.
                try:
                    return _list.index(elem)
                except ValueError:
                    return 10
            try:
                # Emit label sub-patterns in the user's configured display order.
                keyLambda = lambda e: safeGetIndex(e[0], overviewSettings['shipLabelOrder'])
                sortedShipLabels = sorted(overviewSettings['shipLabels'], key=keyLambda)
                pilotAndWeaponRegex = "(?:(?:.*ffffffff>"
                for shipLabel in sortedShipLabels[:]:
                    shipLabel[1] = dict(shipLabel[1])
                    if not shipLabel[1]['state']:
                        # Disabled label: still emit an empty named group for
                        # pilot/ship so extractValues can read it unconditionally.
                        if shipLabel[1]['type'] in ['pilot name', 'ship type']:
                            identifier = shipLabel[1]['type'].split()[0]
                            pilotAndWeaponRegex += '(?P<'+identifier+'>)'
                        continue
                    if shipLabel[1]['type'] == None:
                        # Plain text label: optional literal prefix only.
                        safePre = re.escape(shipLabel[1]['pre'])
                        pilotAndWeaponRegex += '(?:'+safePre+')?'
                    elif shipLabel[1]['type'] in ['alliance', 'corporation', 'ship name']:
                        # Labels we do not capture: match and discard them.
                        safePre = re.escape(shipLabel[1]['pre'])
                        safePost = re.escape(shipLabel[1]['post'])
                        pilotAndWeaponRegex += '(?:'+safePre+'.*?'+safePost+')?'
                    elif shipLabel[1]['type'] in ['pilot name', 'ship type']:
                        # Captured labels become the named groups used later.
                        safePre = re.escape(shipLabel[1]['pre'])
                        safePost = re.escape(shipLabel[1]['post'])
                        identifier = shipLabel[1]['type'].split()[0]
                        pilotAndWeaponRegex += '(?:'+safePre+'(?:<localized .*?>)?(?P<'+identifier+'>.*?)'+safePost+')'
                    else:
                        continue
                pilotAndWeaponRegex += ".*> \-(?: (?:<localized .*?>)?(?P<weapon>.*?)(?: \-|<)|.*))"
                pilotAndWeaponRegex += '|' + _logLanguageRegex[self.language]['pilotAndWeapon'] + ')?'
                return pilotAndWeaponRegex
            except Exception as e:
                logging.error('error parsing overview settings: ' + str(e))
                return None
        else:
            return None
    def compileRegex(self):
        """Compile every per-event-type regex for self.language / self.character."""
        # The basic pattern gets empty pilot/ship/weapon groups appended so
        # every compiled regex exposes the same named groups to extractValues.
        basicPilotAndWeaponRegex = _logLanguageRegex[self.language]['pilotAndWeapon']
        basicPilotAndWeaponRegex += '(?P<pilot>)(?P<ship>)(?P<weapon>)'
        overviewSettings = settings.getOverviewSettings(self.character)
        pilotAndWeaponRegex = self.createOverviewRegex(overviewSettings) or basicPilotAndWeaponRegex
        # Damage lines always use the basic pattern; the overview-derived
        # pattern (when available) is applied to the other event types.
        self.damageOutRegex = re.compile(_logLanguageRegex[self.language]['damageOut'] + basicPilotAndWeaponRegex)
        self.damageInRegex = re.compile(_logLanguageRegex[self.language]['damageIn'] + basicPilotAndWeaponRegex)
        self.armorRepairedOutRegex = re.compile(_logLanguageRegex[self.language]['armorRepairedOut'] + pilotAndWeaponRegex)
        self.hullRepairedOutRegex = re.compile(_logLanguageRegex[self.language]['hullRepairedOut'] + pilotAndWeaponRegex)
        self.shieldBoostedOutRegex = re.compile(_logLanguageRegex[self.language]['shieldBoostedOut'] + pilotAndWeaponRegex)
        self.armorRepairedInRegex = re.compile(_logLanguageRegex[self.language]['armorRepairedIn']+ pilotAndWeaponRegex)
        self.hullRepairedInRegex = re.compile(_logLanguageRegex[self.language]['hullRepairedIn'] + pilotAndWeaponRegex)
        self.shieldBoostedInRegex = re.compile(_logLanguageRegex[self.language]['shieldBoostedIn'] + pilotAndWeaponRegex)
        self.capTransferedOutRegex = re.compile(_logLanguageRegex[self.language]['capTransferedOut'] + pilotAndWeaponRegex)
        self.capNeutralizedOutRegex = re.compile(_logLanguageRegex[self.language]['capNeutralizedOut'] + pilotAndWeaponRegex)
        self.nosRecievedRegex = re.compile(_logLanguageRegex[self.language]['nosRecieved'] + pilotAndWeaponRegex)
        self.capTransferedInRegex = re.compile(_logLanguageRegex[self.language]['capTransferedIn'] + pilotAndWeaponRegex)
        #add nos recieved to this group in readlog
        self.capNeutralizedInRegex = re.compile(_logLanguageRegex[self.language]['capNeutralizedIn'] + pilotAndWeaponRegex)
        self.nosTakenRegex = re.compile(_logLanguageRegex[self.language]['nosTaken'] + pilotAndWeaponRegex)
        self.minedRegex = re.compile(_logLanguageRegex[self.language]['mined'])
    def readLog(self, logData):
        """Parse logData and return the nine per-category event lists:
        (damageOut, damageIn, logisticsOut, logisticsIn, capTransfered,
        capRecieved, capDamageDone, capDamageRecieved, mined)."""
        damageOut = self.extractValues(self.damageOutRegex, logData)
        damageIn = self.extractValues(self.damageInRegex, logData)
        logisticsOut = self.extractValues(self.armorRepairedOutRegex, logData)
        logisticsOut.extend(self.extractValues(self.hullRepairedOutRegex, logData))
        logisticsOut.extend(self.extractValues(self.shieldBoostedOutRegex, logData))
        logisticsIn = self.extractValues(self.armorRepairedInRegex, logData)
        logisticsIn.extend(self.extractValues(self.hullRepairedInRegex, logData))
        logisticsIn.extend(self.extractValues(self.shieldBoostedInRegex, logData))
        capTransfered = self.extractValues(self.capTransferedOutRegex, logData)
        capRecieved = self.extractValues(self.capTransferedInRegex, logData)
        capRecieved.extend(self.extractValues(self.nosRecievedRegex, logData))
        # Energy nosferatu'd from a target is counted both as cap received
        # (above) and as cap damage done to that target (below).
        capDamageDone = self.extractValues(self.capNeutralizedOutRegex, logData)
        capDamageDone.extend(self.extractValues(self.nosRecievedRegex, logData))
        capDamageRecieved = self.extractValues(self.capNeutralizedInRegex, logData)
        capDamageRecieved.extend(self.extractValues(self.nosTakenRegex, logData))
        mined = self.extractValues(self.minedRegex, logData, mining=True)
        return damageOut, damageIn, logisticsOut, logisticsIn, capTransfered, capRecieved, capDamageDone, capDamageRecieved, mined
    def extractValues(self, regex, logData, mining=False):
        """Run regex over logData and return a list of event dicts.

        Mining events carry only 'amount' (optionally converted to m3 via
        _oreVolume); combat events carry 'amount', 'pilotName', 'shipType'
        and 'weaponType'.
        """
        returnValue = []
        group = regex.finditer(logData)
        if mining:
            for match in group:
                amount = match.group(1)
                _type = match.group(2)
                # NOTE(review): `amount` is a str here, so `amount != 0` is
                # always True — probably meant int(amount) != 0; confirm.
                if amount != 0:
                    returnGroup = {}
                    if settings.getMiningM3Setting():
                        # Convert units mined to cubic meters when configured.
                        if _type in _oreVolume:
                            returnGroup['amount'] = int(amount) * _oreVolume[_type]
                        else:
                            returnGroup['amount'] = int(amount)
                    else:
                        returnGroup['amount'] = int(amount)
                    returnValue.append(returnGroup)
            return returnValue
        for match in group:
            # 'default_*' groups come from the built-in language pattern; the
            # bare groups come from the overview-derived pattern (see
            # compileRegex, which guarantees all six groups exist).
            amount = match.group(1) or 0
            pilotName = match.group('default_pilot') or match.group('pilot') or '?'
            shipType = match.group('ship') or match.group('default_ship') or pilotName
            weaponType = match.group('default_weapon') or match.group('weapon') or 'Unknown'
            if amount != 0:
                returnGroup = {}
                returnGroup['amount'] = int(amount)
                returnGroup['pilotName'] = pilotName.strip()
                returnGroup['shipType'] = shipType
                returnGroup['weaponType'] = weaponType
                returnValue.append(returnGroup)
        return returnValue
class PlaybackLogReader(BaseLogReader):
    """Replays a finished log file in real time, as if it were being written.

    Maps wall-clock time onto the log's own timeline via startTimeDelta and
    lets readLog() return only the entries whose timestamp has "elapsed".
    """
    def __init__(self, logPath, mainWindow):
        super().__init__(logPath, mainWindow)
        logging.info('Processing playback log file: ' + logPath)
        self.mainWindow = mainWindow
        self.paused = False
        self.logPath = logPath
        try:
            self.log = open(logPath, 'r', encoding="utf8")
            self.log.readline()
            self.log.readline()
        except:
            messagebox.showerror("Error", "This doesn't appear to be a EVE log file.\nPlease select a different file.")
            raise BadLogException("not character log")
        # The third header line identifies the character and the log language.
        characterLine = self.log.readline()
        try:
            self.character, self.language = ProcessCharacterLine(characterLine)
        except BadLogException:
            messagebox.showerror("Error", "This doesn't appear to be a EVE combat log.\nPlease select a different file.")
            raise BadLogException("not character log")
        logging.info('Log language is ' + self.language)
        # Session-start timestamp from the header, parsed per log language.
        startTimeRegex = _logLanguageRegex[self.language]['sessionTime']
        self.startTimeLog = datetime.datetime.strptime(re.search(startTimeRegex, self.log.readline()).group(0), "%Y.%m.%d %X")
        self.log.readline()
        self.logLine = self.log.readline()
        # A dashed divider means another character's header block follows
        # (same-second login collision); skip each extra header block.
        while (self.logLine == "------------------------------------------------------------\n"):
            self.log.readline()
            collisionCharacter, language = ProcessCharacterLine(self.log.readline())
            #Since we currently don't have a use for characters during playback, this is not needed for now.
            #messagebox.showerror("Error", "Log file collision on characters:\n\n" + character + " and " + collisionCharacter +
            #                    "\n\nThis happens when both characters log in at exactly the same second.\n" +
            #                    "This makes it impossible to know which character owns this log.\n\n" +
            #                    "Playback will continue\nlog file:\n" + logPath)
            self.log.readline()
            self.log.readline()
            self.logLine = self.log.readline()
        # Entry timestamps look like "[ 2018.01.01 12:00:00 ]".
        self.timeRegex = re.compile("^\[ .*? \]")
        self.nextLine = self.logLine
        self.nextTime = datetime.datetime.strptime(self.timeRegex.findall(self.nextLine)[0], "[ %Y.%m.%d %X ]")
        # Offset between real "now" and the log's start; readLog uses it to
        # decide which entries are due.
        self.startTimeDelta = datetime.datetime.utcnow() - self.startTimeLog
        self.compileRegex()
        #inefficient, but ok for our normal log size
        # First pass over the file: timestamp of the last entry -> endTimeLog.
        endOfLog = open(logPath, 'r', encoding="utf8")
        line = endOfLog.readline()
        while ( line != '' ):
            line = endOfLog.readline()
            try:
                nextTimeString = self.timeRegex.findall(line)[0]
            except IndexError:
                continue  # line without a timestamp
            self.endTimeLog = datetime.datetime.strptime(nextTimeString, "[ %Y.%m.%d %X ]")
        endOfLog.close()
        # Second pass: per-second histogram of entry counts.
        endOfLog = open(logPath, 'r', encoding="utf8")
        line = endOfLog.readline()
        self.logEntryFrequency = [0] * (self.endTimeLog - self.startTimeLog).seconds
        while ( line != '' ):
            line = endOfLog.readline()
            try:
                nextTimeString = self.timeRegex.findall(line)[0]
                entryTime = datetime.datetime.strptime(nextTimeString, "[ %Y.%m.%d %X ]")
                self.logEntryFrequency[(entryTime - self.startTimeLog).seconds] += 1
            except IndexError:
                continue
        endOfLog.close()
    def newStartTime(self, newTime):
        """Seek playback to newTime: reopen the log and skip earlier entries."""
        self.log.close()
        self.log = open(self.logPath, 'r', encoding="utf8")
        self.startTimeDelta = datetime.datetime.utcnow() - newTime
        self.nextTime = self.startTimeLog
        while ( self.nextTime < newTime ):
            line = self.log.readline()
            try:
                nextTimeString = self.timeRegex.findall(line)[0]
            except IndexError:
                continue
            self.nextTime = datetime.datetime.strptime(nextTimeString, "[ %Y.%m.%d %X ]")
        self.nextLine = line
    def readLog(self):
        """Return parsed events whose log timestamps are now due; auto-pauses at EOF."""
        if self.paused:
            return _emptyResult
        logData = ""
        # Current position on the log's own timeline.
        logReaderTime = datetime.datetime.utcnow() - self.startTimeDelta
        self.mainWindow.playbackFrame.timeSlider.set((logReaderTime - self.startTimeLog).seconds)
        while ( self.nextTime < logReaderTime ):
            logData += self.nextLine
            self.nextLine = self.log.readline()
            if (self.nextLine == ''):
                # End of file reached: pause playback via the UI handler.
                self.mainWindow.playbackFrame.pauseButtonRelease(None)
                return _emptyResult
            try:
                nextTimeString = self.timeRegex.findall(self.nextLine)[0]
            except IndexError:
                continue
            self.nextTime = datetime.datetime.strptime(nextTimeString, "[ %Y.%m.%d %X ]")
        return super().readLog(logData)
class LogReader(BaseLogReader):
    """Incrementally reads a live character log.

    The constructor validates the header, detects a same-second log file
    collision, and fast-forwards past existing content so readLog() only
    returns lines written after the reader was created.
    """
    def __init__(self, logPath, mainWindow):
        super().__init__(logPath, mainWindow)
        self.log = open(logPath, 'r', encoding="utf8")
        self.log.readline()
        self.log.readline()
        # Third header line: character name and log language.
        characterLine = self.log.readline()
        self.character, self.language = ProcessCharacterLine(characterLine)
        logging.info('Log language is ' + self.language)
        self.log.readline()
        self.log.readline()
        self.logLine = self.log.readline()
        if (self.logLine == "------------------------------------------------------------\n"):
            # A second header block means two characters logged in during the
            # same second and share this file; ownership cannot be determined.
            self.log.readline()
            collisionCharacter, language = ProcessCharacterLine(self.log.readline())
            logging.error('Log file collision on characters' + self.character + " and " + collisionCharacter)
            messagebox.showerror("Error", "Log file collision on characters:\n\n" + self.character + " and " + collisionCharacter +
                                 "\n\nThis happens when both characters log in at exactly the same second.\n" +
                                 "This makes it impossible to know which character owns which log.\n\n" +
                                 "Please restart the client of the character you want to track to use this program.\n" +
                                 "If you already did, you can ignore this message, or delete this log file:\n" + logPath)
            raise BadLogException("log file collision")
        # Discard everything already in the file; only new entries are reported.
        self.log.read()
        self.compileRegex()
    def readLog(self):
        # Everything appended since the last read (or since construction).
        logData = self.log.read()
        return super().readLog(logData)
    def catchup(self):
        # Skip unread backlog, e.g. after switching the tracked character.
        self.log.read()
class BadLogException(Exception):
    """Raised when a file is not a usable EVE character log (bad header or
    a same-second log file collision)."""
    pass
def ProcessCharacterLine(characterLine):
    """Identify the character and log language from a log's 'Listener' line.

    Tries each supported language's character pattern in turn and returns a
    (character, language) tuple for the first match.

    Raises:
        BadLogException: when no language's pattern matches the line.
    """
    for candidateLanguage in _logLanguageRegex:
        found = re.search(_logLanguageRegex[candidateLanguage]['character'], characterLine)
        if found is not None:
            return found.group(0), candidateLanguage
    raise BadLogException("not character log")
|
ArtificialQualia/PyEveLiveDPS
|
PyEveLiveDPS/logreader.py
|
Python
|
gpl-3.0
| 31,630
|
# -*- coding: UTF-8 -*-
#######################################################################
# ----------------------------------------------------------------------------
# "THE BEER-WARE LICENSE" (Revision 42):
# @tantrumdev wrote this file. As long as you retain this notice you
# can do whatever you want with this stuff. If we meet some day, and you think
# this stuff is worth it, you can buy me a beer in return. - Muad'Dib
# ----------------------------------------------------------------------------
#######################################################################
# Addon Name: Placenta
# Addon id: plugin.video.placenta
# Addon Provider: Mr.Blamo
import base64
import re
import traceback
import urllib
import urlparse

from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import log_utils
class source:
    """Placenta scraper for hdmega.net (Kodi video add-on, Python 2).

    Implements the standard Placenta source interface: movie()/tvshow()/
    episode() resolve a site page URL for an item, sources() extracts
    playable hoster links from that page, and resolve() unwraps site-hosted
    links into the underlying iframe URL.
    """
    def __init__(self):
        self.priority = 1
        self.language = ['en']
        self.domains = ['hdmega.net']
        self.base_link = 'http://hdmega.net'
        self.search_link = '/search-movies/%s.html'
    def movie(self, imdb, title, localtitle, aliases, year):
        """Return the site URL of the movie page matching title/year, or None."""
        try:
            clean_title = cleantitle.geturl(title)
            search_url = urlparse.urljoin(self.base_link, self.search_link % clean_title.replace('-', '+'))
            results = client.request(search_url)
            results = client.parseDOM(results, 'div', {'id': 'movie-featured'})
            # For each result card: (link hrefs, release year, title).
            results = [(client.parseDOM(i, 'a', ret='href'),
                        re.findall('.+?elease:\s*(\d{4})</', i),
                        re.findall('<b><i>(.+?)</i>', i)) for i in results]
            # Keep only exact title/year matches.
            results = [(i[0][0], i[1][0], i[2][0]) for i in results if
                       (cleantitle.get(i[2][0]) == cleantitle.get(title) and i[1][0] == year)]
            url = results[0][0]
            return url
        except:
            # Any scraping failure (network, layout change, no match) is
            # logged and reported as "no result" (None).
            failure = traceback.format_exc()
            log_utils.log('HDMega - Exception: \n' + str(failure))
            return
    def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
        """Defer show resolution: encode the metadata for episode() to use."""
        try:
            url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
            url = urllib.urlencode(url)
            return url
        except:
            failure = traceback.format_exc()
            log_utils.log('HDMega - Exception: \n' + str(failure))
            return
    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        """Return the site URL of the given episode of the show encoded in `url`."""
        try:
            if url == None: return
            # `url` is the querystring built by tvshow(); decode it back.
            url = urlparse.parse_qs(url)
            url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
            url['premiered'], url['season'], url['episode'] = premiered, season, episode
            try:
                # Find the season page via the site search.
                clean_title = cleantitle.geturl(url['tvshowtitle'])+'-season-%d' % int(season)
                search_url = urlparse.urljoin(self.base_link, self.search_link % clean_title.replace('-', '+'))
                search_results = client.request(search_url)
                parsed = client.parseDOM(search_results, 'div', {'id': 'movie-featured'})
                parsed = [(client.parseDOM(i, 'a', ret='href'), re.findall('<b><i>(.+?)</i>', i)) for i in parsed]
                parsed = [(i[0][0], i[1][0]) for i in parsed if cleantitle.get(i[1][0]) == cleantitle.get(clean_title)]
                url = parsed[0][0]
            except:
                pass
            # Pick the numbered episode link out of the page's details block.
            data = client.request(url)
            data = client.parseDOM(data, 'div', attrs={'id': 'details'})
            data = zip(client.parseDOM(data, 'a'), client.parseDOM(data, 'a', ret='href'))
            url = [(i[0], i[1]) for i in data if i[0] == str(int(episode))]
            return url[0][1]
        except:
            failure = traceback.format_exc()
            log_utils.log('HDMega - Exception: \n' + str(failure))
            return
    def sources(self, url, hostDict, hostprDict):
        """Scrape hoster links from the page at `url`.

        Returns a list of dicts in the Placenta source format
        (source/quality/language/url/direct/debridonly).
        """
        try:
            sources = []
            if url == None: return sources
            html = client.request(url)
            try:
                # Primary player: iframe URL hidden in a Base64 document.write.
                v = re.findall('document.write\(Base64.decode\("(.+?)"\)', html)[0]
                b64 = base64.b64decode(v)
                url = client.parseDOM(b64, 'iframe', ret='src')[0]
                try:
                    # Host name = last two labels of the iframe's netloc.
                    host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
                    host = client.replaceHTMLCodes(host)
                    host = host.encode('utf-8')
                    sources.append({'source': host, 'quality': 'SD', 'language': 'en', 'url': url.replace('\/', '/'), 'direct': False, 'debridonly': False})
                except:
                    pass
            except:
                pass
            # Alternative mirrors listed as "server lines" on the page.
            parsed = client.parseDOM(html, 'div', {'class': 'server_line'})
            parsed = [(client.parseDOM(i, 'a', ret='href')[0], client.parseDOM(i, 'p', attrs={'class': 'server_servername'})[0]) for i in parsed]
            if parsed:
                for i in parsed:
                    try:
                        # Server label -> bare host name (e.g. "Server Link 1 xyz").
                        host = re.sub('Server|Link\s*\d+', '', i[1]).lower()
                        url = i[0]
                        host = client.replaceHTMLCodes(host)
                        host = host.encode('utf-8')
                        if 'other'in host: continue
                        sources.append({'source': host, 'quality': 'SD', 'language': 'en', 'url': url.replace('\/', '/'), 'direct': False, 'debridonly': False})
                    except:
                        pass
            return sources
        except:
            failure = traceback.format_exc()
            log_utils.log('HDMega - Exception: \n' + str(failure))
            return sources
    def resolve(self, url):
        """Unwrap a site-hosted link into the underlying iframe URL; other
        URLs are returned unchanged."""
        if self.base_link in url:
            url = client.request(url)
            v = re.findall('document.write\(Base64.decode\("(.+?)"\)', url)[0]
            b64 = base64.b64decode(v)
            url = client.parseDOM(b64, 'iframe', ret='src')[0]
        return url
|
felipenaselva/felipe.repository
|
script.module.placenta/lib/resources/lib/sources/en/hdmega.py
|
Python
|
gpl-2.0
| 5,966
|
# Copyright (C) 2007-2012, Stefan Schwarzer <sschwarzer@sschwarzer.net>
# See the file LICENSE for licensing terms.
import ntpath
import os
import shutil
import StringIO
import sys
import unittest
import ftputil
from ftputil import ftp_sync
# Assume the test subdirectories are or will be in the current directory.
TEST_ROOT = os.getcwd()
class TestLocalToLocal(unittest.TestCase):
    """Sync tests between two directories on the local filesystem."""
    def setUp(self):
        # Ensure an (empty) source directory exists and the target is fresh.
        if not os.path.exists("test_empty"):
            os.mkdir("test_empty")
        if os.path.exists("test_target"):
            shutil.rmtree("test_target")
        os.mkdir("test_target")
    def test_sync_empty_dir(self):
        # Syncing an empty source directory must not fail.
        source = ftp_sync.LocalHost()
        target = ftp_sync.LocalHost()
        syncer = ftp_sync.Syncer(source, target)
        source_dir = os.path.join(TEST_ROOT, "test_empty")
        target_dir = os.path.join(TEST_ROOT, "test_target")
        syncer.sync(source_dir, target_dir)
    def test_source_with_and_target_without_slash(self):
        # A trailing separator on the source path must be handled the same
        # as a path without one.
        source = ftp_sync.LocalHost()
        target = ftp_sync.LocalHost()
        syncer = ftp_sync.Syncer(source, target)
        source_dir = os.path.join(TEST_ROOT, "test_source/")
        target_dir = os.path.join(TEST_ROOT, "test_target")
        syncer.sync(source_dir, target_dir)
# Helper classes for `TestUploadFromWindows`
class LocalWindowsHostPath(object):
    """Stand-in path object that forwards every attribute lookup to ntpath."""
    def __getattr__(self, attribute):
        """Delegate unknown attributes (join, split, ...) to the ntpath module."""
        return getattr(ntpath, attribute)
class LocalWindowsHost(ftp_sync.LocalHost):
    """LocalHost variant simulating a Windows client (backslash separators).

    File access and directory walking are stubbed out so the sync logic can
    be exercised without touching the real filesystem.
    """
    def __init__(self):
        self.path = LocalWindowsHostPath()
        self.sep = u"\\"
    def open(self, path, mode):
        # Just return a dummy file object.
        return StringIO.StringIO(u"")
    def walk(self, root):
        """
        Return a list of tuples as `os.walk`, but use tuples as if the
        directory structure was

        <root>
            dir1
                dir11
                file1
                file2

        where <root> is the string passed in as `root`.
        """
        join = ntpath.join
        return [(root,
                 [join(root, u"dir1")],
                 []),
                (join(root, u"dir1"),
                 [u"dir11"],
                 [u"file1", u"file2"])
               ]
class DummyFTPSession(object):
    """Minimal fake FTP session: reports the server root and ignores `dir`."""
    def pwd(self):
        """Pretend the session is at the server root."""
        return u"/"
    def dir(self, *unused_args):
        """No-op; invoked by ftputil's `_check_list_a_option`, otherwise unused."""
        pass
class DummyFTPPath(object):
    """Fake remote path object with just enough behavior for the sync test."""
    def abspath(self, path):
        # Identity mapping; absolute vs. relative is irrelevant here.
        return path
    def isdir(self, path):
        """True iff the path names a directory, i.e. it ends in 'dir'
        (ignoring the final character, e.g. a trailing separator or digit)."""
        return path[:-1].endswith(u"dir")
    def isfile(self, path):
        """True iff the path names a file, i.e. it ends in 'file'
        (ignoring the final character)."""
        return path[:-1].endswith(u"file")
class ArgumentCheckingFTPHost(ftputil.FTPHost):
    """FTPHost stub asserting no Windows separators leak into remote paths."""
    def __init__(self, *args, **kwargs):
        super(ArgumentCheckingFTPHost, self).__init__(*args, **kwargs)
        self.path = DummyFTPPath()
    def _make_session(self, *args, **kwargs):
        # Avoid opening a real FTP connection.
        return DummyFTPSession()
    def mkdir(self, path):
        # Remote paths must use forward slashes only.
        assert u"\\" not in path
    def open(self, path, mode):
        assert u"\\" not in path
        return StringIO.StringIO(u"")
class TestUploadFromWindows(unittest.TestCase):
    """Regression test: uploading from Windows must not mix path separators."""
    def test_no_mixed_separators(self):
        source = LocalWindowsHost()
        target = ArgumentCheckingFTPHost()
        local_root = ntpath.join(u"some", u"directory")
        syncer = ftp_sync.Syncer(source, target)
        # If the following call raises any `AssertionError`s, the
        # `unittest` framework will catch them and show them.
        syncer.sync(local_root, u"not_used_by_ArgumentCheckingFTPHost")
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
rathodmartin/oc
|
tools/tst/ftputil-2.8/test/test_ftp_sync.py
|
Python
|
epl-1.0
| 3,655
|
from datetime import date, datetime
from django.contrib.auth.decorators import permission_required
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
from django.utils.decorators import method_decorator
from django.conf import settings
from income.forms import CuentasCobrarPromotorForm, CuentasCobrarPadresForm, CuentasCobrarPromotorDetalleForm
from django.db.models import Q
from income.models import calculo_ingresos_promotor, obtener_mes, calculo_ingresos_alumno, calculo_por_nivel_promotor
from django.views.generic import FormView, TemplateView
from django.shortcuts import render
from register.models import Cajero
from enrollment.models import Cuentascobrar, Matricula
from register.models import Colegio, Alumno, Apoderado, Promotor, PersonalColegio, Personal
from profiles.models import Profile
from income.models import Cobranza, DetalleCobranza
from cash.models import CajaCajero
from utils.middleware import get_current_colegio, get_current_user, validar_roles
from utils.views import MyLoginRequiredMixin
from django.http import HttpResponseRedirect
from django.conf import settings
import logging
# Shared application logger — handlers/level presumably configured in the
# Django settings under the name "project"; TODO confirm.
logger = logging.getLogger("project")
class FiltrarCuentas(MyLoginRequiredMixin, TemplateView):
    """Cashier view: find students by DNI or paternal surname for payment.

    Renders the search form/results; only users with the 'cajero' role may
    access it, and a warning flag is set when no cash session is open.
    """
    template_name = "filtrar_cuentas.html"
    # Class-level scratch list of matched accounts; reset per request.
    cuentas = []
    #@method_decorator(permission_required('income.Registrar_Pago_List', login_url=settings.REDIRECT_PERMISOS,
    #                                      raise_exception=False))
    def get(self, request, *args, **kwargs):
        # Only cashiers may use this screen.
        roles = ['cajero']
        if validar_roles(roles=roles):
            logger.info("Se tienen los permisos de cajero")
        else:
            return HttpResponseRedirect(settings.REDIRECT_PERMISOS)
        try:
            # The cashier should have an open cash session (CajaCajero);
            # otherwise `alerta` makes the template show a warning.
            usuario = get_current_user()
            mov = CajaCajero.objects.get(estado=True, usuario_modificacion= str(usuario.id))
            alerta = False
        except:
            # NOTE(review): bare except also hides MultipleObjectsReturned
            # and unrelated errors — confirm intended.
            alerta = True
        dato = 1
        logger.info("Estoy en income pagos")
        try:
            # Search parameters are optional; missing keys land in except.
            dato = request.GET['dato']
            logger.info("Ver si existe un GET")
            print(request.GET['filter'])
            if request.GET['filter'] == 'DNI':
                print("filtro DNI")
                alumnos = Alumno.objects.filter(numero_documento=request.GET['dato'])
            else:
                alumnos = Alumno.objects.filter(apellido_pa__icontains = request.GET['dato'].upper())
        except:
            self.cuentas = []
            alumnos = []
        # Keep only students with an active enrolment in the current school.
        alumnos1 = []
        for alumno in alumnos:
            try:
                matricula = Matricula.objects.get(colegio=get_current_colegio(), activo=True, alumno= alumno)
                alumnos1.append(alumno)
            except:
                logger.info("no pertenece al colegio")
        return render(request, template_name=self.template_name, context={
            'alerta':alerta,
            'dato':dato,
            'alumnos': alumnos1,
        })
class RegistrarPagoListView(MyLoginRequiredMixin, TemplateView):
    """Cashier view to select a student's open accounts and register payments.

    GET renders the student's pending accounts (searched by DNI or paternal
    surname); POST collects the selected accounts with full/partial amounts,
    persists the collection (Cobranza + DetalleCobranza) and renders the
    receipt ("detalles_pago.html").
    """
    template_name = "registrarpago_form.html"
    # Class-level scratch list of the accounts being displayed/charged.
    cuentas = []
    #@method_decorator(permission_required('income.Registrar_Pago_List', login_url=settings.REDIRECT_PERMISOS,
    #                                      raise_exception=False))
    def get(self, request, *args, **kwargs):
        """Render the pending accounts of the student matched by the search."""
        # Only cashiers may use this screen.
        roles = ['cajero']
        if validar_roles(roles=roles):
            logger.info("Se tienen los permisos de cajero")
        else:
            return HttpResponseRedirect(settings.REDIRECT_PERMISOS)
        try:
            # The cashier should have an open cash session; `alerta` warns otherwise.
            usuario = get_current_user()
            mov = CajaCajero.objects.get(estado=True, usuario_modificacion= str(usuario.id))
            alerta = False
        except:
            alerta = True
        dato = 1
        logger.info("Estoy en income pagos")
        # All open, active accounts of the current school.
        cuentas_totales = Cuentascobrar.objects.filter(
            matricula__colegio__id_colegio=self.request.session.get('colegio'), estado=True, activo=True)
        try:
            # Search parameters are optional; missing keys land in except.
            dato = request.GET['dato']
            logger.info("Ver si existe un GET")
            logger.debug(request.GET['filter'])
            if request.GET['filter'] == 'DNI':
                logger.debug("filtro DNI")
                self.cuentas = cuentas_totales.filter(matricula__alumno__numero_documento=request.GET['dato'].upper(),activo=True, estado=True).order_by("fecha_ven")
                alumno = Alumno.objects.get(numero_documento=request.GET['dato'])
            else:
                self.cuentas = cuentas_totales.filter(matricula__alumno__apellido_pa__icontains= request.GET['dato'].upper(),activo=True, estado=True).order_by("fecha_ven")
                alumno = Alumno.objects.get(apellido_pa__icontains = request.GET['dato'].upper())
        except:
            # No (or ambiguous) search: render an empty account list.
            self.cuentas = []
        logger.info(self.cuentas)
        try:
            pknum = alumno.id_alumno
        except:
            # `alumno` is undefined when the search failed above.
            pknum = None
        return render(request, template_name=self.template_name, context={
            'alerta':alerta,
            'dato':dato,
            'cuentascobrar': self.cuentas,
            'pknum': pknum,
        })
    def post(self, request, *args, **kwargs):
        """Collect the selected accounts and amounts, then render the receipt."""
        logger.info("Estoy en el POST")
        logger.info(request.POST)
        data_post = request.POST
        self.cuentas = Cuentascobrar.objects.filter(matricula__alumno__id_alumno=request.POST['persona'], activo=True, estado=True).order_by("fecha_ven")
        lista_cuentas = []
        lista_montos = []
        logger.info(self.cuentas)
        for cuenta in self.cuentas:
            try:
                logger.info('Iniciando el Try')
                # Unselected rows have no "seleccionado<id>" key: the KeyError
                # below skips them via the except handler.
                texto_seleccion = "seleccionado{0}".format(cuenta.id_cuentascobrar)
                logger.info(data_post[texto_seleccion])
                text1 = "optionsRadios{0}".format(cuenta.id_cuentascobrar)
                text2 = "montoparcial{0}".format(cuenta.id_cuentascobrar)
                logger.info('Iniciando el If')
                # Fixed: compare by value, not identity — `is "1"` only worked
                # through CPython string interning and raises SyntaxWarning on
                # Python >= 3.8.
                logger.info(data_post[text1] == "1")
                if data_post[text1] == "1":
                    # Full payment: charge the whole outstanding debt.
                    logger.info('Pago Completo')
                    lista_cuentas.append(cuenta)
                    logger.info(lista_cuentas)
                    lista_montos.append(cuenta.deuda)
                    logger.info('Monto {0}'.format(cuenta.deuda))
                else:
                    # Partial payment: charge the amount typed by the cashier.
                    logger.info('Pago Parcial')
                    lista_cuentas.append(cuenta)
                    lista_montos.append(float(data_post[text2]))
                    logger.info('Monto {0}'.format(float(data_post[text2])))
            except:
                logger.info("no pude registrar")
        logger.info(lista_cuentas)
        logger.info(lista_montos)
        datos_contexto = self.detalles(lista_cuentas,lista_montos)
        return render(request, template_name="detalles_pago.html", context=datos_contexto)
    def detalles(self,lista_cuentas,lista_montos):
        """Persist the collection and build the receipt template context."""
        total = 0
        for monto in lista_montos:
            total = total + monto
        logger.info("Estoy en Detalles")
        logger.info(self.request.user)
        cobranza_actual = self.CrearDetallesCobranza(lista_cuentas,lista_montos,total)
        datos_contexto = {}
        datos_contexto['colegio'] = Colegio.objects.get(pk = self.request.session.get('colegio'))
        # Advance the school's receipt counter.
        colegio = Colegio.objects.get(pk = self.request.session.get('colegio'))
        colegio.numero_recibo = colegio.numero_recibo + 1
        colegio.save()
        datos_contexto['alumno'] = lista_cuentas[0].matricula.alumno
        datos_contexto['tiposervicio'] = lista_cuentas[0].matricula.tipo_servicio
        datos_contexto['fecha'] = datetime.today()
        datos_contexto['cajero'] = Profile.objects.get(user=self.request.user)
        datos_contexto['servicios'] = DetalleCobranza.objects.filter(cobranza=cobranza_actual)
        datos_contexto['cobranza'] = cobranza_actual.id_cobranza
        datos_contexto['subtotal'] = total
        datos_contexto['descuento'] = 0
        datos_contexto['total'] = total
        return datos_contexto
    def CrearDetallesCobranza(self,lista_cuentas,lista_montos,total):
        """Create the Cobranza, its DetalleCobranza rows, and update balances."""
        logger.info("Estoy en crear detalles")
        usuario = get_current_user()
        cobranza_actual = Cobranza(
            movimiento= CajaCajero.objects.get(estado=True,usuario_creacion=str(usuario.id)),
            fecha_pago = date.today(),
            monto = total,
            medio_pago = 1,
            num_operacion = "1234"
        )
        cobranza_actual.save()
        # Add this collection to the open cash session's sales total.
        movi = CajaCajero.objects.get(estado=True,usuario_creacion=str(usuario.id))
        movi.ventas = movi.ventas + total
        movi.save()
        for cuenta, monto in zip(lista_cuentas, lista_montos):
            detalle_actual = DetalleCobranza(
                cuentascobrar=cuenta,
                cobranza=cobranza_actual,
                monto=monto
            )
            detalle_actual.save()
            # Fixed: value equality instead of `monto is cuenta.deuda` —
            # identity only held when the very same object was appended, so a
            # partial payment covering the full debt never closed the account.
            if monto == cuenta.deuda:
                cuenta.deuda = 0
                cuenta.estado = False
            else:
                cuenta.deuda = cuenta.deuda - monto
            cuenta.save()
        return cobranza_actual
"""
PADRES: PAGOS REALIZADOS POR HIJO, AÑO, MES Y ESTADO
"""
class ControlIngresosPadresView(FormView):
    """Parent-facing report of payments per child, filtered by year/month/status."""
    model = Cuentascobrar
    template_name = "control_ingresos_padres.html"
    form_class = CuentasCobrarPadresForm
    def cargarformPadres(self, request):
        """Build the filter-form context for the logged-in parent (apoderado).

        Returns a dict with 'alumnos', 'anios', 'meses' and 'estados' on
        success, or {'mensaje_error': ...} when the user is not a parent of
        a student enrolled in the current school.
        """
        # Current school from the session.
        id_colegio = get_current_colegio()
        colegio = Colegio.objects.get(pk=id_colegio)
        # logger.debug("Colegio: " + colegio.nombre)
        user = get_current_user()
        logger.debug("Usuario: " + user.name)
        try:
            profile = Profile.objects.get(user=user)
            logger.debug("profile: " + str(profile.id_persona))
        except Profile.DoesNotExist:
            # Fixed: previously execution fell through and crashed with a
            # NameError on `profile`; report the error context instead.
            return {'mensaje_error': "No existe la Persona asociada al usuario"}
        try:
            # 1. The user must be an apoderado (legal guardian).
            apoderado = Apoderado.objects.get(persona=profile)
            logger.debug("apoderado: " + str(apoderado.id_apoderado))
            # 2. Collect this guardian's enrolments in the current school.
            matriculas = Matricula.objects.filter(colegio=colegio, alumno__apoderados=apoderado)
            if matriculas.count() == 0:
                sw_error = True
                mensaje_error = "No es un apoderado de un alumno asociado al colegio"
            else:
                sw_error = False
        except Apoderado.DoesNotExist:
            sw_error = True
            mensaje_error = "No es un apoderado"
        if sw_error != True:
            # Children selectable in the form.
            alumnos = []
            for apo_alu in matriculas:
                alumnos.append(apo_alu.alumno)
            # Selectable years: current year plus the two previous ones.
            anio = datetime.today().year
            anios = []
            for i in range(0, 3):
                anios.append(anio - i)
            # Month names; the template receives the full list.  (A previous
            # truncation to the current month was computed but never used and
            # has been removed.)
            meses_todos = ["Todos", "Enero", "Febrero", "Marzo", "Abril", "Mayo", "Junio", "Julio", "Agosto",
                           "Setiembre", "Octubre", "Noviembre", "Diciembre"]
            # Payment-status filter options.
            estados = ["Todos", "Pagado", "No pagado"]
            return {'alumnos': alumnos, 'anios': anios, 'meses': meses_todos, 'estados': estados}
        else:
            return {'mensaje_error': mensaje_error}
    @method_decorator(permission_required('enrollment.control_ingresos_padres', login_url=settings.REDIRECT_PERMISOS,
                                          raise_exception=False))
    def get(self, request, *args, **kwargs):
        """Render the filter form, or redirect when the user lacks access."""
        super(ControlIngresosPadresView, self).get(request, *args, **kwargs)
        contexto = self.cargarformPadres(request)
        if 'mensaje_error' in contexto.keys():
            return HttpResponseRedirect(settings.REDIRECT_PERMISOS)
        else:
            return render(request, self.template_name, contexto)
    @method_decorator(permission_required('enrollment.control_ingresos_padres', login_url=settings.REDIRECT_PERMISOS,
                                          raise_exception=False))
    def get_queryset(self):
        # FormView does not use a queryset; keep it explicitly empty.
        return []
    @method_decorator(permission_required('enrollment.control_ingresos_padres', login_url=settings.REDIRECT_PERMISOS,
                                          raise_exception=False))
    def post(self, request, *args, **kwargs):
        """Apply the selected filters and list the child's accounts."""
        alumno = request.POST["alumno"]
        logger.info("El alumno ingresado es {0}".format(alumno))
        anio = request.POST["anio"]
        logger.debug("El año ingresado es {0}".format(anio))
        mes = request.POST["mes"]
        logger.debug("El mes ingresado es {0}".format(mes))
        estado = request.POST["estado"]
        logger.debug("El tipo o estado ingresado es {0}".format(estado))
        # Validación de hijos asociados a un apoderado
        alumno = int(alumno)
        id_colegio = get_current_colegio()
        cuenta_padres = calculo_ingresos_alumno(id_colegio, alumno, anio, mes, estado)
        contexto = self.cargarformPadres(request)
        # Always pass object_list; it is empty when nothing matched.
        contexto['object_list'] = cuenta_padres if len(cuenta_padres) != 0 else []
        return render(request, template_name=self.template_name, context=contexto)
"""
PROMOTOR: DEUDAS Y COBROS POR AÑO, MES Y NIVEL
"""
class ControlIngresosPromotorView(FormView):
    """Promoter/director dashboard of receivables vs. collections per year,
    month and grade level for the current school."""
    model = Cuentascobrar
    template_name = "control_ingresos_director.html"
    form_class = CuentasCobrarPromotorForm
    def cargarformPromotor(self, request):
        """Build the base form context (years and month names) when the user
        has the 'promotor' or 'director' role; otherwise return a dict whose
        only key is 'mensaje_error'."""
        roles = ['promotor', 'director']
        if validar_roles(roles=roles):
            # Years offered: next year plus the three most recent ones.
            anio = datetime.today().year
            anios = []
            anios.append(anio + 1)
            for i in range(0, 3):
                anios.append(anio - i)
            # Month names ("Todos" = all). `meses` collects only the elapsed
            # months of the current year but is not returned — the template
            # receives the full list.
            meses_todos = ["Todos", "Enero", "Febrero", "Marzo", "Abril", "Mayo", "Junio", "Julio", "Agosto",
                           "Setiembre", "Octubre", "Noviembre", "Diciembre"]
            num_mes = datetime.today().month
            meses = []
            for i in range(0, num_mes + 1):
                meses.append(meses_todos[i])
            return {'anio_actual': anio, 'anios': anios, 'meses': meses_todos}
        else:
            mensaje_error = "No tienes acceso a esta vista"
            return {'mensaje_error': mensaje_error}  # return context
    @method_decorator(
        permission_required('enrollment.control_ingresos_promotor', login_url=settings.REDIRECT_PERMISOS,
                            raise_exception=False))
    def get(self, request, *args, **kwargs):
        """Render the dashboard form, or redirect when access is denied."""
        super(ControlIngresosPromotorView, self).get(request, *args, **kwargs)
        contexto = self.cargarformPromotor(request)
        if 'mensaje_error' in contexto.keys():
            return HttpResponseRedirect(settings.REDIRECT_PERMISOS)
        else:
            return render(request, self.template_name, contexto)  # return context
    @method_decorator(
        permission_required('enrollment.control_ingresos_promotor', login_url=settings.REDIRECT_PERMISOS,
                            raise_exception=False))
    def get_queryset(self):
        # The listing is produced in post(); no default queryset.
        return []
    @method_decorator(
        permission_required('enrollment.control_ingresos_promotor', login_url=settings.REDIRECT_PERMISOS,
                            raise_exception=False))
    def post(self, request, *args, **kwargs):
        """Compute totals (receivable, collected, outstanding) overall and per
        grade level for the selected year/month and render the dashboard."""
        id_colegio = get_current_colegio()
        anio = request.POST["anio"]
        mes = request.POST["mes"]
        logger.info(anio)
        logger.info(mes)
        # Chart labels cover only the elapsed months when the current year is
        # selected; otherwise all twelve.
        if anio == str(date.today().year):
            num_mes = date.today().month
        else:
            num_mes = 12
        anio = int(anio)
        por_cobrar_total, cobro_total, deuda_total = calculo_ingresos_promotor(id_colegio, anio, mes)
        por_cobrar_grado, cobro_total_grado, deuda_total_grado = calculo_por_nivel_promotor(id_colegio, anio, mes)
        meses = ["Enero", "Febrero", "Marzo", "Abril", "Mayo", "Junio", "Julio", "Agosto", "Setiembre", "Octubre",
                 "Noviembre", "Diciembre"]
        mes_labels = []
        for i in range(0, num_mes):
            mes_labels.append(meses[i])
        logger.info(mes_labels)
        contexto = self.cargarformPromotor(request)
        contexto['por_cobrar_grado'] = por_cobrar_grado
        contexto['cobro_total_grado'] = cobro_total_grado
        contexto['deuda_total_grado'] = deuda_total_grado
        contexto['por_cobrar_total'] = por_cobrar_total
        contexto['cobro_total'] = cobro_total
        contexto['deuda_total'] = deuda_total
        contexto['mes_labels'] = mes_labels
        contexto['mes_llega'] = mes
        contexto['anio_llega'] = anio
        return render(request, template_name=self.template_name, context = contexto)
"""
PROMOTOR: DETALLE DE PAGOS REALIZADOS POR HIJO, AÑO, MES Y ESTADO
"""
class ControlIngresosPromotorDetallesView(FormView):
    """Promoter/director detail listing of receivables filtered by student,
    year, month and payment state."""
    model = Cuentascobrar
    template_name = "control_ingresos_promotor_detalle.html"
    form_class = CuentasCobrarPromotorDetalleForm
    def cargarformPromotordetalle(self, request):
        """Build the filter-form context (years, months, states) for users
        with the 'promotor' or 'director' role; otherwise return a dict whose
        only key is 'mensaje_error'."""
        roles = ['promotor', 'director']
        if validar_roles(roles=roles):
            # Years offered: next year plus the three most recent ones.
            anio = datetime.today().year
            anios = []
            anios.append(anio + 1)
            for i in range(0, 3):
                anios.append(anio - i)
            # Month names ("Todos" = all); `meses` (elapsed months) is built
            # but not returned — the template gets the full list.
            meses_todos = ["Todos", "Enero", "Febrero", "Marzo", "Abril", "Mayo", "Junio", "Julio", "Agosto",
                           "Setiembre", "Octubre", "Noviembre", "Diciembre"]
            num_mes = datetime.today().month
            meses = []
            for i in range(0, num_mes + 1):
                meses.append(meses_todos[i])
            # Payment states offered by the form.
            estados = ["Todos", "Pagado", "No pagado"]
            return {'anio_actual':anio, 'anios': anios, 'meses': meses_todos, 'estados': estados}
        else:
            mensaje_error = "No tienes acceso a esta vista"
            return {'mensaje_error': mensaje_error}  # return context
    @method_decorator(
        permission_required('enrollment.control_ingresos_promotor_detalle', login_url=settings.REDIRECT_PERMISOS,
                            raise_exception=False))
    def get(self, request, *args, **kwargs):
        """Render the filter form, or redirect when access is denied."""
        super(ControlIngresosPromotorDetallesView, self).get(request, *args, **kwargs)
        contexto = self.cargarformPromotordetalle(request)
        if 'mensaje_error' in contexto.keys():
            return HttpResponseRedirect(settings.REDIRECT_PERMISOS)
        else:
            return render(request, self.template_name, contexto)  # return context
    @method_decorator(
        permission_required('enrollment.control_ingresos_promotor_detalle', login_url=settings.REDIRECT_PERMISOS,
                            raise_exception=False))
    def get_queryset(self):
        # The listing is produced in post(); no default queryset.
        return []
    @method_decorator(
        permission_required('enrollment.control_ingresos_promotor_detalle', login_url=settings.REDIRECT_PERMISOS,
                            raise_exception=False))
    def post(self, request, *args, **kwargs):
        """Apply the submitted filters over active receivables of the current
        school and render the listing."""
        alumno = request.POST["alumno"]
        anio = request.POST["anio"]
        mes = request.POST["mes"]
        estado = request.POST["estado"]
        logger.info(alumno)
        colegio = get_current_colegio()
        # Restrict to active rows of the current school, ordered by due date.
        # NOTE(review): the manager is spelled `objetos` — presumably a
        # project-defined manager on Cuentascobrar; confirm (Django's default
        # manager is `objects`).
        cuentas_cobrar_colegio = self.model.objetos.filter(matricula__colegio__id_colegio=colegio, activo=True).order_by('fecha_ven')
        # Filter by student: case-insensitive match on any name component.
        if alumno == "":
            por_cobrar1 = cuentas_cobrar_colegio
        else:
            por_cobrar1 = cuentas_cobrar_colegio.filter(Q(matricula__alumno__nombre__icontains=alumno.upper()) |
                                                        Q(matricula__alumno__segundo_nombre__icontains=alumno.upper()) |
                                                        Q(matricula__alumno__apellido_pa__icontains=alumno.upper()) |
                                                        Q(matricula__alumno__apellido_ma__icontains=alumno.upper()))
        # Filter by due-date year.
        if anio == "Todos":
            por_cobrar2 = por_cobrar1
        else:
            anio = int(anio)
            por_cobrar2 = por_cobrar1.filter(fecha_ven__year=anio)
        # Filter by due-date month.
        if mes == "Todos":
            por_cobrar3 = por_cobrar2
        else:
            num_mes = obtener_mes(mes)
            por_cobrar3 = por_cobrar2.filter(fecha_ven__month=num_mes)
        # Filter by payment state. NOTE(review): estado=False appears to mean
        # "paid" and estado=True "unpaid" — confirm against the model.
        if estado == "Todos":
            por_cobrar = por_cobrar3
        elif estado == "Pagado":
            por_cobrar = por_cobrar3.filter(estado=False)
        else:
            por_cobrar = por_cobrar3.filter(estado=True)
        contexto = self.cargarformPromotordetalle(request)
        contexto['object_list']=por_cobrar
        contexto['form']=CuentasCobrarPromotorDetalleForm
        return render(request, template_name=self.template_name, context=contexto)
########################################################
# Generacion de PDF
########################################################
from reportlab.lib.pagesizes import letter, A5, A6
from reportlab.pdfgen import canvas
from django.http import HttpResponse
from datetime import date
from register.models import Telefono, Direccion
def boleta_A7(request):
    """Render a compact payment ticket as a downloadable PDF.

    Expects POST fields 'cobranza' (Cobranza pk) and 'alumno' (Alumno pk).
    NOTE(review): despite the _A7 name, the page size used is A6 — confirm.
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="ticket_{0}.pdf"'.format(datetime.today())
    p = canvas.Canvas(response, pagesize=A6)
    p.setLineWidth(.3)
    p.setFont('Helvetica', 12)
    # Fetch the collection, school, student and line items for the ticket.
    id_cobranza_actual = request.POST["cobranza"]
    id_alumno = request.POST["alumno"]
    cobranza_actual = Cobranza.objects.get(id_cobranza=id_cobranza_actual)
    colegio = Colegio.objects.get(pk=get_current_colegio())
    detalle_cobranza = DetalleCobranza.objects.filter(cobranza=cobranza_actual)
    alumno = Alumno.objects.get(id_alumno=id_alumno)
    # `cajero` is fetched but never used — TODO confirm whether intentional
    # (the query would raise if the current user has no Profile).
    cajero = Profile.objects.get(user=get_current_user())
    nombre = alumno
    # Per-line amounts and descriptions, plus the grand total.
    monto = [(str(p.monto)) for p in detalle_cobranza]
    total = sum([(p.monto) for p in detalle_cobranza])
    descripcion = [(str(p.cuentascobrar.servicio.nombre) + " " + str(p.cuentascobrar.servicio.tipo_servicio)) for p in
                   detalle_cobranza]
    fecha = date.today()
    # School and student addresses for the header.
    dire = Direccion.objects.get(colegio=colegio)
    dir_colegio = dire.calle
    departamento = dire.get_departamento + " - PERU"
    dire_alumno = Direccion.objects.get(persona=alumno.persona)
    direccion_alumno = dire_alumno.calle
    ruc_colegio = colegio.ruc
    # Previous value of the school's receipt counter — presumably the counter
    # was already incremented by the collection flow; TODO confirm.
    numero_recibo = colegio.numero_recibo - 1
    # --- Header: school data (left), RUC and receipt number (right). ---
    p.line(20, 390, 270, 390)
    p.setFont('Helvetica', 12)
    p.drawString(20, 360, '{0}'.format(colegio))
    p.drawString(20, 346, '{0}'.format(dir_colegio))
    try:
        telefono_colegio = Telefono.objects.get(colegio=colegio)
        p.drawString(20, 332, 'Telf.: {0}'.format(telefono_colegio))
        p.drawString(20, 318, '{0}'.format(departamento))
    except:
        # No phone on file: shift the department line up.
        p.drawString(20, 332, '{0}'.format(departamento))
    p.drawString(180, 360, 'RUC: {0}'.format(ruc_colegio))
    p.drawString(180, 346, 'RECIBO N° {0}'.format(numero_recibo))
    p.setFont('Helvetica', 10)
    p.drawString(20, 305, 'Sr(a): {0}'.format(nombre))
    p.drawString(20, 295, 'Dirección: {0}'.format(direccion_alumno))
    p.drawString(180, 305, 'Fecha: {0}'.format(fecha))
    # --- Table header. ---
    p.setFont('Helvetica', 10)
    p.line(20, 265, 270, 265)
    p.line(20, 283, 270, 283)
    p.line(195, 265, 195, 283)
    p.drawString(20, 270, 'Descripción:')
    p.drawString(200, 270, 'Importe S/.')
    p.setFont('Helvetica', 10)
    # --- One row per line item, 15pt apart, followed by the total. ---
    for k in range(len(descripcion)):
        p.drawString(20, 250 - 15 * k, '{0}'.format(descripcion[k]))
        p.drawString(200, 250 - 15 * k, '{0}'.format(monto[k]))
    p.line(20, 250 - 15 * len(descripcion) - 3, 270, 250 - 15 * len(descripcion) - 3)
    p.line(20, 250 - 15 * len(descripcion) - 18, 270, 250 - 15 * len(descripcion) - 18)
    p.drawString(130, 250 - 15 * len(descripcion) - 15, 'TOTAL S/.:')
    p.drawString(200, 250 - 15 * len(descripcion) - 15, '{0}'.format(total))
    p.showPage()
    p.save()
    return response
def recibo_A6(request):
    """Render an A6 payment receipt ("RECIBO") as a downloadable PDF.

    Expects POST fields 'cobranza' (Cobranza pk) and 'alumno' (Alumno pk).
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="recibo_A6_{0}.pdf"'.format(datetime.today())
    p = canvas.Canvas(response, pagesize=A6)
    p.setLineWidth(.3)
    p.setFont('Helvetica', 8)
    # Fetch the collection, school, student and line items.
    id_cobranza_actual = request.POST["cobranza"]
    id_alumno = request.POST["alumno"]
    cobranza_actual = Cobranza.objects.get(id_cobranza=id_cobranza_actual)
    colegio = Colegio.objects.get(pk=get_current_colegio())
    detalle_cobranza = DetalleCobranza.objects.filter(cobranza=cobranza_actual)
    alumno = Alumno.objects.get(id_alumno=id_alumno)
    # `cajero` is fetched but never used — TODO confirm whether intentional.
    cajero = Profile.objects.get(user=get_current_user())
    nombre = alumno
    monto = [(str(p.monto)) for p in detalle_cobranza]
    total = sum([(p.monto) for p in detalle_cobranza])
    descripcion = [(str(p.cuentascobrar.servicio.nombre) + " " + str(p.cuentascobrar.servicio.tipo_servicio)) for p in
                   detalle_cobranza]
    fecha = date.today()
    dire = Direccion.objects.get(colegio=colegio)
    dir_colegio = dire.calle
    departamento = dire.get_departamento + " - PERU"
    dire_alumno = Direccion.objects.get(persona=alumno.persona)
    direccion_alumno = dire_alumno.calle
    # Previous value of the school's receipt counter — TODO confirm flow.
    numero_recibo = colegio.numero_recibo - 1
    # Header: school data (left), receipt number and date (right).
    p.line(20, 390, 270, 390)
    p.setFont('Helvetica', 8)
    p.drawString(70, 360, '{0}'.format(colegio))
    p.drawString(70, 350, '{0}'.format(dir_colegio))
    try:
        telefono_colegio = Telefono.objects.get(colegio=colegio)
        p.drawString(70, 340, 'Telf.: {0}'.format(telefono_colegio))
        p.drawString(70, 330, '{0}'.format(departamento))
    except:
        # No phone on file: shift the department line up.
        p.drawString(70, 340, '{0}'.format(departamento))
    p.drawString(195, 360, 'RECIBO {0}'.format(numero_recibo))
    p.drawString(195, 350, 'FECHA: {0}'.format(fecha))
    p.setFont('Helvetica', 6)
    p.drawString(20, 310, 'Sr(a): {0}'.format(nombre))
    p.drawString(20, 300, 'Dirección: {0}'.format(direccion_alumno))
    # Table header, one row per line item (15pt apart), then the total.
    p.setFont('Helvetica', 8)
    p.line(20, 265, 270, 265)
    p.line(20, 283, 270, 283)
    p.line(195, 265, 195, 283)
    p.drawString(20, 270, 'Descripción:')
    p.drawString(200, 270, 'Importe S/.')
    p.setFont('Helvetica', 6)
    for k in range(len(descripcion)):
        p.drawString(20, 250 - 15 * k, '{0}'.format(descripcion[k]))
        p.drawString(200, 250 - 15 * k, '{0}'.format(monto[k]))
    p.line(20, 250 - 15 * len(descripcion) - 3, 270, 250 - 15 * len(descripcion) - 3)
    p.line(20, 250 - 15 * len(descripcion) - 18, 270, 250 - 15 * len(descripcion) - 18)
    p.drawString(130, 250 - 15 * len(descripcion) - 15, 'TOTAL S/.:')
    p.drawString(200, 250 - 15 * len(descripcion) - 15, '{0}'.format(total))
    p.showPage()
    p.save()
    return response
def boleta_A6(request):
    """Render an A6 sales slip ("BOLETA DE VENTA") as a downloadable PDF.

    Expects POST fields 'cobranza' (Cobranza pk) and 'alumno' (Alumno pk).
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="boleta_A6_{0}.pdf"'.format(datetime.today())
    p = canvas.Canvas(response, pagesize=A6)
    p.setLineWidth(.3)
    p.setFont('Helvetica', 8)
    # Fetch the collection, school, student and line items.
    id_cobranza_actual = request.POST["cobranza"]
    id_alumno = request.POST["alumno"]
    cobranza_actual = Cobranza.objects.get(id_cobranza=id_cobranza_actual)
    colegio = Colegio.objects.get(pk=get_current_colegio())
    detalle_cobranza = DetalleCobranza.objects.filter(cobranza=cobranza_actual)
    alumno = Alumno.objects.get(id_alumno=id_alumno)
    # `cajero` is fetched but never used — TODO confirm whether intentional.
    cajero = Profile.objects.get(user=get_current_user())
    nombre = alumno
    monto = [(str(p.monto)) for p in detalle_cobranza]
    total = sum([(p.monto) for p in detalle_cobranza])
    descripcion = [(str(p.cuentascobrar.servicio.nombre) + " " + str(p.cuentascobrar.servicio.tipo_servicio)) for p in
                   detalle_cobranza]
    fecha = date.today()
    dire = Direccion.objects.get(colegio=colegio)
    dir_colegio = dire.calle
    departamento = dire.get_departamento + " - PERU"
    dire_alumno = Direccion.objects.get(persona=alumno.persona)
    direccion_alumno = dire_alumno.calle
    ruc_colegio = colegio.ruc
    # Previous value of the school's receipt counter — TODO confirm flow.
    numero_recibo = colegio.numero_recibo - 1
    # Header: school data (left), RUC and slip number (right).
    p.line(20, 390, 270, 390)
    p.setFont('Helvetica', 8)
    p.drawString(70, 360, '{0}'.format(colegio))
    p.drawString(70, 350, '{0}'.format(dir_colegio))
    try:
        telefono_colegio = Telefono.objects.get(colegio=colegio)
        p.drawString(70, 340, 'Telf.: {0}'.format(telefono_colegio))
        p.drawString(70, 330, '{0}'.format(departamento))
    except:
        # No phone on file: shift the department line up.
        p.drawString(70, 340, '{0}'.format(departamento))
    p.drawString(195, 360, 'RUC: {0}'.format(ruc_colegio))
    p.drawString(195, 350, 'BOLETA DE VENTA')
    p.drawString(195, 340, '001 - N° {0}'.format(numero_recibo))
    p.setFont('Helvetica', 6)
    p.drawString(20, 310, 'Sr(a): {0}'.format(nombre))
    p.drawString(20, 300, 'Dirección: {0}'.format(direccion_alumno))
    p.drawString(195, 310, 'Fecha: {0}'.format(fecha))
    # Table header, one row per line item (15pt apart), then the total.
    p.setFont('Helvetica', 8)
    p.line(20, 265, 270, 265)
    p.line(20, 283, 270, 283)
    p.line(195, 265, 195, 283)
    p.drawString(20, 270, 'Descripción:')
    p.drawString(200, 270, 'Importe S/.')
    p.setFont('Helvetica', 6)
    for k in range(len(descripcion)):
        p.drawString(20, 250 - 15 * k, '{0}'.format(descripcion[k]))
        p.drawString(200, 250 - 15 * k, '{0}'.format(monto[k]))
    p.line(20, 250 - 15 * len(descripcion) - 3, 270, 250 - 15 * len(descripcion) - 3)
    p.line(20, 250 - 15 * len(descripcion) - 18, 270, 250 - 15 * len(descripcion) - 18)
    p.drawString(130, 250 - 15 * len(descripcion) - 15, 'TOTAL S/.:')
    p.drawString(200, 250 - 15 * len(descripcion) - 15, '{0}'.format(total))
    p.showPage()
    p.save()
    return response
def recibo_A5(request):
    """Render an A5 payment receipt ("RECIBO") as a downloadable PDF.

    Expects POST fields 'cobranza' (Cobranza pk) and 'alumno' (Alumno pk).
    Same layout as recibo_A6 but scaled to A5 coordinates.
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="recibo_A5_{0}.pdf"'.format(datetime.today())
    p = canvas.Canvas(response, pagesize=A5)
    p.setLineWidth(.3)
    p.setFont('Helvetica', 8)
    # Fetch the collection, school, student and line items.
    id_cobranza_actual = request.POST["cobranza"]
    id_alumno = request.POST["alumno"]
    cobranza_actual = Cobranza.objects.get(id_cobranza=id_cobranza_actual)
    colegio = Colegio.objects.get(pk=get_current_colegio())
    detalle_cobranza = DetalleCobranza.objects.filter(cobranza=cobranza_actual)
    alumno = Alumno.objects.get(id_alumno=id_alumno)
    # `cajero` is fetched but never used — TODO confirm whether intentional.
    cajero = Profile.objects.get(user=get_current_user())
    nombre = alumno
    monto = [(str(p.monto)) for p in detalle_cobranza]
    total = sum([(p.monto) for p in detalle_cobranza])
    descripcion = [(str(p.cuentascobrar.servicio.nombre) + " " + str(p.cuentascobrar.servicio.tipo_servicio)) for p in
                   detalle_cobranza]
    fecha = date.today()
    dire = Direccion.objects.get(colegio=colegio)
    dir_colegio = dire.calle
    departamento = dire.get_departamento + " - PERU"
    dire_alumno = Direccion.objects.get(persona=alumno.persona)
    direccion_alumno = dire_alumno.calle
    # Previous value of the school's receipt counter — TODO confirm flow.
    numero_recibo = colegio.numero_recibo - 1
    # Header: school data (left), receipt number and date (right).
    p.line(40, 510, 370, 510)
    p.setFont('Helvetica', 10)
    p.drawString(90, 490, '{0}'.format(colegio))
    p.drawString(90, 480, '{0}'.format(dir_colegio))
    try:
        telefono_colegio = Telefono.objects.get(colegio=colegio)
        p.drawString(90, 470, 'Telf.: {0}'.format(telefono_colegio))
        p.drawString(90, 460, '{0}'.format(departamento))
    except:
        # No phone on file: shift the department line up.
        p.drawString(90, 470, '{0}'.format(departamento))
    p.drawString(270, 490, 'RECIBO {0}'.format(numero_recibo))
    p.drawString(270, 480, 'FECHA: {0}'.format(fecha))
    p.setFont('Helvetica', 8)
    p.drawString(40, 440, 'Sr(a): {0}'.format(nombre))
    p.drawString(40, 430, 'Dirección: {0}'.format(direccion_alumno))
    # Table header, one row per line item (15pt apart), then the total.
    p.setFont('Helvetica', 10)
    p.line(40, 395, 370, 395)
    p.line(40, 413, 370, 413)
    p.line(295, 395, 295, 413)
    p.drawString(40, 400, 'Descripción:')
    p.drawString(300, 400, 'Importe S/.')
    p.setFont('Helvetica', 8)
    for k in range(len(descripcion)):
        p.drawString(40, 370 - 15 * k, '{0}'.format(descripcion[k]))
        p.drawString(300, 370 - 15 * k, '{0}'.format(monto[k]))
    p.line(40, 370 - 15 * len(descripcion) - 3, 370, 370 - 15 * len(descripcion) - 3)
    p.line(40, 370 - 15 * len(descripcion) - 18, 370, 370 - 15 * len(descripcion) - 18)
    p.drawString(230, 370 - 15 * len(descripcion) - 15, 'TOTAL S/.:')
    p.drawString(300, 370 - 15 * len(descripcion) - 15, '{0}'.format(total))
    p.showPage()
    p.save()
    return response
def boleta_A5(request):
    """Render an A5 sales slip ("BOLETA DE VENTA") as a downloadable PDF.

    Expects POST fields 'cobranza' (Cobranza pk) and 'alumno' (Alumno pk).
    Same layout as boleta_A6 but scaled to A5 coordinates.
    """
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'attachment; filename="boleta_A5_{0}.pdf"'.format(datetime.today())
    p = canvas.Canvas(response, pagesize=A5)
    p.setLineWidth(.3)
    p.setFont('Helvetica', 8)
    # Fetch the collection, school, student and line items.
    id_cobranza_actual = request.POST["cobranza"]
    id_alumno = request.POST["alumno"]
    cobranza_actual = Cobranza.objects.get(id_cobranza=id_cobranza_actual)
    colegio = Colegio.objects.get(pk=get_current_colegio())
    detalle_cobranza = DetalleCobranza.objects.filter(cobranza=cobranza_actual)
    alumno = Alumno.objects.get(id_alumno=id_alumno)
    # `cajero` is fetched but never used — TODO confirm whether intentional.
    cajero = Profile.objects.get(user=get_current_user())
    nombre = alumno
    monto = [(str(p.monto)) for p in detalle_cobranza]
    total = sum([(p.monto) for p in detalle_cobranza])
    descripcion = [(str(p.cuentascobrar.servicio.nombre) + " " + str(p.cuentascobrar.servicio.tipo_servicio)) for p in
                   detalle_cobranza]
    fecha = date.today()
    dire = Direccion.objects.get(colegio=colegio)
    dir_colegio = dire.calle
    departamento = dire.get_departamento + " - PERU"
    dire_alumno = Direccion.objects.get(persona=alumno.persona)
    direccion_alumno = dire_alumno.calle
    ruc_colegio = colegio.ruc
    # Previous value of the school's receipt counter — TODO confirm flow.
    numero_recibo = colegio.numero_recibo - 1
    # Header: school data (left), RUC and slip number (right).
    p.line(40, 510, 370, 510)
    p.setFont('Helvetica', 10)
    p.drawString(90, 490, '{0}'.format(colegio))
    p.drawString(90, 480, '{0}'.format(dir_colegio))
    try:
        telefono_colegio = Telefono.objects.get(colegio=colegio)
        p.drawString(90, 470, 'Telf.: {0}'.format(telefono_colegio))
        p.drawString(90, 460, '{0}'.format(departamento))
    except:
        # No phone on file: shift the department line up.
        p.drawString(90, 470, '{0}'.format(departamento))
    p.drawString(270, 490, 'RUC: {0}'.format(ruc_colegio))
    p.drawString(270, 480, 'BOLETA DE VENTA')
    p.drawString(270, 470, '001 - N° {0}'.format(numero_recibo))
    p.setFont('Helvetica', 8)
    p.drawString(40, 440, 'Sr(a): {0}'.format(nombre))
    p.drawString(270, 440, 'Fecha: {0}'.format(fecha))
    p.drawString(40, 430, 'Dirección: {0}'.format(direccion_alumno))
    # Table header, one row per line item (15pt apart), then the total.
    p.setFont('Helvetica', 10)
    p.line(40, 395, 370, 395)
    p.line(40, 413, 370, 413)
    p.line(295, 395, 295, 413)
    p.drawString(40, 400, 'Descripción:')
    p.drawString(300, 400, 'Importe S/.')
    p.setFont('Helvetica', 8)
    for k in range(len(descripcion)):
        p.drawString(40, 370 - 15 * k, '{0}'.format(descripcion[k]))
        p.drawString(300, 370 - 15 * k, '{0}'.format(monto[k]))
    p.line(40, 370 - 15 * len(descripcion) - 3, 370, 370 - 15 * len(descripcion) - 3)
    p.line(40, 370 - 15 * len(descripcion) - 18, 370, 370 - 15 * len(descripcion) - 18)
    p.drawString(230, 370 - 15 * len(descripcion) - 15, 'TOTAL S/.:')
    p.drawString(300, 370 - 15 * len(descripcion) - 15, '{0}'.format(total))
    p.showPage()
    p.save()
    return response
"""
XD XD XD XD XD
"""
"""
PROMOTOR: DETALLE DE PAGOS REALIZADOS POR HIJO, AÑO, MES Y ESTADO
"""
class ControlIngresosPromotorDetallesView2(TemplateView):
    """Promoter/director drill-down of receivables with pagination.

    GET renders the filter form; POST filters ``Cuentascobrar`` rows by
    student name, year, month and payment state and renders a paginated
    listing (4 rows per page).
    """
    model = Cuentascobrar
    template_name = "control_ingresos_promotor_detalle2.html"
    #form_class = CuentasCobrarPromotorDetalleForm
    def cargarformPromotordetalle(self, request):
        """Build the filter-form context (years, months, states) for users
        with the 'promotor' or 'director' role; otherwise return a dict whose
        only key is 'mensaje_error'."""
        roles = ['promotor', 'director']
        if validar_roles(roles=roles):
            # Years offered: current year and the two previous ones.
            anio = datetime.today().year
            anios = []
            for i in range(0, 3):
                anios.append(anio - i)
            # Month names ("Todos" = all); `meses` (elapsed months) is built
            # but not returned — the template gets the full list.
            meses_todos = ["Todos", "Enero", "Febrero", "Marzo", "Abril", "Mayo", "Junio", "Julio", "Agosto",
                           "Setiembre", "Octubre", "Noviembre", "Diciembre"]
            num_mes = datetime.today().month
            meses = []
            for i in range(0, num_mes + 1):
                meses.append(meses_todos[i])
            # Payment states offered by the form.
            estados = ["Todos", "Pagado", "No pagado"]
            return {'anios': anios, 'meses': meses_todos, 'estados': estados}
        else:
            mensaje_error = "No tienes acceso a esta vista"
            return {'mensaje_error': mensaje_error}  # return context
    def get(self, request, *args, **kwargs):
        """Render the empty filter form, or redirect on missing permission."""
        super(ControlIngresosPromotorDetallesView2, self).get(request, *args, **kwargs)
        contexto = self.cargarformPromotordetalle(request)
        contexto['object_list'] = []
        if 'mensaje_error' in contexto.keys():
            return HttpResponseRedirect(settings.REDIRECT_PERMISOS)
        else:
            return render(request, self.template_name, contexto)  # return context
    def post(self, request, *args, **kwargs):
        """Apply the submitted filters and render the requested result page."""
        alumno = request.POST["alumno"]
        anio = request.POST["anio"]
        mes = request.POST["mes"]
        estado = request.POST["estado"]
        logger.info(alumno)
        colegio = get_current_colegio()
        # Restrict to the current school.
        cuentas_cobrar_colegio = self.model.objetos.filter(matricula__colegio__id_colegio=colegio)
        # Filter by student: exact match on first name or either surname.
        if alumno == "":
            por_cobrar1 = cuentas_cobrar_colegio
        else:
            por_cobrar1 = cuentas_cobrar_colegio.filter(
                Q(matricula__alumno__nombre=alumno) | Q(matricula__alumno__apellido_pa=alumno) | Q(
                    matricula__alumno__apellido_ma=alumno))
        # Filter by due-date year.
        if anio == "Todos":
            por_cobrar2 = por_cobrar1
        else:
            anio = int(anio)
            por_cobrar2 = por_cobrar1.filter(fecha_ven__year=anio)
        # Filter by due-date month.
        if mes == "Todos":
            por_cobrar3 = por_cobrar2
        else:
            num_mes = obtener_mes(mes)
            por_cobrar3 = por_cobrar2.filter(fecha_ven__month=num_mes)
        # Filter by payment state. BUG FIX: the final branch used to test
        # estado == "No_pagado", but the form submits "No pagado" (see
        # cargarformPromotordetalle), so `por_cobrar` was never bound and the
        # view crashed with NameError. Use a plain `else`, matching
        # ControlIngresosPromotorDetallesView. NOTE(review): estado=False
        # meaning "paid" mirrors the sibling view — confirm against the model.
        if estado == "Todos":
            por_cobrar = por_cobrar3
        elif estado == "Pagado":
            por_cobrar = por_cobrar3.filter(estado=False)
        else:
            por_cobrar = por_cobrar3.filter(estado=True)
        contexto = self.cargarformPromotordetalle(request)
        # Paginate four rows per page, ordered by due date.
        paginator = Paginator(por_cobrar.order_by('fecha_ven'), 4)
        page = request.GET.get('page', 1)
        try:
            buscados = paginator.page(page)
        except PageNotAnInteger:
            # If page is not an integer, deliver first page.
            buscados = paginator.page(1)
        except EmptyPage:
            # If page is out of range (e.g. 9999), deliver last page of results.
            buscados = paginator.page(paginator.num_pages)
        contexto['object_list'] = buscados
        #contexto['form'] = CuentasCobrarPromotorDetalleForm
        return render(request, self.template_name, contexto)
from import_export import resources
from django.http import HttpResponse
from import_export.fields import Field
class PagoResource(resources.ModelResource):
    """django-import-export resource describing the receivables export."""

    # Computed columns, populated by the dehydrate_* hooks below.
    alumno = Field()
    concepto_de_pago = Field()

    class Meta:
        model = Cuentascobrar
        exclude = ('matricula', 'servicio', 'fecha_creacion',
                   'fecha_modificacion', 'usuario_modificacion',
                   'usuario_creacion', 'activo', 'id_cuentascobrar',
                   'estado', 'comentario')

    def dehydrate_alumno(self, record):
        # Student display name taken from the enrolment record.
        return '{0}'.format(record.matricula.alumno)

    def dehydrate_concepto_de_pago(self, record):
        # Human-readable service name for the receivable.
        return '{0}'.format(record.servicio.nombre_servicio())
def exportIngresosCSV(request):
    """Export the current school's active receivables as a CSV download."""
    resource = PagoResource()
    rows = Cuentascobrar.objects.filter(matricula__colegio__id_colegio=get_current_colegio(), activo=True)
    data = resource.export(rows)
    response = HttpResponse(data.csv, content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="ingresos_{0}.csv"'.format(datetime.today())
    return response
|
furthz/colegio
|
src/income/views.py
|
Python
|
mit
| 42,291
|
from warnings import warn
from django.db import models
from avocado.core import utils
from modeltree.tree import trees
from django.db.models.query import QuerySet
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.utils.encoding import smart_unicode
AND = 'AND'
OR = 'OR'
BRANCH_KEYS = ('children', 'type')
CONDITION_KEYS = ('operator', 'value')
COMPOSITE_KEYS = ('composite',)
LOGICAL_OPERATORS = ('and', 'or')
def has_keys(obj, keys):
    "Return True when every key in `keys` is present in `obj`."
    return all(key in obj for key in keys)
def is_branch(obj):
    "Validates required structure for a branch node ('children' + 'type')."
    return True if has_keys(obj, keys=BRANCH_KEYS) else None
def is_condition(obj):
    "A condition node has 'operator'/'value' plus a 'field' or legacy 'id'."
    if has_keys(obj, keys=CONDITION_KEYS) and ('field' in obj or 'id' in obj):
        return True
def is_composite(obj):
    "A composite node references another saved context via 'composite'."
    return True if has_keys(obj, keys=COMPOSITE_KEYS) else None
class Node(object):
    "Base query node; subclasses supply conditions/annotations/extra."

    condition = None
    annotations = None
    extra = None
    language = None

    def __init__(self, tree=None, **context):
        self.tree = tree
        self.context = context

    def apply(self, queryset=None, distinct=True):
        "Apply this node's query modifiers to `queryset`."
        if queryset is None:
            queryset = trees[self.tree].get_queryset()
        # Annotations are grouped on the primary key before any filtering.
        if self.annotations:
            queryset = queryset.values('pk').annotate(**self.annotations)
        if self.condition:
            queryset = queryset.filter(self.condition)
        if self.extra:
            queryset = queryset.extra(**self.extra)
        return queryset.distinct() if distinct else queryset
class Condition(Node):
    "Contains information for a single query condition."
    def __init__(self, value, operator, id=None, field=None,
                 concept=None, **context):
        # `field` is the current key; the deprecated `id` spelling is only
        # honored when `field` is missing.
        if field:
            self.field_key = field
        else:
            self.field_key = id
            warn('The "id" key has been replaced with "field"',
                 DeprecationWarning)
        self.concept_key = concept
        self.operator = operator
        self.value = value
        super(Condition, self).__init__(**context)
    @property
    def _meta(self):
        # Translate (operator, value) through the resolved DataField.
        # NOTE(review): `self.__meta` is name-mangled to `_Condition__meta`,
        # but the hasattr check uses the literal string '__meta', which is
        # never set — so the intended caching never engages and translate()
        # runs on every access. Behavior is still correct; confirm before
        # changing.
        if not hasattr(self, '__meta'):
            self.__meta = self.field.translate(operator=self.operator,
                                               value=self.value,
                                               tree=self.tree, **self.context)
        return self.__meta
    @property
    def concept(self):
        # Lazily resolve and cache the DataConcept, if one was referenced.
        if not hasattr(self, '_concept'):
            if self.concept_key:
                from avocado.models import DataConcept
                self._concept = DataConcept.objects.get(id=self.concept_key)
            else:
                self._concept = None
        return self._concept
    @property
    def field(self):
        # Lazily resolve and cache the DataField, scoped to the concept when
        # one is present.
        if not hasattr(self, '_field'):
            from avocado.models import DataField
            # Parse to get into a consistent format
            field_key = utils.parse_field_key(self.field_key)
            if self.concept:
                self._field = self.concept.fields.get(**field_key)
            else:
                self._field = DataField.objects.get(**field_key)
        return self._field
    @property
    def condition(self):
        # Q object produced by the field translation, if any.
        return self._meta['query_modifiers'].get('condition', None)
    @property
    def annotations(self):
        return self._meta['query_modifiers'].get('annotations', None)
    @property
    def extra(self):
        return self._meta['query_modifiers'].get('extra', None)
    @property
    def language(self):
        # Human-readable description: the translation output minus the raw
        # query modifiers, with the cleaned 'language' string hoisted up.
        meta = self._meta.copy()
        meta.pop('query_modifiers')
        cleaned = meta.pop('cleaned_data')
        meta['language'] = cleaned['language']
        return meta
class Branch(Node):
    "Provides a logical relationship between it's children."
    def __init__(self, type, **context):
        # Normalize to the 'AND'/'OR' constants (old-style and/or ternary).
        self.type = (type.upper() == AND) and AND or OR
        self.children = []
        super(Branch, self).__init__(**context)
    def _combine(self, q1, q2):
        # Combine two Q objects with this branch's logical operator.
        if self.type.upper() == OR:
            return q1 | q2
        return q1 & q2
    @property
    def condition(self):
        # Lazily fold the children's conditions into a single Q object,
        # caching the result. Note the fold order: each new condition is
        # combined as the LEFT operand of the accumulator.
        if not hasattr(self, '_condition'):
            condition = None
            for node in self.children:
                if node.condition:
                    if condition:
                        condition = self._combine(node.condition, condition)
                    else:
                        condition = node.condition
            self._condition = condition
        return self._condition
    @property
    def annotations(self):
        # Merged dict of all children's annotations (later children win on
        # key collisions); cached after the first access.
        if not hasattr(self, '_annotations'):
            self._annotations = {}
            for node in self.children:
                if node.annotations:
                    self._annotations.update(node.annotations)
        return self._annotations
    @property
    def extra(self):
        # Merge the children's .extra() kwargs: list values are unioned
        # preserving order, dict values are updated; anything else raises.
        extra = {}
        for node in self.children:
            if not node.extra:
                continue
            for key, value in node.extra.items():
                _type = type(value)
                # Initialize an empty container for the value type..
                extra.setdefault(key, _type())
                if _type is list:
                    current = extra[key][:]
                    [extra[key].append(x) for x in value if x not in current]
                elif _type is dict:
                    extra[key].update(value)
                else:
                    raise TypeError('The ".extra()" method only takes list of '
                                    'dicts as keyword values')
        return extra
    @property
    def language(self):
        # Serializable description of this branch and its whole subtree.
        out = {'type': self.type.lower(), 'children': []}
        for node in self.children:
            out['children'].append(node.language)
        return out
def validate(attrs, **context):
    """Validate a datacontext node tree in place and return it.

    `attrs` is a composite reference, a single condition, or a branch with
    children. The dict is annotated with 'language', 'cleaned_value',
    'errors', 'warnings' and 'enabled' keys as appropriate. Returns None for
    empty input; raises ValidationError when `attrs` is not a dict.
    """
    if not attrs:
        return None
    if type(attrs) is not dict:
        raise ValidationError('Object must be of type dict')
    # Strip output of any previous validation pass; `enabled` is re-added
    # below only when the node turns out to be disabled.
    enabled = attrs.pop('enabled', None)
    attrs.pop('errors', None)
    attrs.pop('warnings', None)
    errors = []
    warnings = []
    if is_composite(attrs):
        from avocado.models import DataContext
        try:
            # Scope the lookup to the requesting user when one is supplied.
            if 'user' in context:
                cxt = DataContext.objects.get(id=attrs['composite'],
                                              user=context['user'])
            else:
                cxt = DataContext.objects.get(id=attrs['composite'])
            validate(cxt.json, **context)
            attrs['language'] = cxt.name
        except DataContext.DoesNotExist:
            enabled = False
            # NOTE(review): composite nodes carry a 'composite' key, so this
            # format() of attrs['id'] may itself raise KeyError — confirm.
            errors.append(u'DataContext "{0}" does not exist.'
                          .format(attrs['id']))
    elif is_condition(attrs):
        from avocado.models import DataField, DataConcept
        # Support both the current 'field' key and the deprecated 'id'.
        field_key = attrs.get('field', attrs.get('id'))
        # Parse to get into a consistent format
        field_key = utils.parse_field_key(field_key)
        try:
            if 'concept' in attrs:
                concept = DataConcept.objects.get(id=attrs['concept'])
                field = concept.fields.get(**field_key)
            else:
                field = DataField.objects.get(**field_key)
            field.validate(operator=attrs['operator'], value=attrs['value'])
            node = parse(attrs, **context)
            attrs['language'] = node.language['language']
            # Attach human-readable labels for the cleaned value(s).
            value = node._meta['cleaned_data']['value']
            cleaned = None
            if field.enumerable or field.simple_type == 'key':
                value_labels = field.value_labels()
                if isinstance(value, QuerySet):
                    cleaned = [{
                        'value': val.pk,
                        'label': value_labels[val.pk]
                    } for val in value]
                elif isinstance(value, (list, tuple)):
                    cleaned = []
                    for val in value:
                        if val in value_labels:
                            label = value_labels[val]
                        else:
                            label = smart_unicode(val)
                        cleaned.append({
                            'value': val,
                            'label': label
                        })
                elif isinstance(value, models.Model):
                    # Values represented by django models
                    # have only one particular label.
                    cleaned = {
                        'value': value.pk,
                        'label': value_labels[value.pk]
                    }
                else:
                    # Handle single, non-model values.
                    if value in value_labels:
                        label = value_labels[value]
                    else:
                        label = smart_unicode(value)
                    cleaned = {
                        'value': value,
                        'label': label,
                    }
            if cleaned:
                attrs['cleaned_value'] = cleaned
        except ObjectDoesNotExist:
            enabled = False
            errors.append('Field does not exist')
    elif is_branch(attrs):
        if attrs['type'] not in LOGICAL_OPERATORS:
            enabled = False
        else:
            # Recursively validate every child.
            # NOTE(review): map() is lazy on Python 3, so the children would
            # not actually be validated there; this module targets Python 2
            # (see the smart_unicode import).
            map(lambda x: validate(x, **context), attrs['children'])
    else:
        enabled = False
        errors.append('Unknown node type')
    # If this condition was originally disabled, ensure that decision is
    # persisted
    if enabled is False:
        attrs['enabled'] = False
    if errors:
        attrs['errors'] = errors
    if warnings:
        attrs['warnings'] = warnings
    return attrs
def parse(attrs, **context):
    """Build a Node tree from a (validated) datacontext dict.

    Empty or explicitly disabled input yields a no-op base Node; composite
    nodes are resolved through DataContext and parsed recursively.
    """
    if not attrs or attrs.get('enabled') is False:
        node = Node(**context)
    elif is_composite(attrs):
        from avocado.models import DataContext
        if 'user' in context:
            cxt = DataContext.objects.get(id=attrs['composite'],
                                          user=context['user'])
        else:
            cxt = DataContext.objects.get(id=attrs['composite'])
        return parse(cxt.json, **context)
    elif is_condition(attrs):
        node = Condition(operator=attrs['operator'], value=attrs['value'],
                         id=attrs.get('id'), field=attrs.get('field'),
                         **context)
    else:
        # Anything else is treated as a branch; its children are parsed
        # recursively. NOTE(review): map() is lazy on Python 3; this module
        # targets Python 2 (see the smart_unicode import).
        node = Branch(type=attrs['type'], **context)
        node.children = map(lambda x: parse(x, **context), attrs['children'])
    return node
|
murphyke/avocado
|
avocado/query/oldparsers/datacontext.py
|
Python
|
bsd-2-clause
| 10,716
|
"""
Create some examples in the time domain.
Simulate and plot impulse behavior for Wave Field Synthesis.
"""
import numpy as np
import matplotlib.pyplot as plt
import sfs
# simulation parameters
grid = sfs.util.xyz_grid([-3, 3], [-3, 3], 0, spacing=0.01)
my_cmap = 'YlOrRd'
N = 56 # number of secondary sources
R = 1.5 # radius of spherical/circular array
array = sfs.array.circular(N, R) # get secondary source positions
fs = 44100 # sampling rate
# unit impulse
signal = [1], fs
# POINT SOURCE
xs = 2, 2, 0 # position of virtual source
t = 0.008
# compute driving signals
d_delay, d_weight, selection, secondary_source = \
sfs.td.wfs.point_25d(array.x, array.n, xs)
d = sfs.td.wfs.driving_signals(d_delay, d_weight, signal)
# test soundfield
twin = sfs.tapering.tukey(selection, alpha=0.3)
p = sfs.td.synthesize(d, twin, array,
secondary_source, observation_time=t, grid=grid)
p = p * 100 # scale absolute amplitude
plt.figure(figsize=(10, 10))
sfs.plot2d.level(p, grid, cmap=my_cmap)
sfs.plot2d.loudspeakers(array.x, array.n, twin)
plt.grid()
sfs.plot2d.virtualsource(xs)
plt.title('impulse_ps_wfs_25d')
plt.savefig('impulse_ps_wfs_25d.png')
# PLANE WAVE
pw_angle = 30 # traveling direction of plane wave
npw = sfs.util.direction_vector(np.radians(pw_angle))
t = -0.001
# compute driving signals
d_delay, d_weight, selection, secondary_source = \
sfs.td.wfs.plane_25d(array.x, array.n, npw)
d = sfs.td.wfs.driving_signals(d_delay, d_weight, signal)
# test soundfield
twin = sfs.tapering.tukey(selection, alpha=0.3)
p = sfs.td.synthesize(d, twin, array,
secondary_source, observation_time=t, grid=grid)
plt.figure(figsize=(10, 10))
sfs.plot2d.level(p, grid, cmap=my_cmap)
sfs.plot2d.loudspeakers(array.x, array.n, twin)
plt.grid()
sfs.plot2d.virtualsource([0, 0], npw, type='plane')
plt.title('impulse_pw_wfs_25d')
plt.savefig('impulse_pw_wfs_25d.png')
# FOCUSED SOURCE
xs = np.r_[0.5, 0.5, 0] # position of virtual source
xref = np.r_[0, 0, 0]
nfs = sfs.util.normalize_vector(xref - xs) # main n of fsource
t = 0.003 # compute driving signals
d_delay, d_weight, selection, secondary_source = \
sfs.td.wfs.focused_25d(array.x, array.n, xs, nfs)
d = sfs.td.wfs.driving_signals(d_delay, d_weight, signal)
# test soundfield
twin = sfs.tapering.tukey(selection, alpha=0.3)
p = sfs.td.synthesize(d, twin, array,
secondary_source, observation_time=t, grid=grid)
p = p * 100 # scale absolute amplitude
plt.figure(figsize=(10, 10))
sfs.plot2d.level(p, grid, cmap=my_cmap)
sfs.plot2d.loudspeakers(array.x, array.n, twin)
plt.grid()
sfs.plot2d.virtualsource(xs)
plt.title('impulse_fs_wfs_25d')
plt.savefig('impulse_fs_wfs_25d.png')
|
sfstoolbox/sfs-python
|
doc/examples/time_domain.py
|
Python
|
mit
| 2,730
|
import copy
from . import functions, instance
from .self_constructor import SelfConstructor
JSON_TYPES = {str, int, float, type(None), bool}
class Type(SelfConstructor):
    """
    Implements a simple class interface for trivially JSONable objects.
    :Example:
        >>> from jsonable import JSONable
        >>>
        >>>
        >>> class Fruit(JSONable):
        ...     __slots__ = ('type', 'weight')
        ...
        ...     def initialize(self, type, weight):
        ...         self.type = str(type)
        ...         self.weight = float(weight)
        ...
        >>> class Pie(JSONable):
        ...     __slots__ = ('fruit',)
        ...
        ...     def initialize(self, fruit):
        ...         self.fruit = [Fruit(f) for f in fruit]
        ...
        ...
        >>> pie = Pie([Fruit('apple', 10.3), Fruit('cherry', 2)])
        >>>
        >>> doc = pie.to_json()
        >>>
        >>> pie == Pie(doc)
        True
    """
    def __new__(cls, *args, **kwargs):
        # A lone dict positional argument is treated as a JSON document
        # produced by to_json(); anything else is normal construction.
        if len(args) == 1 and len(kwargs) == 0 and isinstance(args[0], dict):
            return cls.from_json(args[0])
        else:
            return super().__new__(cls, *args, **kwargs)

    def __hash__(self):
        # Hash over slot values so equal instances hash equally.
        return hash(tuple(instance.slots_values(self)))

    def __eq__(self, other):
        """Slot-wise equality; False for None or slot-incompatible objects."""
        if other is None:
            return False
        try:
            for key in instance.slots_keys(self):
                if getattr(self, key) != getattr(other, key):
                    return False
            return True
        except (KeyError, AttributeError):
            # BUGFIX: the original `except KeyError or AttributeError:`
            # evaluates to `except KeyError:` only, so an AttributeError
            # (other lacks one of our slots) escaped instead of meaning
            # "not equal".
            return False

    def __ne__(self, other):
        # Proper inequality dunder (the original defined only the
        # misspelled __neq__, which Python never invokes).
        return not self.__eq__(other)

    # Historical misspelled alias, kept for callers that invoked it
    # directly.
    __neq__ = __ne__

    def __str__(self):
        return self.__repr__()

    def __repr__(self):
        return instance.slots_repr(self)

    def to_json(self):
        """Serialize slot values (skipping Nones) into a plain dict."""
        return {k: functions.to_json(v)
                for k, v in instance.slots_items(self)
                if v is not None}

    def __getstate__(self):
        # Pickle support: the JSON document is the full state.
        return self.to_json()

    def __getnewargs__(self):
        return (self.to_json(), )

    @classmethod
    def from_json(cls, doc):
        """Reconstruct an instance from a to_json() document."""
        return cls(**doc)

    # Stable alias so subclasses overriding from_json (e.g. Base) can
    # still reach this plain implementation.
    _from_json = from_json


JSONable = Type  # For backwards compatibility
class Base(Type):
    """
    Implements a simple JSONable datastructure for abstract classes.
    :Example:
        >>> from jsonable import JSONable, AbstractJSONable
        >>>
        >>> class Bowl(JSONable):
        ...     __slots__ = ('fruit',)
        ...     def initialize(self, fruit):
        ...         self.fruit = [Fruit(f) for f in fruit]
        ...
        >>>
        >>>
        >>> class Fruit(AbstractJSONable):
        ...     __slots__ = ('weight',)
        ...     def initialize(self, weight):
        ...         self.weight = float(weight)  # lbs
        ...
        >>>
        >>> class Apple(Fruit):
        ...     __slots__ = ('variety',)
        ...     def initialize(self, weight, variety):
        ...         super().initialize(weight)
        ...         self.variety = str(variety)
        ...
        >>> Fruit.register(Apple)
        >>>
        >>> class Orange(Fruit):
        ...     __slots__ = ('radius',)
        ...     def initialize(self, weight, radius):
        ...         super().initialize(weight)
        ...         self.radius = float(radius)  # in
        ...
        >>> Fruit.register(Orange)
        >>>
        >>> orange = Orange(10.1, 2.5)
        >>>
        >>> apple = Apple(9.2, "Honey Crisp")
        >>>
        >>> bowl = Bowl([apple, orange])
        >>>
        >>> doc = bowl.to_json()
        >>>
        >>> bowl == Bowl(doc)
        True
    """
    __slots__ = tuple()

    # Registry of concrete subclasses, keyed by class name.
    REGISTERED_SUB_CLASSES = {}
    # Document key under which the concrete class name is stored.
    CLASS_NAME_KEY = "__class__"

    def to_json(self):
        """Serialize as usual, tagging the document with our class name."""
        doc = super().to_json()
        doc[self.CLASS_NAME_KEY] = type(self).__name__
        return doc

    @classmethod
    def from_json(cls, doc):
        """Reconstruct the concrete (registered) class named in `doc`."""
        if cls.CLASS_NAME_KEY not in doc:
            # Untagged document: build an instance of this class directly.
            return cls._from_json(doc)
        class_name = doc[cls.CLASS_NAME_KEY]
        SubClass = cls.REGISTERED_SUB_CLASSES.get(class_name)
        if SubClass is None:
            if class_name != cls.__name__:
                raise KeyError(str(class_name) +
                               " is not a recognized subclass of " +
                               cls.__name__)
            SubClass = cls
        # Strip the tag before delegating so construction never sees it.
        new_doc = copy.copy(doc)
        del new_doc[cls.CLASS_NAME_KEY]
        return SubClass.from_json(new_doc)

    @classmethod
    def get(cls, class_name):
        """Return the registered subclass (KeyError if unknown)."""
        return cls.REGISTERED_SUB_CLASSES[class_name]

    @classmethod
    def register(cls, SubClass):
        """Record `SubClass` so from_json can reconstruct its instances."""
        cls.REGISTERED_SUB_CLASSES[SubClass.__name__] = SubClass


AbstractJSONable = Base  # For backwards compatibility
|
halfak/python-jsonable
|
jsonable/type.py
|
Python
|
mit
| 4,811
|
from AccessControl import ClassSecurityInfo
from archetypes.querywidget.views import WidgetTraverse as _WidgetTraverse
from archetypes.querywidget.widget import QueryWidget as _QueryWidget
from bika.lims.querystring.querybuilder import QueryBuilder
from bika.lims.querystring.querybuilder import RegistryConfiguration
from archetypes.querywidget.views import \
MultiSelectWidget as _MultiSelectWidget
from bika.lims.querystring.registryreader import QuerystringRegistryReader
from plone.app.querystring.interfaces import IQuerystringRegistryReader
from plone.registry.interfaces import IRegistry
from Products.Archetypes.Registry import registerWidget
from Products.Archetypes.Widget import TypesWidget
from Products.CMFCore.permissions import ModifyPortalContent, View
from zope.component import getMultiAdapter
from zope.component import getUtility
from zope.i18nmessageid import MessageFactory
from zope.interface import implements
_p = MessageFactory('plone')
class QueryWidget(_QueryWidget):
    _properties = _QueryWidget._properties.copy()
    _properties.update({
        'macro': 'bika_widgets/querywidget',
        'helper_css': ('++resource++archetypes.querywidget.querywidget.css',),
        'helper_js': ('++resource++bika.lims.js/querywidget.js',
                      '@@datepickerconfig'),
        'catalog_name': 'portal_catalog',
        'registry_prefix': None,
    })

    security = ClassSecurityInfo()

    def getConfig(self):
        """Assemble the querystring configuration from the registry.

        When ``registry_prefix`` is set, the stock plone.app.querystring
        operation definitions are merged with the fields registered under
        that prefix; otherwise the plain plone.app.querystring config is
        used.  Enabled indexes are then bucketed by their 'group' key and
        each bucket sorted by title.
        """
        registry = getUtility(IRegistry)
        prefix = self.registry_prefix
        if prefix is not None:
            # Stock operation definitions live in the base registry tree.
            ops_reader = IQuerystringRegistryReader(registry)
            ops_reader.prefix = "plone.app.querystring.operation"
            base_ops = ops_reader.parseRegistry()
            # Our own field definitions live under the custom prefix.
            field_reader = IQuerystringRegistryReader(registry)
            field_reader.prefix = prefix
            config = field_reader.parseRegistry()
            config = field_reader.getVocabularyValues(config)
            config.update(base_ops)
            config = field_reader.mapOperations(config)
            config = field_reader.mapSortableIndexes(config)
            config = {
                'indexes': config.get(prefix + '.field'),
                'sortable_indexes': config.get('sortable'),
            }
        else:
            # No custom prefix: fall back to the stock configuration.
            reader = IQuerystringRegistryReader(registry)
            reader.prefix = "plone.app.querystring"
            config = reader()
        # Bucket enabled indexes by group, ordered alphabetically by title.
        groupedIndexes = {}
        for indexName, index in config['indexes'].items():
            if not index['enabled']:
                continue
            groupedIndexes.setdefault(index['group'], []).append(
                (index['title'], indexName))
        for entries in groupedIndexes.values():
            entries.sort()
        config['groupedIndexes'] = groupedIndexes
        return config

    def SearchResults(self, request, context, accessor):
        """Render the stored query's results via display_query_results."""
        render = getMultiAdapter((accessor(), request),
                                 name='display_query_results')
        return render(original_context=context)
# Register the widget with Archetypes so schemas can reference it.
registerWidget(QueryWidget, title='Query',
               description=('Field for storing a query'))
class WidgetTraverse(_WidgetTraverse):

    def getConfig(self):
        """get the config

        Builds the querystring configuration for the hard-coded
        ``bika.lims.bika_catalog_query`` registry prefix: merges the stock
        plone.app.querystring operation definitions with the bika fields,
        then groups the enabled indexes by their 'group' key, sorted by
        title within each group.
        """
        registry = getUtility(IRegistry)
        # First grab the base config, so we can use the operations
        registryreader = IQuerystringRegistryReader(registry)
        registryreader.prefix = "plone.app.querystring.operation"
        op_config = registryreader.parseRegistry()
        # Then combine our fields
        registryreader = IQuerystringRegistryReader(registry)
        registryreader.prefix = "bika.lims.bika_catalog_query"
        config = registryreader.parseRegistry()
        config = registryreader.getVocabularyValues(config)
        config.update(op_config)
        # NOTE(review): the return values of the next two calls are
        # discarded here, while QueryWidget.getConfig reassigns `config`
        # from them -- confirm these reader methods mutate `config` in
        # place, otherwise their effect is silently lost.
        registryreader.mapOperations(config)
        registryreader.mapSortableIndexes(config)
        config = {
            'indexes': config.get('bika.lims.bika_catalog_query.field'),
            'sortable_indexes': config.get('sortable'),
        }
        # Group indices by "group", order alphabetically
        groupedIndexes = {}
        for indexName in config['indexes']:
            index = config['indexes'][indexName]
            if index['enabled']:
                group = index['group']
                if group not in groupedIndexes:
                    groupedIndexes[group] = []
                groupedIndexes[group].append((index['title'], indexName))
        # Sort each index list
        [a.sort() for a in groupedIndexes.values()]
        config['groupedIndexes'] = groupedIndexes
        return config
class MultiSelectWidget(WidgetTraverse):

    def getValues(self, index=None):
        """Return the vocabulary values for `index`, falling back to the
        'index' request parameter; None when no index is available."""
        config = self.getConfig()
        if not index:
            index = self.request.form.get('index')
        if index is None:
            return None
        return config['indexes'][index]['values']

    def getSortedValuesKeys(self, values):
        """Keys of `values`, sorted case-insensitively."""
        return sorted(values.iterkeys(), key=lambda key: key.lower())
|
anneline/Bika-LIMS
|
bika/lims/browser/widgets/querywidget.py
|
Python
|
agpl-3.0
| 5,599
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.