repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
Shinoby1992/xstream | default.py | 1 | 1196 | #!/usr/bin/env python2.7
# -*- coding: utf-8 -*-
from os import getcwd
from os.path import join
from sys import path
import xbmc
from xbmc import log
from resources.lib import common

# Kodi addon object and its install path, shared through the common helper.
# NOTE(review): 'getcwd' appears unused in this script -- confirm before
# removing the import.
__settings__ = common.addon
__cwd__ = common.addonPath

# Add different library path
# Extend sys.path so site plugins, GUI code and protocol handlers can be
# imported without package prefixes.
path.append(join(__cwd__, "resources", "lib"))
path.append(join(__cwd__, "resources", "lib", "gui"))
path.append(join(__cwd__, "resources", "lib", "handler"))
path.append(join(__cwd__, "resources", "art", "sites"))
path.append(join(__cwd__, "sites"))
log("The new sys.path list: %s" % path, level = xbmc.LOGDEBUG)

# Run xstream
from xstream import run
log('*---- Running xStream, version %s ----*' % __settings__.getAddonInfo('version'))
#import cProfile
#cProfile.run('run()',join(__cwd__,'xstream.pstats'))
try:
    run()
except Exception, err:
    # 'UserAborted' is raised when the user cancels list creation; it is
    # informational rather than an error, so no dialog is shown.
    if str(err) == 'UserAborted':
        print "\t[xStream] User aborted list creation"
    else:
        # Any other failure: dump the traceback to the log and show a Kodi
        # error dialog with the exception type and the addon-relative line
        # extracted from the traceback.
        import traceback
        import xbmcgui
        print traceback.format_exc()
        dialog = xbmcgui.Dialog().ok('Error',str(err.__class__.__name__)+" : "+str(err),str(traceback.format_exc().splitlines()[-3].split('addons')[-1]))
| gpl-3.0 |
jupierce/openshift-tools | ansible/roles/lib_zabbix/library/zbx_action.py | 12 | 24841 | #!/usr/bin/env python
# vim: expandtab:tabstop=4:shiftwidth=4
'''
Ansible module for zabbix actions
'''
#
# Zabbix action ansible module
#
#
# Copyright 2015 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This is in place because each module looks similar to each other.
# These need duplicate code as their behavior is very similar
# but different for each zabbix class.
# pylint: disable=duplicate-code
# pylint: disable=import-error
from openshift_tools.zbxapi import ZabbixAPI, ZabbixConnection, ZabbixAPIError
# The Zabbix API represents these enumerations as strings, hence the quotes.
# opcommand 'type' codes:
CUSTOM_SCRIPT_ACTION = '0'
IPMI_ACTION = '1'
SSH_ACTION = '2'
TELNET_ACTION = '3'
GLOBAL_SCRIPT_ACTION = '4'
# opcommand 'execute_on' codes:
EXECUTE_ON_ZABBIX_AGENT = '0'
EXECUTE_ON_ZABBIX_SERVER = '1'
# operation 'operationtype' code for a remote command:
OPERATION_REMOTE_COMMAND = '1'
def exists(content, key='result'):
    ''' Check if key exists in content and content[key] is non-empty.

    Args:
        content: dict-like API response to inspect
        key: key to look up (defaults to the Zabbix 'result' key)

    Returns:
        True only when key is present and its value is truthy.
    '''
    # dict.has_key() is deprecated (and removed in Python 3); the 'in'
    # operator is the portable equivalent.
    if key not in content:
        return False
    if not content[key]:
        return False
    return True
def conditions_equal(zab_conditions, user_conditions):
    '''Return True when both condition lists match pairwise.

    Zabbix stores every field as a string, so user-supplied values are
    stringified before comparison.
    '''
    if len(user_conditions) != len(zab_conditions):
        return False
    for zab_cond, usr_cond in zip(zab_conditions, user_conditions):
        for field in ('conditiontype', 'operator', 'value'):
            if zab_cond[field] != str(usr_cond[field]):
                return False
    return True
def filter_differences(zabbix_filters, user_filters):
    '''Collect the user-supplied filter entries that differ from what
    Zabbix currently holds.'''
    differences = {}
    for key, val in user_filters.items():
        if key == 'conditions':
            # Condition lists need an element-wise comparison.
            if not conditions_equal(zabbix_filters[key], val):
                differences[key] = val
        elif zabbix_filters[key] != str(val):
            differences[key] = val
    return differences
def opconditions_diff(zab_val, user_val):
    ''' Report whether there are differences between opconditions on
    zabbix and opconditions supplied by user '''
    if len(zab_val) != len(user_val):
        return True
    for zab_cond, usr_cond in zip(zab_val, user_val):
        for field in ('conditiontype', 'operator', 'value'):
            # Zabbix stores everything as strings.
            if str(usr_cond[field]) != zab_cond[field]:
                return True
    return False
def opmessage_diff(zab_val, user_val):
    ''' Report whether there are differences between opmessage on
    zabbix and opmessage supplied by user '''
    # Zabbix returns strings, so compare against str() of the user value.
    return any(zab_val[key] != str(val) for key, val in user_val.items())
def opmessage_grp_diff(zab_val, user_val):
    ''' Report whether there are differences between opmessage_grp
    on zabbix and opmessage_grp supplied by user '''
    # Order does not matter for group membership, so compare as sets.
    zab_ids = set(grp['usrgrpid'] for grp in zab_val)
    usr_ids = set(grp['usrgrpid'] for grp in user_val)
    return usr_ids != zab_ids
def opmessage_usr_diff(zab_val, user_val):
    ''' Report whether there are differences between opmessage_usr
    on zabbix and opmessage_usr supplied by user '''
    # Order does not matter for user membership, so compare as sets.
    zab_ids = set(usr['userid'] for usr in zab_val)
    usr_ids = set(usr['userid'] for usr in user_val)
    return usr_ids != zab_ids
def opcommand_diff(zab_op_cmd, usr_op_cmd):
    ''' Check whether user-provided opcommand matches what's already
    stored in Zabbix '''
    # Zabbix returns strings, so stringify the user side for comparison.
    return any(zab_op_cmd[key] != str(val) for key, val in usr_op_cmd.items())
def host_in_zabbix(zab_hosts, usr_host):
    ''' Check whether a particular user host is already in the
    Zabbix list of hosts '''
    for key, val in usr_host.items():
        expected = str(val)
        for zab_host in zab_hosts:
            if key in zab_host and zab_host[key] == expected:
                return True
    return False
def hostlist_in_zabbix(zab_hosts, usr_hosts):
    ''' Check whether user-provided list of hosts are already in
    the Zabbix action '''
    if len(zab_hosts) != len(usr_hosts):
        return False
    return all(host_in_zabbix(zab_hosts, usr_host) for usr_host in usr_hosts)
# We are comparing two lists of dictionaries (the one stored on zabbix and the
# one the user is providing). For each type of operation, determine whether there
# is a difference between what is stored on zabbix and what the user is providing.
# If there is a difference, we take the user-provided data for what needs to
# be stored/updated into zabbix.
def operation_differences(zabbix_ops, user_ops):
    '''Determine the differences from user and zabbix for operations

    Args:
        zabbix_ops: list of operation dicts currently stored in Zabbix
        user_ops: list of operation dicts supplied by the user

    Returns:
        dict mapping each differing operation key to the user-supplied
        value; empty when everything matches.  When the two lists differ
        in length, the whole user list is returned instead.
    '''
    # if they don't match, take the user options
    if len(zabbix_ops) != len(user_ops):
        return user_ops

    rval = {}
    # zip() pairs the operations positionally -- this assumes both sides
    # list operations in the same order.
    for zab, user in zip(zabbix_ops, user_ops):
        for oper in user.keys():
            # Each operation sub-structure has its own comparison helper
            # because Zabbix stores values as strings and nests them
            # differently per operation type.
            if oper == 'opconditions' and opconditions_diff(zab[oper], \
                    user[oper]):
                rval[oper] = user[oper]
            elif oper == 'opmessage' and opmessage_diff(zab[oper], \
                    user[oper]):
                rval[oper] = user[oper]
            elif oper == 'opmessage_grp' and opmessage_grp_diff(zab[oper], \
                    user[oper]):
                rval[oper] = user[oper]
            elif oper == 'opmessage_usr' and opmessage_usr_diff(zab[oper], \
                    user[oper]):
                rval[oper] = user[oper]
            elif oper == 'opcommand' and opcommand_diff(zab[oper], \
                    user[oper]):
                rval[oper] = user[oper]
            # opcommand_grp can be treated just like opcommand_hst
            # as opcommand_grp[] is just a list of groups
            elif oper == 'opcommand_hst' or oper == 'opcommand_grp':
                if not hostlist_in_zabbix(zab[oper], user[oper]):
                    rval[oper] = user[oper]
            # if it's any other type of operation than the ones tested above
            # just do a direct compare
            elif oper not in ['opconditions', 'opmessage', 'opmessage_grp',
                              'opmessage_usr', 'opcommand', 'opcommand_hst',
                              'opcommand_grp'] \
                    and str(zab[oper]) != str(user[oper]):
                rval[oper] = user[oper]
    return rval
def get_users(zapi, users):
    '''Translate a list of user aliases into Zabbix userid dicts.'''
    rval_users = []
    for alias in users:
        content = zapi.get_content('user', 'get',
                                   {'filter': {'alias': alias}})
        rval_users.append({'userid': content['result'][0]['userid']})
    return rval_users
def get_user_groups(zapi, groups):
    '''Translate a list of group names into Zabbix usrgrpid dicts.'''
    user_groups = []
    for group_name in groups:
        content = zapi.get_content('usergroup', 'get',
                                   {'search': {'name': group_name}})
        # A search can match several groups; keep them all.
        for result in content['result']:
            user_groups.append({'usrgrpid': result['usrgrpid']})
    return user_groups
def get_mediatype_id_by_name(zapi, m_name):
    '''Resolve a mediatype description to its mediatypeid.'''
    query = {'filter': {'description': m_name}}
    content = zapi.get_content('mediatype', 'get', query)
    return content['result'][0]['mediatypeid']
def get_priority(priority):
    ''' Map a severity description onto the Zabbix numeric priority.

    Unrecognised input maps to 0 (not classified).
    '''
    if 'info' in priority:
        return 1
    if 'warn' in priority:
        return 2
    if 'avg' == priority or 'ave' in priority:
        return 3
    if 'high' in priority:
        return 4
    if 'dis' in priority:
        return 5
    return 0
def get_event_source(from_src):
    '''Translate event source name into its Zabbix numeric value.

    Args:
        from_src: one of 'trigger', 'discovery', 'auto', 'internal'

    Raises:
        ZabbixAPIError: when from_src is not a known event source.
    '''
    choices = ['trigger', 'discovery', 'auto', 'internal']
    try:
        return choices.index(from_src)
    except ValueError:
        # Bug fix: the original constructed this exception but never
        # raised it, silently returning 0 ('trigger') for invalid input.
        raise ZabbixAPIError('Value not found for event source [%s]' % from_src)
def get_status(inc_status):
    '''determine status for action: 0 = enabled, 1 = disabled'''
    return 0 if inc_status == 'enabled' else 1
def get_condition_operator(inc_operator):
    ''' Map a condition operator symbol/phrase to its Zabbix code.'''
    operator_codes = {
        '=': 0, '<>': 1, 'like': 2, 'not like': 3,
        'in': 4, '>=': 5, '<=': 6, 'not in': 7,
    }
    return operator_codes[inc_operator]
def get_host_id_by_name(zapi, host_name):
    '''Look up the Zabbix hostid for the given host name.'''
    content = zapi.get_content('host', 'get',
                               {'filter': {'name': host_name}})
    return content['result'][0]['hostid']
def get_trigger_value(inc_trigger):
    '''PROBLEM maps to 1; anything else (i.e. OK) maps to 0.'''
    return 1 if inc_trigger == 'PROBLEM' else 0
def get_template_id_by_name(zapi, t_name):
    '''Look up the Zabbix templateid for the given template name.'''
    content = zapi.get_content('template', 'get',
                               {'filter': {'host': t_name}})
    return content['result'][0]['templateid']
def get_host_group_id_by_name(zapi, hg_name):
    '''Look up the Zabbix groupid for the given host group name.'''
    content = zapi.get_content('hostgroup', 'get',
                               {'filter': {'name': hg_name}})
    return content['result'][0]['groupid']
def get_condition_type(event_source, inc_condition):
    '''Determine the numeric Zabbix condition type.

    Args:
        event_source: one of 'trigger', 'discovery', 'auto', 'internal'
        inc_condition: human-readable condition name

    Returns:
        The numeric conditiontype code for the given event source.

    Raises:
        ZabbixAPIError: when event_source is not recognised.
        KeyError: when inc_condition is not valid for that source.
    '''
    condition_types = {
        'trigger': {'host group': 0,
                    'host': 1,
                    'trigger': 2,
                    'trigger name': 3,
                    'trigger severity': 4,
                    'trigger value': 5,
                    'time period': 6,
                    'host template': 13,
                    'application': 15,
                    'maintenance status': 16,
                   },
        'discovery': {'host IP': 7,
                      'discovered service type': 8,
                      'discovered service port': 9,
                      'discovery status': 10,
                      'uptime or downtime duration': 11,
                      'received value': 12,
                      'discovery rule': 18,
                      'discovery check': 19,
                      'proxy': 20,
                      'discovery object': 21,
                     },
        'auto': {'proxy': 20,
                 'host name': 22,
                 'host metadata': 24,
                },
        'internal': {'host group': 0,
                     'host': 1,
                     'host template': 13,
                     'application': 15,
                     'event type': 23,
                    },
    }
    if event_source not in condition_types:
        # Typo fix: message previously read 'Unkown'.
        raise ZabbixAPIError('Unknown event source %s' % event_source)
    return condition_types[event_source][inc_condition]
def get_operation_type(inc_operation):
    ''' Map an operation name to the Zabbix numeric operation type.'''
    operation_types = {
        'send message': 0,
        'remote command': OPERATION_REMOTE_COMMAND,
        'add host': 2,
        'remove host': 3,
        'add to host group': 4,
        'remove from host group': 5,
        'link to template': 6,
        'unlink from template': 7,
        'enable host': 8,
        'disable host': 9,
    }
    return operation_types[inc_operation]
def get_opcommand_type(opcommand_type):
    ''' Map an opcommand description to its Zabbix type code.'''
    opcommand_types = {
        'custom script': CUSTOM_SCRIPT_ACTION,
        'IPMI': IPMI_ACTION,
        'SSH': SSH_ACTION,
        'Telnet': TELNET_ACTION,
        'global script': GLOBAL_SCRIPT_ACTION,
    }
    return opcommand_types[opcommand_type]
def get_execute_on(execute_on):
    ''' Map an execution target name to its Zabbix code.'''
    targets = {
        'zabbix agent': EXECUTE_ON_ZABBIX_AGENT,
        'zabbix server': EXECUTE_ON_ZABBIX_SERVER,
    }
    return targets[execute_on]
def action_remote_command(ansible_module, zapi, operation):
    ''' Process remote command type of actions

    Mutates `operation` in place to match the ZabbixAPI wire format:
    translates the symbolic opcommand type / execute_on values into their
    numeric codes and expands the helper 'target_hosts' list into the
    opcommand_hst / opcommand_grp lists the API expects.

    Exits the ansible module with failed=True when the opcommand type is
    missing or is anything other than a custom script.
    '''
    if 'type' not in operation['opcommand']:
        ansible_module.exit_json(failed=True, changed=False, state='unknown',
                                 results="No Operation Type provided")

    operation['opcommand']['type'] = get_opcommand_type(operation['opcommand']['type'])

    if operation['opcommand']['type'] == CUSTOM_SCRIPT_ACTION:
        if 'execute_on' in operation['opcommand']:
            operation['opcommand']['execute_on'] = get_execute_on(operation['opcommand']['execute_on'])

        # custom script still requires the target hosts/groups to be set
        operation['opcommand_hst'] = []
        operation['opcommand_grp'] = []
        for usr_host in operation['target_hosts']:
            if usr_host['target_type'] == 'zabbix server':
                # 0 = target host local/current host
                operation['opcommand_hst'].append({'hostid': 0})
            elif usr_host['target_type'] == 'group':
                group_name = usr_host['target']
                gid = get_host_group_id_by_name(zapi, group_name)
                operation['opcommand_grp'].append({'groupid': gid})
            elif usr_host['target_type'] == 'host':
                host_name = usr_host['target']
                hid = get_host_id_by_name(zapi, host_name)
                operation['opcommand_hst'].append({'hostid': hid})

        # 'target_hosts' is just to make it easier to build zbx_actions
        # not part of ZabbixAPI
        del operation['target_hosts']
    else:
        ansible_module.exit_json(failed=True, changed=False, state='unknown',
                                 results="Unsupported remote command type")
def get_action_operations(ansible_module, zapi, inc_operations):
    '''Convert the operations into syntax for api

    Mutates each operation dict in `inc_operations` in place, translating
    symbolic names (operation type, mediatype description, user and group
    names, opcondition values) into the numeric codes/ids the Zabbix API
    expects.

    Args:
        ansible_module: AnsibleModule, used to abort on invalid input
        zapi: connected ZabbixAPI instance for id lookups
        inc_operations: list of user-supplied operation dicts

    Returns:
        The same list, converted for the API.
    '''
    for operation in inc_operations:
        operation['operationtype'] = get_operation_type(operation['operationtype'])
        if operation['operationtype'] == 0: # send message. Need to fix the
            operation['opmessage']['mediatypeid'] = \
                get_mediatype_id_by_name(zapi, operation['opmessage']['mediatypeid'])
            operation['opmessage_grp'] = get_user_groups(zapi, operation.get('opmessage_grp', []))
            operation['opmessage_usr'] = get_users(zapi, operation.get('opmessage_usr', []))
            # The API wants 0/1 rather than a boolean.
            if operation['opmessage']['default_msg']:
                operation['opmessage']['default_msg'] = 1
            else:
                operation['opmessage']['default_msg'] = 0
        elif operation['operationtype'] == OPERATION_REMOTE_COMMAND:
            action_remote_command(ansible_module, zapi, operation)

        # Handle Operation conditions:
        # Currently there is only 1 available which
        # is 'event acknowledged'. In the future
        # if there are any added we will need to pass this
        # option to a function and return the correct conditiontype
        # (dict.has_key() is deprecated and removed in Python 3; 'in' is
        # the portable spelling.)
        if 'opconditions' in operation:
            for condition in operation['opconditions']:
                if condition['conditiontype'] == 'event acknowledged':
                    condition['conditiontype'] = 14
                if condition['operator'] == '=':
                    condition['operator'] = 0
                if condition['value'] == 'acknowledged':
                    condition['value'] = 1
                else:
                    condition['value'] = 0
    return inc_operations
def get_operation_evaltype(inc_type):
    '''Map a calculation-type name onto the Zabbix evaltype code.

    Unrecognised input falls back to 0 ('and/or').
    '''
    evaltypes = {'and/or': 0, 'and': 1, 'or': 2, 'custom': 3}
    return evaltypes.get(inc_type, 0)
def get_action_conditions(zapi, event_source, inc_conditions):
    '''Convert the conditions into syntax for api

    Mutates `inc_conditions` in place: the user-facing 'calculation_type'
    key is replaced by the numeric 'evaltype', and each condition's
    operator, conditiontype and (where necessary) value are converted to
    the ids the Zabbix API expects.
    '''
    calc_type = inc_conditions.pop('calculation_type')
    inc_conditions['evaltype'] = get_operation_evaltype(calc_type)
    for cond in inc_conditions['conditions']:
        cond['operator'] = get_condition_operator(cond['operator'])

        # Based on conditiontype we need to set the proper value
        # e.g. conditiontype = hostgroup then the value needs to be a hostgroup id
        # e.g. conditiontype = host the value needs to be a host id
        cond['conditiontype'] = get_condition_type(event_source, cond['conditiontype'])
        if cond['conditiontype'] == 0:
            # host group -> groupid
            cond['value'] = get_host_group_id_by_name(zapi, cond['value'])
        elif cond['conditiontype'] == 1:
            # host -> hostid
            cond['value'] = get_host_id_by_name(zapi, cond['value'])
        elif cond['conditiontype'] == 4:
            # trigger severity -> numeric priority
            cond['value'] = get_priority(cond['value'])
        elif cond['conditiontype'] == 5:
            # trigger value -> 0/1
            cond['value'] = get_trigger_value(cond['value'])
        elif cond['conditiontype'] == 13:
            # host template -> templateid
            cond['value'] = get_template_id_by_name(zapi, cond['value'])
        elif cond['conditiontype'] == 16:
            # maintenance status takes no value
            cond['value'] = ''
    return inc_conditions
def get_send_recovery(send_recovery):
    '''Convert a truthy/falsy flag to the 1/0 integer the API expects.'''
    return 1 if send_recovery else 0
# The branches are needed for CRUD and error handling
# pylint: disable=too-many-branches
def main():
    '''
    ansible zabbix module for zbx_item

    Implements list/absent/present states for a Zabbix action: fetches
    the action by name, then lists, deletes, creates or updates it.
    Always terminates through module.exit_json().
    '''
    # NOTE: 'AnsibleModule' and 'os' are provided by the wildcard import
    # of ansible.module_utils.basic at the bottom of this file.
    module = AnsibleModule(
        argument_spec=dict(
            zbx_server=dict(default='https://localhost/zabbix/api_jsonrpc.php', type='str'),
            zbx_user=dict(default=os.environ.get('ZABBIX_USER', None), type='str'),
            zbx_password=dict(default=os.environ.get('ZABBIX_PASSWORD', None), type='str'),
            zbx_debug=dict(default=False, type='bool'),
            name=dict(default=None, type='str'),
            event_source=dict(default='trigger', choices=['trigger', 'discovery', 'auto', 'internal'], type='str'),
            action_subject=dict(default="{TRIGGER.NAME}: {TRIGGER.STATUS}", type='str'),
            action_message=dict(default="{TRIGGER.NAME}: {TRIGGER.STATUS}\r\n" +
                                "Last value: {ITEM.LASTVALUE}\r\n\r\n{TRIGGER.URL}", type='str'),
            reply_subject=dict(default="{TRIGGER.NAME}: {TRIGGER.STATUS}", type='str'),
            reply_message=dict(default="Trigger: {TRIGGER.NAME}\r\nTrigger status: {TRIGGER.STATUS}\r\n" +
                               "Trigger severity: {TRIGGER.SEVERITY}\r\nTrigger URL: {TRIGGER.URL}\r\n\r\n" +
                               "Item values:\r\n\r\n1. {ITEM.NAME1} ({HOST.NAME1}:{ITEM.KEY1}): " +
                               "{ITEM.VALUE1}\r\n2. {ITEM.NAME2} ({HOST.NAME2}:{ITEM.KEY2}): " +
                               "{ITEM.VALUE2}\r\n3. {ITEM.NAME3} ({HOST.NAME3}:{ITEM.KEY3}): " +
                               "{ITEM.VALUE3}", type='str'),
            send_recovery=dict(default=False, type='bool'),
            status=dict(default=None, type='str'),
            escalation_time=dict(default=60, type='int'),
            conditions_filter=dict(default=None, type='dict'),
            operations=dict(default=None, type='list'),
            state=dict(default='present', type='str'),
        ),
        #supports_check_mode=True
    )

    zapi = ZabbixAPI(ZabbixConnection(module.params['zbx_server'],
                                      module.params['zbx_user'],
                                      module.params['zbx_password'],
                                      module.params['zbx_debug']))

    #Set the instance and the template for the rest of the calls
    zbx_class_name = 'action'
    state = module.params['state']

    # Fetch the existing action by name, with its filter and operations
    # expanded so they can be diffed against the user's parameters.
    content = zapi.get_content(zbx_class_name,
                               'get',
                               {'filter': {'name': module.params['name']},
                                'selectFilter': 'extend',
                                'selectOperations': 'extend',
                               })

    #******#
    # GET
    #******#
    if state == 'list':
        module.exit_json(changed=False, results=content['result'], state="list")

    #******#
    # DELETE
    #******#
    if state == 'absent':
        if not exists(content):
            module.exit_json(changed=False, state="absent")

        content = zapi.get_content(zbx_class_name, 'delete', [content['result'][0]['actionid']])
        module.exit_json(changed=True, results=content['result'], state="absent")

    # Create and Update
    if state == 'present':
        # Convert user-friendly condition/operation descriptions into the
        # numeric codes and ids the Zabbix API expects.
        conditions = get_action_conditions(zapi, module.params['event_source'], module.params['conditions_filter'])
        operations = get_action_operations(module, zapi,
                                           module.params['operations'])
        params = {'name': module.params['name'],
                  'esc_period': module.params['escalation_time'],
                  'eventsource': get_event_source(module.params['event_source']),
                  'status': get_status(module.params['status']),
                  'def_shortdata': module.params['action_subject'],
                  'def_longdata': module.params['action_message'],
                  'r_shortdata': module.params['reply_subject'],
                  'r_longdata': module.params['reply_message'],
                  'recovery_msg': get_send_recovery(module.params['send_recovery']),
                  'filter': conditions,
                  'operations': operations,
                 }

        # Remove any None valued params
        # (relies on Python 2's keys() returning a list snapshot, so the
        # dict can be mutated while iterating)
        _ = [params.pop(key, None) for key in params.keys() if params[key] is None]

        #******#
        # CREATE
        #******#
        if not exists(content):
            content = zapi.get_content(zbx_class_name, 'create', params)

            if content.has_key('error'):
                module.exit_json(failed=True, changed=True, results=content['error'], state="present")

            module.exit_json(changed=True, results=content['result'], state='present')

        ########
        # UPDATE
        ########
        _ = params.pop('hostid', None)
        differences = {}
        zab_results = content['result'][0]
        for key, value in params.items():
            if key == 'operations':
                ops = operation_differences(zab_results[key], value)
                if ops:
                    differences[key] = ops
            elif key == 'filter':
                filters = filter_differences(zab_results[key], value)
                if filters:
                    differences[key] = filters
            elif zab_results[key] != value and zab_results[key] != str(value):
                differences[key] = value

        if not differences:
            module.exit_json(changed=False, results=zab_results, state="present")

        # We have differences and need to update.
        # action update requires an id, filters, and operations
        differences['actionid'] = zab_results['actionid']
        differences['operations'] = params['operations']
        differences['filter'] = params['filter']
        content = zapi.get_content(zbx_class_name, 'update', differences)

        if content.has_key('error'):
            module.exit_json(failed=True, changed=False, results=content['error'], state="present")

        module.exit_json(changed=True, results=content['result'], state="present")

    # Fall-through: an unsupported state value was supplied.
    module.exit_json(failed=True,
                     changed=False,
                     results='Unknown state passed. %s' % state,
                     state="unknown")
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. These are required: Ansible replaces this wildcard
# import with its helper code when the module is shipped to the target host.
from ansible.module_utils.basic import *

main()
| apache-2.0 |
mkwiatkowski/pythoscope | pythoscope/generator/cleaner.py | 2 | 2911 | from pythoscope.generator.dependencies import objects_affected_by_side_effects,\
resolve_dependencies
from pythoscope.generator.lines import *
from pythoscope.generator.method_call_context import MethodCallContext
from pythoscope.side_effect import SideEffect
from pythoscope.serializer import ImmutableObject
from pythoscope.util import all_of_type, compact, counted
# :: [Event] -> [Event]
def remove_objects_unworthy_of_naming(events):
    """Return a copy of the events timeline with all objects that don't
    deserve a name removed.

    An object stays on the timeline only when it is mutable AND is either
    used more than once, affected by side effects, or has methods or
    attributes referenced on it.  Every other object is stripped.
    """
    new_events = list(events)
    side_effects = all_of_type(events, SideEffect)
    affected_objects = objects_affected_by_side_effects(side_effects)
    invoked_objects = objects_with_method_calls(events) + objects_with_attribute_references(events)
    for obj, usage_count in object_usage_counts(events):
        # ImmutableObjects don't need to be named, as their identity is
        # always unambiguous.
        if not isinstance(obj, ImmutableObject):
            # Anything mentioned more than once have to be named.
            if usage_count > 1:
                continue
            # Anything affected by side effects is also worth naming.
            if obj in affected_objects:
                continue
            # All user objects with method calls should also get names for
            # readability.
            if obj in invoked_objects:
                continue
        # Remove every timeline occurrence of this unworthy object.
        try:
            while True:
                new_events.remove(obj)
        except ValueError:
            pass # If the element wasn't on the timeline, even better.
    return new_events
# :: [Event] -> [SerializedObject]
def objects_with_method_calls(events):
    """Collect the objects that have methods called on them somewhere in
    the given list of events."""
    def extract(event):
        # Assertion lines wrap the interesting call, so unwrap recursively.
        if isinstance(event, MethodCallContext):
            return event.user_object
        if isinstance(event, EqualAssertionLine):
            return extract(event.actual)
        if isinstance(event, RaisesAssertionLine):
            return extract(event.call)
        if isinstance(event, GeneratorAssertionLine):
            return extract(event.generator_call)
        return None
    return compact(map(extract, events))
# :: [Event] -> [SerializedObject]
def objects_with_attribute_references(events):
    """Collect the objects that have attributes referenced on them
    somewhere in the given list of events."""
    def extract(event):
        # Assertion lines wrap the interesting reference, so unwrap
        # recursively.
        if isinstance(event, ObjectAttributeReference):
            return event.obj
        if isinstance(event, EqualAssertionLine):
            return extract(event.actual)
        if isinstance(event, RaisesAssertionLine):
            return extract(event.call)
        if isinstance(event, GeneratorAssertionLine):
            return extract(event.generator_call)
        return None
    return compact(map(extract, events))
# :: [Event] -> {SerializedObject: int}
def object_usage_counts(timeline):
    """Count object occurrences among the timeline's resolved
    dependencies, producing (object, count) pairs (see util.counted)."""
    return counted(resolve_dependencies(timeline))
| mit |
aaltinisik/OCBAltinkaya | addons/hr_timesheet_invoice/report/account_analytic_profit.py | 281 | 5811 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.report import report_sxw
from openerp.osv import osv
class account_analytic_profit(report_sxw.rml_parse):
    '''Parser for the timesheet profit report.

    Exposes helpers to the report template for listing analytic lines,
    their users and journals, and for computing per-invoicing-factor
    revenue/cost/profit figures.
    '''
    def __init__(self, cr, uid, name, context):
        super(account_analytic_profit, self).__init__(cr, uid, name, context=context)
        # Make the helpers below callable from the report template.
        self.localcontext.update({
            'lines': self._lines,
            'user_ids': self._user_ids,
            'journal_ids': self._journal_ids,
            'line': self._line,
        })

    def _user_ids(self, lines):
        '''Return the distinct users appearing in the given analytic lines.'''
        user_obj = self.pool['res.users']
        ids=list(set([b.user_id.id for b in lines]))
        return user_obj.browse(self.cr, self.uid, ids)

    def _journal_ids(self, form, user_id):
        '''Return the distinct analytic journals used by user_id within
        the period/journals selected on the wizard form.'''
        if isinstance(user_id, (int, long)):
            user_id = [user_id]
        line_obj = self.pool['account.analytic.line']
        journal_obj = self.pool['account.analytic.journal']
        line_ids=line_obj.search(self.cr, self.uid, [
            ('date', '>=', form['date_from']),
            ('date', '<=', form['date_to']),
            # form['journal_ids'] is an on-change style triple; [0][2] is
            # the list of selected ids.
            ('journal_id', 'in', form['journal_ids'][0][2]),
            ('user_id', 'in', user_id),
            ])
        ids=list(set([b.journal_id.id for b in line_obj.browse(self.cr, self.uid, line_ids)]))
        return journal_obj.browse(self.cr, self.uid, ids)

    def _line(self, form, journal_ids, user_ids):
        '''Aggregate the selected analytic lines per invoicing factor.

        Returns a list of dicts with keys: name, amount (invoiced or
        theoretical revenue), amount_th (theoretical revenue), cost,
        unit_amount, profit and eff (efficiency percentage as a string).
        '''
        line_obj = self.pool['account.analytic.line']
        product_obj = self.pool['product.product']
        price_obj = self.pool['product.pricelist']
        ids=line_obj.search(self.cr, self.uid, [
            ('date', '>=', form['date_from']),
            ('date', '<=', form['date_to']),
            ('journal_id', 'in', journal_ids),
            ('user_id', 'in', user_ids),
            ])
        res={}
        for line in line_obj.browse(self.cr, self.uid, ids):
            if line.account_id.pricelist_id:
                if line.account_id.to_invoice:
                    if line.to_invoice:
                        # Group by the invoicing rate (factor) of the line.
                        id=line.to_invoice.id
                        name=line.to_invoice.name
                        discount=line.to_invoice.factor
                    else:
                        # Invoiceable account but no rate on the line.
                        name="/"
                        discount=1.0
                        id = -1
                else:
                    # Fixed-price account: no discountable revenue.
                    name="Fixed"
                    discount=0.0
                    id=0
                pl=line.account_id.pricelist_id.id
                price=price_obj.price_get(self.cr, self.uid, [pl], line.product_id.id, line.unit_amount or 1.0, line.account_id.partner_id.id)[pl]
            else:
                # No pricelist on the account: nothing can be priced.
                name="/"
                discount=1.0
                id = -1
                price=0.0
            if id not in res:
                res[id]={'name': name, 'amount': 0, 'cost':0, 'unit_amount':0,'amount_th':0}
            # Theoretical revenue for this line (pricelist price minus the
            # invoicing-factor discount).
            xxx = round(price * line.unit_amount * (1-(discount or 0.0)), 2)
            res[id]['amount_th']+=xxx
            if line.invoice_id:
                # The line was invoiced: prorate the invoice's untaxed
                # amount over all analytic lines of that invoice.
                self.cr.execute('select id from account_analytic_line where invoice_id=%s', (line.invoice_id.id,))
                tot = 0
                for lid in self.cr.fetchall():
                    lid2 = line_obj.browse(self.cr, self.uid, lid[0])
                    pl=lid2.account_id.pricelist_id.id
                    price=price_obj.price_get(self.cr, self.uid, [pl], lid2.product_id.id, lid2.unit_amount or 1.0, lid2.account_id.partner_id.id)[pl]
                    tot += price * lid2.unit_amount * (1-(discount or 0.0))
                if tot:
                    procent = line.invoice_id.amount_untaxed / tot
                    res[id]['amount'] += xxx * procent
                else:
                    res[id]['amount'] += xxx
            else:
                res[id]['amount'] += xxx
            res[id]['cost']+=line.amount
            res[id]['unit_amount']+=line.unit_amount
        for id in res:
            # Costs are stored as negative amounts, hence the addition and
            # the sign flip in the efficiency ratio below.
            res[id]['profit']=res[id]['amount']+res[id]['cost']
            res[id]['eff']=res[id]['cost'] and '%d' % (-res[id]['amount'] / res[id]['cost'] * 100,) or 0.0
        return res.values()

    def _lines(self, form):
        '''Return the analytic lines matching the wizard form selection.'''
        line_obj = self.pool['account.analytic.line']
        ids=line_obj.search(self.cr, self.uid, [
            ('date', '>=', form['date_from']),
            ('date', '<=', form['date_to']),
            ('journal_id', 'in', form['journal_ids'][0][2]),
            ('user_id', 'in', form['employee_ids'][0][2]),
            ])
        return line_obj.browse(self.cr, self.uid, ids)
class report_account_analytic_profit(osv.AbstractModel):
    '''Report model binding the analytic-profit template to the
    account_analytic_profit parser defined above.'''
    _name = 'report.hr_timesheet_invoice.report_analyticprofit'
    _inherit = 'report.abstract_report'
    _template = 'hr_timesheet_invoice.report_analyticprofit'
    _wrapped_report_class = account_analytic_profit
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Ingenico-ePayments/connect-sdk-python2 | examples/merchant/services/iin_details_example.py | 2 | 1113 | #
# This class was auto-generated from the API references found at
# https://epayments-api.developer-ingenico.com/s2sapi/v1/
#
import os
from ingenico.connect.sdk.factory import Factory
from ingenico.connect.sdk.domain.services.get_iin_details_request import GetIINDetailsRequest
class IINDetailsExample(object):
    '''Example: request issuer (IIN) details for a card number prefix.'''
    def example(self):
        # The client is used as a context manager; presumably it closes
        # its connection on exit -- see the SDK's Factory documentation.
        with self.__get_client() as client:
            body = GetIINDetailsRequest()
            body.bin = "4567350000427977"
            response = client.merchant("merchantId").services().get_iin_details(body)

    def __get_client(self):
        # Credentials fall back to placeholder values when the environment
        # variables are not set.
        api_key_id = os.getenv("connect.api.apiKeyId", "someKey")
        secret_api_key = os.getenv("connect.api.secretApiKey", "someSecret")
        configuration_file_name = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                                               '../../example_configuration.ini'))
        return Factory.create_client_from_file(configuration_file_name=configuration_file_name,
                                               api_key_id=api_key_id, secret_api_key=secret_api_key)
| mit |
nuvotonmcu/uboot | tools/patman/series.py | 7 | 9140 | # Copyright (c) 2011 The Chromium OS Authors.
#
# See file CREDITS for list of people who contributed to this
# project.
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
import itertools
import os
import get_maintainer
import gitutil
import terminal
# Series-xxx tags that we understand
# (any other Series- tag encountered makes AddTag() raise ValueError)
valid_series = ['to', 'cc', 'version', 'changes', 'prefix', 'notes', 'name'];
class Series(dict):
"""Holds information about a patch series, including all tags.
Vars:
cc: List of aliases/emails to Cc all patches to
commits: List of Commit objects, one for each patch
cover: List of lines in the cover letter
notes: List of lines in the notes
changes: (dict) List of changes for each version, The key is
the integer version number
"""
def __init__(self):
    # Note: attribute assignment goes through __setattr__ below, so each
    # of these actually creates a dictionary entry on this dict subclass.
    self.cc = []
    self.to = []
    self.commits = []
    self.cover = None
    self.notes = []
    self.changes = {}

    # Written in MakeCcFile()
    #  key: name of patch file
    #  value: list of email addresses
    self._generated_cc = {}
# These make us more like a dictionary
def __setattr__(self, name, value):
    # Store attributes as dict items so tags and attributes share one
    # namespace.
    self[name] = value
def __getattr__(self, name):
    # Only called when normal attribute lookup fails; raises KeyError
    # (not AttributeError) for unknown names.
    return self[name]
def AddTag(self, commit, line, name, value):
    """Add a new Series-xxx tag along with its value.

    Args:
        commit: Commit object the tag was found in (used in error messages)
        line: Source line containing tag (useful for debug/error messages)
        name: Tag name (part after 'Series-')
        value: Tag value (part after 'Series-xxx: ')

    Raises:
        ValueError: if the tag is unknown, or if a second value is added
            to a tag that does not hold a list
    """
    # If we already have it, then add to our list
    if name in self:
        # Fix: the original shadowed the built-in 'str' as its loop
        # variable; use a distinct name instead.
        values = [part.strip() for part in value.split(',')]
        if not isinstance(self[name], list):
            raise ValueError("In %s: line '%s': Cannot add another value "
                             "'%s' to series '%s'" %
                             (commit.hash, line, values, self[name]))
        self[name] += values

    # Otherwise just set the value
    elif name in valid_series:
        self[name] = value
    else:
        raise ValueError("In %s: line '%s': Unknown 'Series-%s': valid "
                         "options are %s" % (commit.hash, line, name,
                                             ', '.join(valid_series)))
def AddCommit(self, commit):
    """Add a commit into our list of commits

    We create a list of tags in the commit subject also.

    Args:
        commit: Commit object to add
    """
    # Validate the commit's tags before accepting it into the series.
    commit.CheckTags()
    self.commits.append(commit)
def ShowActions(self, args, cmd, process_tags):
"""Show what actions we will/would perform
Args:
args: List of patch files we created
cmd: The git command we would have run
process_tags: Process tags as if they were aliases
"""
col = terminal.Color()
print 'Dry run, so not doing much. But I would do this:'
print
print 'Send a total of %d patch%s with %scover letter.' % (
len(args), '' if len(args) == 1 else 'es',
self.get('cover') and 'a ' or 'no ')
# TODO: Colour the patches according to whether they passed checks
for upto in range(len(args)):
commit = self.commits[upto]
print col.Color(col.GREEN, ' %s' % args[upto])
cc_list = list(self._generated_cc[commit.patch])
# Skip items in To list
if 'to' in self:
try:
map(cc_list.remove, gitutil.BuildEmailList(self.to))
except ValueError:
pass
for email in cc_list:
if email == None:
email = col.Color(col.YELLOW, "<alias '%s' not found>"
% tag)
if email:
print ' Cc: ',email
print
for item in gitutil.BuildEmailList(self.get('to', '<none>')):
print 'To:\t ', item
for item in gitutil.BuildEmailList(self.cc):
print 'Cc:\t ', item
print 'Version: ', self.get('version')
print 'Prefix:\t ', self.get('prefix')
if self.cover:
print 'Cover: %d lines' % len(self.cover)
all_ccs = itertools.chain(*self._generated_cc.values())
for email in set(all_ccs):
print ' Cc: ',email
if cmd:
print 'Git command: %s' % cmd
def MakeChangeLog(self, commit):
"""Create a list of changes for each version.
Return:
The change log as a list of strings, one per line
Changes in v4:
- Jog the dial back closer to the widget
Changes in v3: None
Changes in v2:
- Fix the widget
- Jog the dial
etc.
"""
final = []
need_blank = False
for change in sorted(self.changes, reverse=True):
out = []
for this_commit, text in self.changes[change]:
if commit and this_commit != commit:
continue
out.append(text)
line = 'Changes in v%d:' % change
have_changes = len(out) > 0
if have_changes:
out.insert(0, line)
else:
out = [line + ' None']
if need_blank:
out.insert(0, '')
final += out
need_blank = have_changes
if self.changes:
final.append('')
return final
def DoChecks(self):
"""Check that each version has a change log
Print an error if something is wrong.
"""
col = terminal.Color()
if self.get('version'):
changes_copy = dict(self.changes)
for version in range(1, int(self.version) + 1):
if self.changes.get(version):
del changes_copy[version]
else:
if version > 1:
str = 'Change log missing for v%d' % version
print col.Color(col.RED, str)
for version in changes_copy:
str = 'Change log for unknown version v%d' % version
print col.Color(col.RED, str)
elif self.changes:
str = 'Change log exists, but no version is set'
print col.Color(col.RED, str)
def MakeCcFile(self, process_tags, cover_fname):
"""Make a cc file for us to use for per-commit Cc automation
Also stores in self._generated_cc to make ShowActions() faster.
Args:
process_tags: Process tags as if they were aliases
cover_fname: If non-None the name of the cover letter.
Return:
Filename of temp file created
"""
# Look for commit tags (of the form 'xxx:' at the start of the subject)
fname = '/tmp/patman.%d' % os.getpid()
fd = open(fname, 'w')
all_ccs = []
for commit in self.commits:
list = []
if process_tags:
list += gitutil.BuildEmailList(commit.tags)
list += gitutil.BuildEmailList(commit.cc_list)
list += get_maintainer.GetMaintainer(commit.patch)
all_ccs += list
print >>fd, commit.patch, ', '.join(list)
self._generated_cc[commit.patch] = list
if cover_fname:
print >>fd, cover_fname, ', '.join(set(all_ccs))
fd.close()
return fname
def AddChange(self, version, commit, info):
"""Add a new change line to a version.
This will later appear in the change log.
Args:
version: version number to add change list to
info: change line for this version
"""
if not self.changes.get(version):
self.changes[version] = []
self.changes[version].append([commit, info])
def GetPatchPrefix(self):
"""Get the patch version string
Return:
Patch string, like 'RFC PATCH v5' or just 'PATCH'
"""
version = ''
if self.get('version'):
version = ' v%s' % self['version']
# Get patch name prefix
prefix = ''
if self.get('prefix'):
prefix = '%s ' % self['prefix']
return '%sPATCH%s' % (prefix, version)
| gpl-2.0 |
wakermahmud/docker-registry | docker_registry/lib/checksums.py | 35 | 2253 | # -*- coding: utf-8 -*-
import hashlib
import logging
logger = logging.getLogger(__name__)
def sha256_file(fp, data=None):
    """Return the hex SHA-256 digest of *data* followed by the contents of *fp*.

    Args:
        fp: file-like object read in 4 KiB chunks, or a falsy value to hash
            only *data*.
        data: optional initial bytes fed into the hash before reading *fp*.

    Returns:
        Hexadecimal digest string.
    """
    # Seed with b'' (not '') so the fallback type matches the bytes read
    # from fp; on Python 2, b'' == '' so behaviour is unchanged there.
    h = hashlib.sha256(data or b'')
    if not fp:
        return h.hexdigest()
    while True:
        buf = fp.read(4096)
        if not buf:
            break
        h.update(buf)
    return h.hexdigest()
def sha256_string(s):
    """Return the hexadecimal SHA-256 digest of *s*."""
    digest = hashlib.sha256(s)
    return digest.hexdigest()
class TarSum(object):
    """Incrementally computes a 'tarsum+sha256' checksum over a tar layer.

    Feed every tar member through append(), then call compute() for the
    final digest string.
    """
    def __init__(self, json_data):
        # Raw JSON metadata string mixed into the final digest
        self.json_data = json_data
        # Header fields folded into each member's hash, in this exact order
        self.header_fields = ('name', 'mode', 'uid', 'gid', 'size', 'mtime',
                              'type', 'linkname', 'uname', 'gname', 'devmajor',
                              'devminor')
        self.hashes = []
    def append(self, member, tarobj):
        """Hash one tar member (selected header fields + file payload).

        Args:
            member: the tar entry's info object (field names per
                header_fields above)
            tarobj: the open tar archive containing the member
        """
        header = ''
        for field in self.header_fields:
            value = getattr(member, field)
            if field == 'type':
                # The header string labels this field 'typeflag', not 'type'
                field = 'typeflag'
            elif field == 'name':
                # Directory names are normalised to carry a trailing slash
                if member.isdir() and not value.endswith('/'):
                    value += '/'
            header += '{0}{1}'.format(field, value)
        h = None
        try:
            if member.size > 0:
                f = tarobj.extractfile(member)
                h = sha256_file(f, header)
            else:
                # Empty payload: hash the header string alone
                h = sha256_string(header)
        except KeyError:
            # Member has no extractable payload; fall back to header-only
            h = sha256_string(header)
        self.hashes.append(h)
    def compute(self):
        """Return the final 'tarsum+sha256:<hex>' over all appended members."""
        # Sorting makes the result independent of member order in the archive
        self.hashes.sort()
        data = self.json_data + ''.join(self.hashes)
        tarsum = 'tarsum+sha256:{0}'.format(sha256_string(data))
        logger.debug('checksums.compute_tarsum: return {0}'.format(tarsum))
        return tarsum
def simple_checksum_handler(json_data):
    """Return (hash, writer) for incrementally computing the simple checksum.

    The hash is seeded with json_data + '\\n' (Python 2 str); stream layer
    chunks through the returned writer, then read hash.hexdigest().
    """
    h = hashlib.sha256(json_data + '\n')
    def fn(buf):
        # Fold another chunk into the running digest (closure over h)
        h.update(buf)
    return h, fn
def compute_simple(fp, json_data):
    """Return the simple checksum 'sha256:<hex>' of json_data plus a newline,
    followed by the layer contents read from *fp*."""
    payload = json_data + '\n'
    digest = sha256_file(fp, payload)
    return 'sha256:' + digest
if __name__ == '__main__':
    # CLI entry point: compute the simple checksum of a (json, layer) pair.
    import sys
    if len(sys.argv) < 3:
        print('Usage: {0} json_file layer_file'.format(sys.argv[0]))
        sys.exit(1)
    # open() instead of the py2-only file() builtin (removed in Python 3;
    # identical behaviour on Python 2).
    json_data = open(sys.argv[1]).read()
    fp = open(sys.argv[2])
    print(compute_simple(fp, json_data))
    # print compute_tarsum(fp, json_data)
jyotsna1820/django | tests/comment_tests/tests/__init__.py | 13 | 2811 | from django.contrib.auth.models import User
from django.contrib.comments.forms import CommentForm
from django.contrib.comments.models import Comment
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
from django.test import TestCase, override_settings
from ..models import Article, Author
# Shortcut: CT(model) resolves the ContentType row for a model class
CT = ContentType.objects.get_for_model
# Helper base class for comment tests that need data.
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.UnsaltedMD5PasswordHasher',))
class CommentTestCase(TestCase):
    """Base class for comment tests that need data.

    Loads the comment_tests fixture; createSomeComments() adds two anonymous
    and two authenticated comments spread over Article and Author objects.
    """
    fixtures = ["comment_tests"]
    urls = 'comment_tests.urls_default'
    def createSomeComments(self):
        # Two anonymous comments on two different objects
        c1 = Comment.objects.create(
            content_type = CT(Article),
            object_pk = "1",
            user_name = "Joe Somebody",
            user_email = "jsomebody@example.com",
            user_url = "http://example.com/~joe/",
            comment = "First!",
            site = Site.objects.get_current(),
        )
        c2 = Comment.objects.create(
            content_type = CT(Author),
            object_pk = "1",
            user_name = "Joe Somebody",
            user_email = "jsomebody@example.com",
            user_url = "http://example.com/~joe/",
            comment = "First here, too!",
            site = Site.objects.get_current(),
        )
        # Two authenticated comments: one on the same Article, and
        # one on a different Author
        user = User.objects.create(
            username = "frank_nobody",
            first_name = "Frank",
            last_name = "Nobody",
            email = "fnobody@example.com",
            password = "",
            is_staff = False,
            is_active = True,
            is_superuser = False,
        )
        c3 = Comment.objects.create(
            content_type = CT(Article),
            object_pk = "1",
            user = user,
            user_url = "http://example.com/~frank/",
            comment = "Damn, I wanted to be first.",
            site = Site.objects.get_current(),
        )
        c4 = Comment.objects.create(
            content_type = CT(Author),
            object_pk = "2",
            user = user,
            user_url = "http://example.com/~frank/",
            comment = "You get here first, too?",
            site = Site.objects.get_current(),
        )
        return c1, c2, c3, c4
    def getData(self):
        # Raw POST-style payload for a comment form (no security fields)
        return {
            'name' : 'Jim Bob',
            'email' : 'jim.bob@example.com',
            'url' : '',
            'comment' : 'This is my comment',
        }
    def getValidData(self, obj):
        # getData() plus the form's initial values (security hash etc.)
        f = CommentForm(obj)
        d = self.getData()
        d.update(f.initial)
        return d
| bsd-3-clause |
steedos/odoo7 | openerp/addons/point_of_sale/__init__.py | 61 | 1191 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import point_of_sale
import account_bank_statement
import res_users
import res_partner
import wizard
import report
import controllers
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
nirmeshk/oh-mainline | vendor/packages/celery/celery/tests/test_events/test_events_state.py | 18 | 10779 | from __future__ import absolute_import
from time import time
from itertools import count
from celery import states
from celery.events import Event
from celery.events.state import State, Worker, Task, HEARTBEAT_EXPIRE
from celery.utils import uuid
from celery.tests.utils import unittest
class replay(object):
    """Feeds a recorded event list (built by setup()) into a State.

    Acts as an iterator: each next() applies one event to self.state.
    """
    def __init__(self, state):
        self.state = state
        self.rewind()
        self.setup()
    def setup(self):
        # Subclasses populate self.events here
        pass
    def __iter__(self):
        return self
    def __next__(self):
        try:
            self.state.event(self.events[self.position()])
        except IndexError:
            raise StopIteration()
    next = __next__
    def rewind(self):
        # py2-only: bind the .next method of an itertools.count iterator,
        # so each self.position() call yields the next event index
        self.position = count(0).next
        return self
    def play(self):
        # Apply all remaining events
        for _ in self:
            pass
class ev_worker_online_offline(replay):
    """Replay fixture: one worker comes online, then goes offline."""
    def setup(self):
        hostname = "utest1"
        self.events = [
            Event("worker-online", hostname=hostname),
            Event("worker-offline", hostname=hostname),
        ]
class ev_worker_heartbeats(replay):
    """Replay fixture: an expired heartbeat followed by a fresh one."""
    def setup(self):
        stale = time() - HEARTBEAT_EXPIRE * 2
        self.events = [
            Event("worker-heartbeat", hostname="utest1", timestamp=stale),
            Event("worker-heartbeat", hostname="utest1"),
        ]
class ev_task_states(replay):
    """Replay fixture: one task walked through every lifecycle event
    (received, started, revoked, retried, failed, succeeded)."""
    def setup(self):
        # self.tid is kept so tests can look the task up in the State
        tid = self.tid = uuid()
        self.events = [
            Event("task-received", uuid=tid, name="task1",
                  args="(2, 2)", kwargs="{'foo': 'bar'}",
                  retries=0, eta=None, hostname="utest1"),
            Event("task-started", uuid=tid, hostname="utest1"),
            Event("task-revoked", uuid=tid, hostname="utest1"),
            Event("task-retried", uuid=tid, exception="KeyError('bar')",
                  traceback="line 2 at main", hostname="utest1"),
            Event("task-failed", uuid=tid, exception="KeyError('foo')",
                  traceback="line 1 at main", hostname="utest1"),
            Event("task-succeeded", uuid=tid, result="4",
                  runtime=0.1234, hostname="utest1"),
        ]
class ev_snapshot(replay):
    """Replay fixture: three online workers plus 20 received tasks,
    alternating between (utest2, task2) for even i and (utest1, task1)
    for odd i."""
    def setup(self):
        self.events = [
            Event("worker-online", hostname="utest1"),
            Event("worker-online", hostname="utest2"),
            Event("worker-online", hostname="utest3"),
        ]
        for i in range(20):
            # Conditional expressions instead of the fragile `cond and a or b`
            # idiom, and no shadowing of the `type` builtin.
            worker = "utest2" if i % 2 == 0 else "utest1"
            task_name = "task2" if i % 2 == 0 else "task1"
            self.events.append(Event("task-received", name=task_name,
                                     uuid=uuid(), hostname=worker))
class test_Worker(unittest.TestCase):
    """Unit tests for the Worker event-state class."""
    def test_survives_missing_timestamp(self):
        # A heartbeat without a timestamp must be ignored, not recorded
        worker = Worker(hostname="foo")
        worker.on_heartbeat(timestamp=None)
        self.assertEqual(worker.heartbeats, [])
    def test_repr(self):
        self.assertTrue(repr(Worker(hostname="foo")))
class test_Task(unittest.TestCase):
    """Unit tests for the Task event-state class."""
    def test_info(self):
        # info() must expose exactly the declared _info_fields, plus extras
        task = Task(uuid="abcdefg",
                    name="tasks.add",
                    args="(2, 2)",
                    kwargs="{}",
                    retries=2,
                    result=42,
                    eta=1,
                    runtime=0.0001,
                    expires=1,
                    exception=1,
                    received=time() - 10,
                    started=time() - 8,
                    succeeded=time())
        self.assertEqual(sorted(list(task._info_fields)),
                         sorted(task.info().keys()))
        self.assertEqual(sorted(list(task._info_fields + ("received", ))),
                         sorted(task.info(extra=("received", ))))
        self.assertEqual(sorted(["args", "kwargs"]),
                         sorted(task.info(["args", "kwargs"]).keys()))
    def test_ready(self):
        # ready flips from False to True once the task reaches a final state
        task = Task(uuid="abcdefg",
                    name="tasks.add")
        task.on_received(timestamp=time())
        self.assertFalse(task.ready)
        task.on_succeeded(timestamp=time())
        self.assertTrue(task.ready)
    def test_sent(self):
        task = Task(uuid="abcdefg",
                    name="tasks.add")
        task.on_sent(timestamp=time())
        self.assertEqual(task.state, states.PENDING)
    def test_merge(self):
        # Out-of-order events must not regress an already-final state,
        # but field data from later events is still merged in
        task = Task()
        task.on_failed(timestamp=time())
        task.on_started(timestamp=time())
        task.on_received(timestamp=time(), name="tasks.add", args=(2, 2))
        self.assertEqual(task.state, states.FAILURE)
        self.assertEqual(task.name, "tasks.add")
        self.assertTupleEqual(task.args, (2, 2))
        task.on_retried(timestamp=time())
        self.assertEqual(task.state, states.RETRY)
    def test_repr(self):
        self.assertTrue(repr(Task(uuid="xxx", name="tasks.add")))
class test_State(unittest.TestCase):
    """Unit tests for the State event container, driven by the replay
    fixtures defined above."""
    def test_repr(self):
        self.assertTrue(repr(State()))
    def test_worker_online_offline(self):
        r = ev_worker_online_offline(State())
        r.next()
        self.assertTrue(r.state.alive_workers())
        self.assertTrue(r.state.workers["utest1"].alive)
        r.play()
        self.assertFalse(r.state.alive_workers())
        self.assertFalse(r.state.workers["utest1"].alive)
    def test_worker_heartbeat_expire(self):
        r = ev_worker_heartbeats(State())
        # First event carries an expired timestamp -> worker counts as dead
        r.next()
        self.assertFalse(r.state.alive_workers())
        self.assertFalse(r.state.workers["utest1"].alive)
        # Fresh heartbeat revives it
        r.play()
        self.assertTrue(r.state.alive_workers())
        self.assertTrue(r.state.workers["utest1"].alive)
    def test_task_states(self):
        r = ev_task_states(State())
        # RECEIVED
        r.next()
        self.assertTrue(r.tid in r.state.tasks)
        task = r.state.tasks[r.tid]
        self.assertEqual(task.state, states.RECEIVED)
        self.assertTrue(task.received)
        self.assertEqual(task.timestamp, task.received)
        self.assertEqual(task.worker.hostname, "utest1")
        # STARTED
        r.next()
        self.assertTrue(r.state.workers["utest1"].alive,
                "any task event adds worker heartbeat")
        self.assertEqual(task.state, states.STARTED)
        self.assertTrue(task.started)
        self.assertEqual(task.timestamp, task.started)
        self.assertEqual(task.worker.hostname, "utest1")
        # REVOKED
        r.next()
        self.assertEqual(task.state, states.REVOKED)
        self.assertTrue(task.revoked)
        self.assertEqual(task.timestamp, task.revoked)
        self.assertEqual(task.worker.hostname, "utest1")
        # RETRY
        r.next()
        self.assertEqual(task.state, states.RETRY)
        self.assertTrue(task.retried)
        self.assertEqual(task.timestamp, task.retried)
        self.assertEqual(task.worker.hostname, "utest1")
        self.assertEqual(task.exception, "KeyError('bar')")
        self.assertEqual(task.traceback, "line 2 at main")
        # FAILURE
        r.next()
        self.assertEqual(task.state, states.FAILURE)
        self.assertTrue(task.failed)
        self.assertEqual(task.timestamp, task.failed)
        self.assertEqual(task.worker.hostname, "utest1")
        self.assertEqual(task.exception, "KeyError('foo')")
        self.assertEqual(task.traceback, "line 1 at main")
        # SUCCESS
        r.next()
        self.assertEqual(task.state, states.SUCCESS)
        self.assertTrue(task.succeeded)
        self.assertEqual(task.timestamp, task.succeeded)
        self.assertEqual(task.worker.hostname, "utest1")
        self.assertEqual(task.result, "4")
        self.assertEqual(task.runtime, 0.1234)
    # Custom assertion helpers (not tests themselves)
    def assertStateEmpty(self, state):
        self.assertFalse(state.tasks)
        self.assertFalse(state.workers)
        self.assertFalse(state.event_count)
        self.assertFalse(state.task_count)
    def assertState(self, state):
        self.assertTrue(state.tasks)
        self.assertTrue(state.workers)
        self.assertTrue(state.event_count)
        self.assertTrue(state.task_count)
    def test_freeze_while(self):
        s = State()
        r = ev_snapshot(s)
        r.play()
        def work():
            pass
        s.freeze_while(work, clear_after=True)
        self.assertFalse(s.event_count)
        s2 = State()
        r = ev_snapshot(s2)
        r.play()
        s2.freeze_while(work, clear_after=False)
        self.assertTrue(s2.event_count)
    def test_clear_tasks(self):
        s = State()
        r = ev_snapshot(s)
        r.play()
        self.assertTrue(s.tasks)
        s.clear_tasks(ready=False)
        self.assertFalse(s.tasks)
    def test_clear(self):
        r = ev_snapshot(State())
        r.play()
        self.assertTrue(r.state.event_count)
        self.assertTrue(r.state.workers)
        self.assertTrue(r.state.tasks)
        self.assertTrue(r.state.task_count)
        # clear() keeps tasks unless told otherwise
        r.state.clear()
        self.assertFalse(r.state.event_count)
        self.assertFalse(r.state.workers)
        self.assertTrue(r.state.tasks)
        self.assertFalse(r.state.task_count)
        r.state.clear(False)
        self.assertFalse(r.state.tasks)
    def test_task_types(self):
        r = ev_snapshot(State())
        r.play()
        self.assertEqual(sorted(r.state.task_types()), ["task1", "task2"])
    def test_tasks_by_timestamp(self):
        r = ev_snapshot(State())
        r.play()
        self.assertEqual(len(r.state.tasks_by_timestamp()), 20)
    def test_tasks_by_type(self):
        r = ev_snapshot(State())
        r.play()
        self.assertEqual(len(r.state.tasks_by_type("task1")), 10)
        self.assertEqual(len(r.state.tasks_by_type("task2")), 10)
    def test_alive_workers(self):
        r = ev_snapshot(State())
        r.play()
        self.assertEqual(len(r.state.alive_workers()), 3)
    def test_tasks_by_worker(self):
        r = ev_snapshot(State())
        r.play()
        self.assertEqual(len(r.state.tasks_by_worker("utest1")), 10)
        self.assertEqual(len(r.state.tasks_by_worker("utest2")), 10)
    def test_survives_unknown_worker_event(self):
        # Unknown event types (even without a hostname) must not raise
        s = State()
        s.worker_event("worker-unknown-event-xxx", {"foo": "bar"})
        s.worker_event("worker-unknown-event-xxx", {"hostname": "xxx",
                                                    "foo": "bar"})
    def test_survives_unknown_task_event(self):
        s = State()
        s.task_event("task-unknown-event-xxx", {"foo": "bar",
                                                "uuid": "x",
                                                "hostname": "y"})
    def test_callback(self):
        # The State callback is invoked for every event received
        scratch = {}
        def callback(state, event):
            scratch["recv"] = True
        s = State(callback=callback)
        s.event({"type": "worker-online"})
        self.assertTrue(scratch.get("recv"))
| agpl-3.0 |
sokolic/miniSASS | minisass_registration/forms.py | 1 | 6601 | from django import forms
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
# Frank Sokolic: June 2018 - Disabled all the recaptcha code as version 1 is no longer supported
#from cmsplugin_contact.nospam.widgets import RecaptchaChallenge, RecaptchaResponse
from registration.forms import RegistrationForm
from minisass_registration.models import Lookup
def _get_organisation_types():
    """Build the choice list of active 'Organisation Type' lookups,
    prefixed with a placeholder option."""
    choices = [('', '-- Select a Type --')]
    lookups = Lookup.objects.filter(
        container__description='Organisation Type',
        active=True).order_by('rank', 'description')
    choices.extend((item.id, item.description,) for item in lookups)
    return choices
def _get_organisation_names():
return []
def _get_countries():
    """Build the country choice list via raw SQL, placeholder first.

    Raw SQL is used instead of the ORM so that 'ORDER BY rank = 0, rank,
    description' can push unranked (rank 0) entries to the end.
    NOTE(review): container_id='8' is hard-coded -- presumably the
    'Country' container's primary key; verify against the database.
    """
    result = [('','-- Select a Country --')]
    qs = Lookup.objects.raw("SELECT * FROM minisass_registration_lookup WHERE container_id='8' AND active ='t' ORDER BY rank = 0, rank, description" )
    result.extend([(itm.id, itm.description,) for itm in qs])
    return result
def _get_countries_old():
    """Legacy ORM-based country list, superseded by _get_countries().

    Unlike the raw-SQL version it sorts plainly by (rank, description),
    so rank-0 entries sort first rather than last.
    """
    result = [('','-- Select a Country --')]
    qs = Lookup.objects.filter(
        container__description='Country',
        active=True)
    qs = qs.order_by('rank', 'description')
    result.extend([(itm.id, itm.description,) for itm in qs])
    return result
class miniSASSregistrationForm(RegistrationForm):
    """ Add fields for firstname, lastname and organisation

    Extends the stock registration form; ReCAPTCHA v1 support is disabled
    (commented out below) since that service was discontinued.
    """
    firstname = forms.CharField(
        label=_("Name"),
        max_length=30,
        help_text=_(u"Kept confidential"))
    lastname = forms.CharField(
        label=_("Surname"),
        max_length=30,
        help_text=_(u"Kept confidential"))
    organisation_type = forms.ChoiceField(
        label=_("Organisation Type"),
        required=True,
        help_text=_(u"Please select an organisation type, \
            or private individual"))
    organisation_name = forms.CharField(
        label=_("Organisation Name"),
        max_length=50,
        help_text=_(u"Please check if school already listed, \
            then add if not."),
        required=False)
    country = forms.ChoiceField(
        label=_("Country"),
        required=False,
        help_text=_(u"Please select a country"))
    # recaptcha_challenge_field = forms.CharField(widget=RecaptchaChallenge)
    # recaptcha_response_field = forms.CharField(
    #     widget = RecaptchaResponse,
    #     label = _('Please enter the letters/digits you see in the image :'),
    #     error_messages = {
    #         'required': _('You did not enter any of the words.')
    #     })
    # recaptcha_always_validate = False
    def __init__(self, request, *args, **kwargs):
        # Because the ReCAPTCHA library requires the fields to be named a
        # certain way, using a form prefix will break the validation unless we
        # modify the received POST and rename the keys accordingly
        self._request = request
        if ('data' in kwargs or len(args) > 1) and 'prefix' in kwargs:
            # Copy the (immutable) QueryDict so it can be re-keyed
            data = kwargs.get('data', args[1]).__copy__()
            # data['%s-recaptcha_challenge_field' % kwargs['prefix']] = \
            #     data.pop('recaptcha_challenge_field', [u''])[0]
            # data['%s-recaptcha_response_field' % kwargs['prefix']] = \
            #     data.pop('recaptcha_response_field', [u''])[0]
            data._mutable = False
            # Since data could have been passed eith as an arg or kwarg, set
            # the right one to the new data
            if 'data' in kwargs:
                kwargs['data'] = data
            else:
                args = (args[0], data) + args[2:]
        super(miniSASSregistrationForm, self).__init__(*args, **kwargs)
        # self._recaptcha_public_key = getattr(self, 'recaptcha_public_key', getattr(settings, 'RECAPTCHA_PUBLIC_KEY', None))
        # self._recaptcha_private_key = getattr(self, 'recaptcha_private_key', getattr(settings, 'RECAPTCHA_PRIVATE_KEY', None))
        # self._recaptcha_theme = getattr(self, 'recaptcha_theme', getattr(settings, 'RECAPTCHA_THEME', 'clean'))
        # self.fields['recaptcha_response_field'].widget.public_key = self._recaptcha_public_key
        # self.fields['recaptcha_response_field'].widget.theme = self._recaptcha_theme
        # Move the ReCAPTCHA fields to the end of the form
        # self.fields['recaptcha_challenge_field'] = self.fields.pop('recaptcha_challenge_field')
        # self.fields['recaptcha_response_field'] = self.fields.pop('recaptcha_response_field')
        # Override inherited fields' help/error texts
        self.fields['username'].help_text = \
            _(u"Public username (don't use any spaces)")
        self.fields['username'].error_messages={'invalid': _("The username may only contain letters, numbers and @, fullstop, plus, minus or underscore characters. NO SPACES.")}
        self.fields['email'].help_text = _(u"Kept confidential")
        self.fields['organisation_type'].choices = _get_organisation_types()
        self.fields['country'].choices = _get_countries()
        # Explicit display order for the rendered form
        self.fields.keyOrder = [
            'username',
            'firstname', 'lastname',
            'email',
            'organisation_type',
            'organisation_name',
            'country',
            'password1',
            'password2',
            # 'recaptcha_challenge_field',
            # 'recaptcha_response_field'
        ]
    # def clean_recaptcha_response_field(self):
    #     if 'recaptcha_challenge_field' in self.cleaned_data:
    #         self._validate_captcha()
    #     return self.cleaned_data['recaptcha_response_field']
    # def clean_recaptcha_challenge_field(self):
    #     if 'recaptcha_response_field' in self.cleaned_data:
    #         self._validate_captcha()
    #     return self.cleaned_data['recaptcha_challenge_field']
    # def _validate_captcha(self):
    #     if not self.recaptcha_always_validate:
    #         rcf = self.cleaned_data['recaptcha_challenge_field']
    #         rrf = self.cleaned_data['recaptcha_response_field']
    #         if rrf == '':
    #             raise forms.ValidationError(_('You did not enter the two words shown in the image.'))
    #         else:
    #             from recaptcha.client import captcha as recaptcha
    #             ip = self._request.META['REMOTE_ADDR']
    #             check = recaptcha.submit(rcf, rrf, self._recaptcha_private_key, ip)
    #             if not check.is_valid:
    #                 raise forms.ValidationError(_('The words you entered did not match the image.'))
| gpl-3.0 |
abhilashnta/edx-platform | lms/djangoapps/instructor/offline_gradecalc.py | 58 | 3278 | """
======== Offline calculation of grades =============================================================
Computing grades of a large number of students can take a long time. These routines allow grades to
be computed offline, by a batch process (eg cronjob).
The grades are stored in the OfflineComputedGrade table of the courseware model.
"""
import json
import time
from json import JSONEncoder
from courseware import grades, models
from courseware.courses import get_course_by_id
from django.contrib.auth.models import User
from instructor.utils import DummyRequest
class MyEncoder(JSONEncoder):
    """JSON encoder that serialises namedtuples (anything with _asdict)
    as JSON objects instead of arrays.

    NOTE(review): this overrides the private JSONEncoder._iterencode hook,
    which only exists on older Python versions -- on newer interpreters the
    override is never invoked; verify against the deployed runtime.
    """
    def _iterencode(self, obj, markers=None):
        if isinstance(obj, tuple) and hasattr(obj, '_asdict'):
            # namedtuple: encode its fields as a dict
            gen = self._iterencode_dict(obj._asdict(), markers)
        else:
            gen = JSONEncoder._iterencode(self, obj, markers)
        for chunk in gen:
            yield chunk
def offline_grade_calculation(course_key):
    '''
    Compute grades for all students for a specified course, and save results to the DB.

    Writes one OfflineComputedGrade row per enrolled student and appends an
    OfflineComputedGradeLog entry recording duration and student count.
    Intended to run from a management command (hence the print statements).
    '''
    tstart = time.time()
    enrolled_students = User.objects.filter(
        courseenrollment__course_id=course_key,
        courseenrollment__is_active=1
    ).prefetch_related("groups").order_by('username')
    enc = MyEncoder()
    print "{} enrolled students".format(len(enrolled_students))
    course = get_course_by_id(course_key)
    for student in enrolled_students:
        # grades.grade() needs a request-like object carrying the user
        request = DummyRequest()
        request.user = student
        request.session = {}
        gradeset = grades.grade(student, request, course, keep_raw_scores=True)
        gs = enc.encode(gradeset)
        # Upsert the stored gradeset for this (student, course) pair
        ocg, _created = models.OfflineComputedGrade.objects.get_or_create(user=student, course_id=course_key)
        ocg.gradeset = gs
        ocg.save()
        print "%s done" % student  # print statement used because this is run by a management command
    tend = time.time()
    dt = tend - tstart
    ocgl = models.OfflineComputedGradeLog(course_id=course_key, seconds=dt, nstudents=len(enrolled_students))
    ocgl.save()
    print ocgl
    print "All Done!"
def offline_grades_available(course_key):
    """Return the most recent OfflineComputedGradeLog entry for the course,
    or False when no offline grades have ever been computed for it."""
    logs = models.OfflineComputedGradeLog.objects.filter(course_id=course_key)
    return logs.latest('created') if logs else False
def student_grades(student, request, course, keep_raw_scores=False, use_offline=False):
    '''
    This is the main interface to get grades. It has the same parameters as grades.grade, as well
    as use_offline. If use_offline is True then this will look for an offline computed gradeset in the DB.

    When use_offline is set but no offline gradeset exists, returns a
    gradeset-shaped dict with empty scores and an error message instead of
    raising.
    '''
    if not use_offline:
        # Live computation path
        return grades.grade(student, request, course, keep_raw_scores=keep_raw_scores)
    try:
        ocg = models.OfflineComputedGrade.objects.get(user=student, course_id=course.id)
    except models.OfflineComputedGrade.DoesNotExist:
        return dict(
            raw_scores=[],
            section_breakdown=[],
            msg='Error: no offline gradeset available for {}, {}'.format(student, course.id)
        )
    # Stored gradeset is a JSON-encoded string (see offline_grade_calculation)
    return json.loads(ocg.gradeset)
| agpl-3.0 |
julianprabhakar/eden_car | modules/s3db/skeleton.py | 15 | 7579 | # -*- coding: utf-8 -*-
"""
This is just a commented template to copy/paste from when implementing
new models. Be sure you replace this docstring by something more
appropriate, e.g. a short module description and a license statement.
The module prefix is the same as the filename (without the ".py"), in this
case "skeleton". Remember to always add an import statement for your module
to:
models/00_tables.py
like:
import eden.skeleton
(Yeah - not this one of course :P it's just an example)
"""
# mandatory __all__ statement:
#
# - all classes in the name list will be initialized with the
# module prefix as only parameter. Subclasses of S3Model
# support this automatically, and run the model() method
# if the module is enabled in deployment_settings, otherwise
# the default() method.
#
# - all other names in the name list will be added to response.s3
# if their names start with the module prefix plus underscore
#
# NOTE: entries must match the defined names exactly; "SkeletonDataModel"
# did not match the class, which is named S3SkeletonDataModel, so the
# model loader could never resolve it.
__all__ = ("S3SkeletonDataModel",
           "skeleton_example_represent"
           )
# The following import statements are needed in almost every model
# (you may need more than this in your particular case). To
# import classes from s3, use from + relative path like below
#
from gluon import *
from gluon.storage import Storage
from ..s3 import *
from s3layouts import S3AddResourceLink
# =============================================================================
# Define a new class as subclass of S3Model
# => you can define multiple of these classes within the same module, each
# of them will be initialized only when one of the declared names gets
# requested from s3db
# => remember to list all model classes in __all__, otherwise they won't ever
# be loaded.
#
class S3SkeletonDataModel(S3Model):
    """Template data model (copy/paste skeleton) defining the
    skeleton_example table and its reusable foreign-key field."""
    # Declare all the names this model can auto-load, i.e. all tablenames
    # and all response.s3 names which are defined here. If you omit the "names"
    # variable, then this class will serve as a fallback model for this module
    # in case a requested name cannot be found in one of the other model classes
    #
    names = ("skeleton_example",
             "skeleton_example_id",
             )
    # Define a function model() which takes no parameters (except self):
    def model(self):
        """Define tables and return globals for the loaded model."""
        # You will most likely need (at least) these:
        db = current.db
        T = current.T
        # This one may be useful:
        settings = current.deployment_settings
        # Now define your table(s),
        # -> always use self.define_table instead of db.define_table, this
        #    makes sure the table won't be re-defined if it's already in db
        # -> use s3_meta_fields to include meta fields (not s3_meta_fields!),
        #    of course this needs the s3 assignment above
        tablename = "skeleton_example"
        self.define_table(tablename,
                          Field("name"),
                          *s3_meta_fields())
        # Use self.configure to configure your model (or current.s3db.configure)
        self.configure(tablename,
                       listadd=False)
        # The following shortcuts for S3 model functions are available (make
        # sure you do not overwrite them):
        #
        #   self.define_table => db.define_table (repeat-safe variant)
        #   self.super_entity => super_entity
        #   self.super_key => super_key
        #   self.super_link => super_link
        #   self.add_components => s3db.add_components
        #   self.configure => s3db.configure
        #   self.table => s3db.table
        #
        # If you need to reference external tables, always use the table-method.
        # This will automatically load the respective model unless it is already
        # loaded at this point:
        xy_table = self.table("xy_table")
        # Alternatively, you can also use on of these:
        xy_table = self.xy_table
        xy_table = self["xy_table"]
        # The following two are equivalent:
        xy_variable = self.xy_variable
        # and:
        xy_variable = response.s3.xy_variable
        # However, if "xy_variable" is also a tablename, then the first
        # variant would return that table instead. Thus, make sure your
        # response.s3-global variables do not use tablenames as names
        # You can define ReusableFields,
        # -> make sure you prefix their names properly with the module prefix:
        skeleton_example_id = S3ReusableField("skeleton_example_id", "reference %s" % tablename,
                                              label = T("Skeleton Example"),
                                              requires = IS_EMPTY_OR(IS_ONE_OF(db,
                                                          "skeleton_example.id")))
        # Pass names back to global scope (s3.*)
        return dict(
            skeleton_example_id=skeleton_example_id,
        )
    # -------------------------------------------------------------------------
    @staticmethod
    def defaults():
        """
        Return safe defaults for model globals, this will be called instead
        of model() in case the model has been deactivated in
        deployment_settings.

        You don't need this function in case your model is mandatory anyway.
        """
        # Unreadable/unwritable integer stand-in keeps forms working
        return dict(
            skeleton_example_id = S3ReusableField("skeleton_example_id",
                                                  "integer",
                                                  readable=False,
                                                  writable=False),
        )
    # ---------------------------------------------------------------------
    # Static so that calling it doesn't require loading the models
    @staticmethod
    def skeleton_example_onvalidation(form):
        """ Form validation """
        db = current.db
        # Note that we don't need to use s3db here since this is a method of the class,
        # so the table must have loaded
        table = db.skeleton_example
        query = (table.id == form.vars.id)
        # NOTE(review): 'record' is fetched but never used -- template
        # placeholder showing how to look up the submitted record
        record = db(query).select(table.name,
                                  limitby=(0, 1)).first()
        return
# =============================================================================
# Module-global functions will automatically be added to response.s3 if
# they use the module prefix and are listed in __all__
#
# Represents are good to put here as they can be put places without loading the
# models at that time
#
def skeleton_example_represent(id):
    """
    Represent a skeleton_example record as its human-readable name.

    @param id: the skeleton_example record ID (may be None/0/empty)
    @return: the record's name, the standard "none" message when id is
             empty, or UNKNOWN_OPT when the record does not exist
    """
    if not id:
        # Don't do a DB lookup if we have no id
        # Instead return a consistent representation of a null value
        return current.messages["NONE"]
    # Accessing the table via s3db loads the model on demand if it has not
    # been defined yet (same mechanism as self.table in the model class,
    # but "self" is not available in a module-level function).
    # FIX: was s3db.skeleton_table, which does not match the tablename
    # "skeleton_example" used everywhere else in this model.
    s3db = current.s3db
    table = s3db.skeleton_example
    db = current.db
    query = (table.id == id)
    record = db(query).select(table.name,
                              limitby=(0, 1)).first()
    try:
        # Try is faster than an if-check for the common case where it works
        return record.name
    except AttributeError:
        # record is None => data inconsistency (orphaned reference).
        # FIX: narrowed the original bare "except:" to the exception this
        # access can actually raise, so unrelated errors are not swallowed.
        return current.messages.UNKNOWN_OPT
# END =========================================================================
| mit |
kwailamchan/programming-languages | python/facepp/cmdtools/cmdtool.py | 8 | 2521 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
# $File: cmdtool.py
# $Date: Sat Apr 06 15:42:43 2013 +0800
# $Author: jiakai@megvii.com
#
# This program is free software. It comes without any warranty, to
# the extent permitted by applicable law. You can redistribute it
# and/or modify it under the terms of the Do What The Fuck You Want
# To Public License, Version 2, as published by Sam Hocevar. See
# http://sam.zoy.org/wtfpl/COPYING (copied as below) for more details.
#
# DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
# Version 2, December 2004
#
# Copyright (C) 2004 Sam Hocevar <sam@hocevar.net>
#
# Everyone is permitted to copy and distribute verbatim or modified
# copies of this license document, and changing it is allowed as long
# as the name is changed.
#
# DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
# TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
#
# 0. You just DO WHAT THE FUCK YOU WANT TO.
def init():
    """Read apikey.cfg and return a configured Face++ API client."""
    import sys
    import os
    import os.path
    # This tool targets the Python 2 interpreter only.
    if sys.version_info.major != 2:
        sys.exit('Python 2 is required to run this program')
    # Locate the directory holding apikey.cfg: next to the frozen
    # executable for py2exe builds, otherwise next to this source file.
    cfg_dir = None
    if hasattr(sys, "frozen") and \
            sys.frozen in ("windows_exe", "console_exe"):
        cfg_dir = os.path.dirname(os.path.abspath(sys.executable))
        sys.path.append(cfg_dir)
        cfg_dir = os.path.join(cfg_dir, '..')
    else:
        cfg_dir = os.path.dirname(__file__)
    # apikey.cfg is executable Python that defines API_KEY / API_SECRET
    # (and optionally SERVER); exec'ing it binds them as locals here.
    with open(os.path.join(cfg_dir, 'apikey.cfg')) as cfg:
        exec(cfg.read())
    srv = locals().get('SERVER')
    from facepp import API
    return API(API_KEY, API_SECRET, srv = srv)
# Build the shared API client once at import time, then hide init so the
# interactive user only sees the ready-made "api" object.
api = init()
from facepp import API, File
del init
def _run():
# Rebind the module-global _run to a no-op so the shell can only be
# launched once per process.
global _run
_run = lambda: None
msg = """
===================================================
Welcome to Face++ Interactive Shell!
Here, you can explore and play with Face++ APIs :)
---------------------------------------------------
Getting Started:
0. Register a user and API key on http://www.faceplusplus.com
1. Write your API key/secret in apikey.cfg
2. Start this interactive shell and try various APIs
For example, to find all faces in a local image file, just type:
api.detection.detect(img = File(r'<path to the image file>'))
Enjoy!
"""
# Prefer IPython when available; fall back to the stdlib REPL.
try:
from IPython import embed
embed(banner2 = msg)
except ImportError:
import code
code.interact(msg, local = globals())
if __name__ == '__main__':
_run()
| mit |
wemanuel/smry | Crypto/Signature/PKCS1_v1_5.py | 126 | 8637 | # -*- coding: utf-8 -*-
#
# Signature/PKCS1-v1_5.py : PKCS#1 v1.5
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""
RSA digital signature protocol according to PKCS#1 v1.5
See RFC3447__ or the `original RSA Labs specification`__.
This scheme is more properly called ``RSASSA-PKCS1-v1_5``.
For example, a sender may authenticate a message using SHA-1 like
this:
>>> from Crypto.Signature import PKCS1_v1_5
>>> from Crypto.Hash import SHA
>>> from Crypto.PublicKey import RSA
>>>
>>> message = 'To be signed'
>>> key = RSA.importKey(open('privkey.der').read())
>>> h = SHA.new(message)
>>> signer = PKCS1_v1_5.new(key)
>>> signature = signer.sign(h)
At the receiver side, verification can be done using the public part of
the RSA key:
>>> key = RSA.importKey(open('pubkey.der').read())
>>> h = SHA.new(message)
>>> verifier = PKCS1_v1_5.new(key)
>>> if verifier.verify(h, signature):
>>> print "The signature is authentic."
>>> else:
>>> print "The signature is not authentic."
:undocumented: __revision__, __package__
.. __: http://www.ietf.org/rfc/rfc3447.txt
.. __: http://www.rsa.com/rsalabs/node.asp?id=2125
"""
__revision__ = "$Id$"
__all__ = [ 'new', 'PKCS115_SigScheme' ]
import Crypto.Util.number
from Crypto.Util.number import ceil_div
from Crypto.Util.asn1 import DerSequence, DerNull, DerOctetString
from Crypto.Util.py3compat import *
class PKCS115_SigScheme:
    """PKCS#1 v1.5 RSA signature scheme (RSASSA-PKCS1-v1_5, RFC3447)."""

    def __init__(self, key):
        """Initialize this PKCS#1 v1.5 signature scheme object.

        :Parameters:
         key : an RSA key object
          With a private half, both signing and verification are possible;
          with a public half, only verification is possible.
        """
        self._key = key

    def can_sign(self):
        """Return True if this cipher object can be used for signing messages."""
        return self._key.has_private()

    def sign(self, mhash):
        """Produce the PKCS#1 v1.5 signature of a message.

        Implements ``RSASSA-PKCS1-V1_5-SIGN`` (RFC3447, section 8.2.1).

        :Parameters:
         mhash : hash object
          The hash carried out over the message (a `Crypto.Hash` object).
        :Return: The signature encoded as a string.
        :Raise ValueError:
         If the RSA key length is too short for the chosen hash.
        :Raise TypeError:
         If the RSA key has no private half.
        """
        # See 8.2.1 in RFC3447
        mod_bits = Crypto.Util.number.size(self._key.n)
        octets = ceil_div(mod_bits, 8)     # modulus size in bytes
        # Step 1: deterministic EMSA-PKCS1-v1_5 encoding of the digest
        encoded = EMSA_PKCS1_V1_5_ENCODE(mhash, octets)
        # Step 2a (OS2IP) and 2b (RSASP1): raw RSA private-key operation
        raw_sig = self._key.decrypt(encoded)
        # Step 2c (I2OSP): left-pad with zero bytes to full modulus length
        return bchr(0x00) * (octets - len(raw_sig)) + raw_sig

    def verify(self, mhash, S):
        """Verify that a PKCS#1 v1.5 signature is authentic.

        Implements ``RSASSA-PKCS1-V1_5-VERIFY`` (RFC3447, section 8.2.2).

        :Parameters:
         mhash : hash object
          The hash carried out over the message (a `Crypto.Hash` object).
         S : string
          The signature that needs to be validated.
        :Return: True if verification is correct. False otherwise.
        """
        # See 8.2.2 in RFC3447
        mod_bits = Crypto.Util.number.size(self._key.n)
        octets = ceil_div(mod_bits, 8)     # modulus size in bytes
        # Step 1: a well-formed signature is exactly as long as the modulus
        if len(S) != octets:
            return 0
        # Steps 2a (OS2IP) and 2b (RSAVP1): raw RSA public-key operation.
        # Note that the signature must be smaller than the modulus, but
        # RSA.py won't complain about it (TODO: fix in the RSA object).
        raw = self._key.encrypt(S, 0)[0]
        # Step 2c (I2OSP): left-pad with zero bytes
        received = bchr(0x00) * (octets - len(raw)) + raw
        # Step 3: re-create the expected encoding from the digest
        try:
            expected = EMSA_PKCS1_V1_5_ENCODE(mhash, octets)
        except ValueError:
            return 0
        # Step 4: compare the FULL encodings rather than component by
        # component, to avoid padding-scheme attacks such as
        # Bleichenbacher's (see
        # http://www.mail-archive.com/cryptography@metzdowd.com/msg06537).
        return received == expected
def EMSA_PKCS1_V1_5_ENCODE(hash, emLen):
    """
    Implement the ``EMSA-PKCS1-V1_5-ENCODE`` function, as defined
    in PKCS#1 v2.1 (RFC3447, 9.2).

    ``EMSA-PKCS1-V1_5-ENCODE`` actually accepts the message ``M`` as input,
    and hash it internally. Here, we expect that the message has already
    been hashed instead.

    :Parameters:
     hash : hash object
            The hash object that holds the digest of the message being signed.
     emLen : int
            The length the final encoding must have, in bytes.

    :attention: the early standard (RFC2313) stated that ``DigestInfo``
        had to be BER-encoded. This means that old signatures
        might have length tags in indefinite form, which
        is not supported in DER. Such encoding cannot be
        reproduced by this function.

    :attention: the same standard defined ``DigestAlgorithm`` to be
        of ``AlgorithmIdentifier`` type, where the PARAMETERS
        item is optional. Encodings for ``MD2/4/5`` without
        ``PARAMETERS`` cannot be reproduced by this function.

    :Return: An ``emLen`` byte long string that encodes the hash.
    :Raise ValueError: if ``emLen`` is too small for this hash's digest.
    """
    # First, build the ASN.1 DER object DigestInfo:
    #
    #   DigestInfo ::= SEQUENCE {
    #       digestAlgorithm AlgorithmIdentifier,
    #       digest OCTET STRING
    #   }
    #
    # where digestAlgorithm identifies the hash function and shall be an
    # algorithm ID with an OID in the set PKCS1-v1-5DigestAlgorithms
    # (MD2, MD5, SHA-1, SHA-256, SHA-384, SHA-512).
    digestAlgo  = DerSequence([hash.oid, DerNull().encode()])
    digest      = DerOctetString(hash.digest())
    digestInfo  = DerSequence([
                    digestAlgo.encode(),
                    digest.encode()
                    ]).encode()

    # We need at least 11 bytes for the remaining data: 3 fixed bytes and
    # at least 8 bytes of padding).
    if emLen < len(digestInfo) + 11:
        # FIX: report the digest length in bytes via hash.digest(); the
        # original did len(digest) on the DerOctetString wrapper, which does
        # not support len() and so masked this ValueError with a TypeError.
        # (Also fixed the "algorith" typo in the message.)
        raise ValueError("Selected hash algorithm has a too long digest (%d bytes)." % len(hash.digest()))
    PS = bchr(0xFF) * (emLen - len(digestInfo) - 3)
    return b("\x00\x01") + PS + bchr(0x00) + digestInfo
def new(key):
    """Create a `PKCS115_SigScheme` object for PKCS#1 v1.5 signing/verifying.

    :Parameters:
     key : RSA key object
      The key to use to sign or verify the message. This is a
      `Crypto.PublicKey.RSA` object. Signing is only possible when *key*
      is a private RSA key.
    :Return: a fresh `PKCS115_SigScheme` wrapping *key*.
    """
    return PKCS115_SigScheme(key)
| apache-2.0 |
jjffryan/pymtl | pclib/test/TestRandomDelay.py | 8 | 2895 | #=======================================================================
# TestRandomDelay
#=======================================================================
import random
from pymtl import *
from pclib.ifcs import InValRdyBundle, OutValRdyBundle
#-----------------------------------------------------------------------
# TestRandomDelay
#-----------------------------------------------------------------------
class TestRandomDelay( Model ):
'Inserts random delays between input and output val/rdy interfaces.'
# The default seed is fixed so that, for a given max_random_delay, the
# inserted delays are identical across simulation runs.
def __init__( s, dtype, max_random_delay = 0, seed=0xb601bc01 ):
s.in_ = InValRdyBundle ( dtype )
s.out = OutValRdyBundle( dtype )
# We keep our own internal random number generator to keep the state
# of this generator completely separate from other generators. This
# ensure that any delays are reproducable.
s.rgen = random.Random()
s.rgen.seed(seed)
# If the maximum random delay is set to zero, then the inputs are
# directly connected to the outputs.
s.max_random_delay = max_random_delay
if max_random_delay == 0:
s.connect( s.in_, s.out )
# Buffer to hold message
s.buf = None
s.buf_full = False
s.counter = 0
#---------------------------------------------------------------------
# Tick
#---------------------------------------------------------------------
# Sequential (posedge-clk) logic: one-entry buffer plus a countdown
# timer; the buffered message is only presented once the timer hits 0.
@s.tick
def tick():
# Ideally we could just not include this posedge_clk concurrent block
# at all in the simulation. We should be able to do this when we have
# an explicit elaborate function.
if s.max_random_delay == 0:
return
# At the end of the cycle, we AND together the val/rdy bits to
# determine if the input/output message transactions occured.
in_go = s.in_.val and s.in_.rdy
out_go = s.out.val and s.out.rdy
# If the output transaction occured, then clear the buffer full bit.
# Note that we do this _first_ before we process the input
# transaction so we can essentially pipeline this control logic.
if out_go:
s.buf_full = False
# If the input transaction occured, then write the input message into
# our internal buffer, update the buffer full bit, and reset the
# counter.
if in_go:
s.buf = s.in_.msg[:]
s.buf_full = True
s.counter = s.rgen.randint( 1, s.max_random_delay )
if s.counter > 0:
s.counter = s.counter - 1
# The output message is always the output of the buffer
if s.buf_full:
s.out.msg.next = s.buf
# The input is ready and the output is valid if counter is zero
s.in_.rdy.next = ( s.counter == 0 ) and not s.buf_full
s.out.val.next = ( s.counter == 0 ) and s.buf_full
# Single-line debug trace: input bundle, remaining delay, output bundle.
def line_trace( s ):
return "{} ({:2}) {}".format( s.in_, s.counter, s.out )
| bsd-3-clause |
EvolutionClip/pyload | module/plugins/hooks/OverLoadMe.py | 1 | 1605 | # -*- coding: utf-8 -*-
from module.plugins.internal.MultiHook import MultiHook
class OverLoadMe(MultiHook):
__name__ = "OverLoadMe"
__type__ = "hook"
__version__ = "0.04"
__config__ = [("pluginmode" , "all;listed;unlisted", "Use for plugins" , "all"),
("pluginlist" , "str" , "Plugin list (comma separated)" , "" ),
("revertfailed" , "bool" , "Revert to standard download if fails", True ),
("retry" , "int" , "Number of retries before revert" , 10 ),
("retryinterval" , "int" , "Retry interval in minutes" , 1 ),
("reload" , "bool" , "Reload plugin list" , True ),
("reloadinterval", "int" , "Reload interval in hours" , 12 ),
("ssl" , "bool" , "Use HTTPS" , True )]
__description__ = """Over-Load.me hook plugin"""
__license__ = "GPLv3"
__authors__ = [("marley", "marley@over-load.me")]
# Fetch the comma-separated hoster list from the Over-Load.me API and
# return it as a list of cleaned hoster names.
# NOTE(review): the "auth" value is a hard-coded, shared credential
# embedded in source - anyone reading this file can use it.
def getHosters(self):
https = "https" if self.getConfig("ssl") else "http"
page = self.getURL(https + "://api.over-load.me/hoster.php",
get={'auth': "0001-cb1f24dadb3aa487bda5afd3b76298935329be7700cd7-5329be77-00cf-1ca0135f"}).replace("\"", "").strip()
self.logDebug("Hosterlist", page)
return [x.strip() for x in page.split(",") if x.strip()]
| gpl-3.0 |
RuudBurger/CouchPotatoV1 | library/hachoir_parser/misc/gnome_keyring.py | 86 | 6255 | """
Gnome keyring parser.
Sources:
- Gnome Keyring source code,
function generate_file() in keyrings/gkr-keyring.c,
Author: Victor Stinner
Creation date: 2008-04-09
"""
from hachoir_core.tools import paddingSize
from hachoir_parser import Parser
from hachoir_core.field import (FieldSet,
Bit, NullBits, NullBytes,
UInt8, UInt32, String, RawBytes, Enum,
TimestampUnix64, CompressedField,
SubFile)
from hachoir_core.endian import BIG_ENDIAN
# SHA-256 helper with a graceful fallback: if hashlib is unavailable, the
# replacement raises only when hashing is actually attempted, so the rest
# of the parser still imports cleanly.
try:
import hashlib
def sha256(data):
hash = hashlib.new('sha256')
hash.update(data)
return hash.digest()
except ImportError:
def sha256(data):
raise ImportError("hashlib module is missing")
# Optional AES decryption of the keyring payload. Without PyCrypto,
# Deflate() degrades to the identity and the payload stays encrypted.
# NOTE(review): password/salt here are fixed placeholders ("x"*8 / zero
# salt), not the user's real keyring credentials - TODO confirm intent.
try:
from Crypto.Cipher import AES
class DeflateStream:
def __init__(self, stream):
hash_iterations = 1234
password = "x" * 8
salt = "\0" * 8
key, iv = generate_key(password, salt, hash_iterations)
self.cipher = AES.new(key, AES.MODE_CBC, iv)
def __call__(self, size, data=None):
if data is None:
return ''
return self.cipher.decrypt(data)
def Deflate(field):
CompressedField(field, DeflateStream)
return field
except ImportError:
def Deflate(field):
return field
class KeyringString(FieldSet):
# Length-prefixed UTF-8 string as stored in a Gnome keyring file.
def createFields(self):
yield UInt32(self, "length")
length = self["length"].value
# A length of 0xffffffff is the on-disk marker for a NULL string:
# no text bytes follow.
if length == 0xffffffff:
return
yield String(self, "text", length, charset="UTF-8")
def createValue(self):
if "text" in self:
return self["text"].value
else:
return u''
def createDescription(self):
if "text" in self:
return self["text"].value
else:
return u"(empty string)"
class Attribute(FieldSet):
# Keyring item attribute: name plus a typed value
# (type 0 = string, type 1 = 32-bit unsigned integer).
def createFields(self):
yield KeyringString(self, "name")
yield UInt32(self, "type")
type = self["type"].value
if type == 0:
yield KeyringString(self, "value")
elif type == 1:
yield UInt32(self, "value")
else:
raise TypeError("Unknown attribute type (%s)" % type)
def createDescription(self):
return 'Attribute "%s"' % self["name"].value
class ACL(FieldSet):
# Access-control entry: allowed operations bitmask plus the display
# name and path of the application granted access.
def createFields(self):
yield UInt32(self, "types_allowed")
yield KeyringString(self, "display_name")
yield KeyringString(self, "pathname")
yield KeyringString(self, "reserved[]")
yield UInt32(self, "reserved[]")
class Item(FieldSet):
# Unencrypted part of a keyring item: id, type, and its attributes.
def createFields(self):
yield UInt32(self, "id")
yield UInt32(self, "type")
yield UInt32(self, "attr_count")
for index in xrange(self["attr_count"].value):
yield Attribute(self, "attr[]")
def createDescription(self):
return "Item #%s: %s attributes" % (self["id"].value, self["attr_count"].value)
class Items(FieldSet):
# Count-prefixed array of Item entries.
def createFields(self):
yield UInt32(self, "count")
for index in xrange(self["count"].value):
yield Item(self, "item[]")
class EncryptedItem(FieldSet):
# One decrypted keyring item: secret, timestamps, attributes and ACLs.
def createFields(self):
yield KeyringString(self, "display_name")
yield KeyringString(self, "secret")
yield TimestampUnix64(self, "mtime")
yield TimestampUnix64(self, "ctime")
yield KeyringString(self, "reserved[]")
for index in xrange(4):
yield UInt32(self, "reserved[]")
yield UInt32(self, "attr_count")
for index in xrange(self["attr_count"].value):
yield Attribute(self, "attr[]")
yield UInt32(self, "acl_count")
for index in xrange(self["acl_count"].value):
yield ACL(self, "acl[]")
# Disabled 16-byte alignment padding, kept for reference:
# size = 8 # paddingSize((self.stream.size - self.current_size) // 8, 16)
# if size:
# yield NullBytes(self, "hash_padding", size, "16 bytes alignment")
class EncryptedData(Parser):
# Sub-parser for the decrypted "encrypted" blob of a keyring file:
# an MD5 digest followed by EncryptedItem records up to the padding.
PARSER_TAGS = {
"id": "gnomeencryptedkeyring",
"min_size": 16*8,
"description": u"Gnome encrypted keyring",
}
endian = BIG_ENDIAN
# Always valid: this parser is only ever invoked on an embedded SubFile,
# never used for stand-alone format detection.
def validate(self):
return True
def createFields(self):
yield RawBytes(self, "md5", 16)
# 77 bytes is the minimum size of one EncryptedItem; anything
# shorter at the tail is AES block padding.
while True:
size = (self.size - self.current_size) // 8
if size < 77:
break
yield EncryptedItem(self, "item[]")
size = paddingSize(self.current_size // 8, 16)
if size:
yield NullBytes(self, "padding_align", size)
class GnomeKeyring(Parser):
# Top-level parser for a Gnome keyring file: fixed header, keyring
# metadata, plain-text item index, then the AES-encrypted secrets blob.
MAGIC = "GnomeKeyring\n\r\0\n"
PARSER_TAGS = {
"id": "gnomekeyring",
"category": "misc",
"magic": ((MAGIC, 0),),
"min_size": 47*8,
"description": u"Gnome keyring",
}
CRYPTO_NAMES = {
0: u"AEL",
}
HASH_NAMES = {
0: u"MD5",
}
endian = BIG_ENDIAN
def validate(self):
if self.stream.readBytes(0, len(self.MAGIC)) != self.MAGIC:
return u"Invalid magic string"
return True
def createFields(self):
yield String(self, "magic", len(self.MAGIC), 'Magic string (%r)' % self.MAGIC, charset="ASCII")
yield UInt8(self, "major_version")
yield UInt8(self, "minor_version")
yield Enum(UInt8(self, "crypto"), self.CRYPTO_NAMES)
yield Enum(UInt8(self, "hash"), self.HASH_NAMES)
yield KeyringString(self, "keyring_name")
yield TimestampUnix64(self, "mtime")
yield TimestampUnix64(self, "ctime")
yield Bit(self, "lock_on_idle")
yield NullBits(self, "reserved[]", 31, "Reserved for future flags")
yield UInt32(self, "lock_timeout")
yield UInt32(self, "hash_iterations")
yield RawBytes(self, "salt", 8)
yield NullBytes(self, "reserved[]", 16)
yield Items(self, "items")
yield UInt32(self, "encrypted_size")
# The trailing blob is handed to EncryptedData, optionally through the
# AES-CBC decryption wrapper installed by Deflate().
yield Deflate(SubFile(self, "encrypted", self["encrypted_size"].value, "AES128 CBC", parser_class=EncryptedData))
def generate_key(password, salt, hash_iterations):
    """Derive a 16-byte AES key and 16-byte IV by iterated SHA-256.

    The salted password is hashed hash_iterations times in total; the
    digest is then split into (key, iv).
    """
    digest = sha256(password + salt)
    for _ in xrange(hash_iterations - 1):
        digest = sha256(digest)
    return digest[:16], digest[16:]
| gpl-3.0 |
weaver-viii/h2o-3 | h2o-py/tests/testdir_jira/pyunit_NOPASS_hex_1897_glm_offset.py | 3 | 2412 | import sys
sys.path.insert(1, "../../")
import h2o
# Regression test for HEX-1897: GLM residual deviance with/without an
# offset column must match reference values computed in R.
# NOTE(review): the last two checks print "binomial model" but actually
# fit family="poisson" - the labels are misleading (left unchanged here
# since they are runtime output).
def offset_1897(ip, port):
print 'Checking binomial models for GLM with and without offset'
print 'Import prostate dataset into H2O and R...'
prostate_hex = h2o.import_file(h2o.locate("smalldata/prostate/prostate.csv"))
print "Checking binomial model without offset..."
prostate_glm_h2o = h2o.glm(x=prostate_hex["RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON"],
y=prostate_hex["CAPSULE"], training_frame=prostate_hex, family="binomial", standardize=False)
print "h2o residual: {0}".format(prostate_glm_h2o.residual_deviance())
print "r residual: {0}".format(379.053509501537)
assert abs(379.053509501537 - prostate_glm_h2o.residual_deviance()) < 0.1
print "Checking binomial model with offset..."
prostate_glm_h2o = h2o.glm(x=prostate_hex["RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON", "AGE"],
y=prostate_hex["CAPSULE"], training_frame=prostate_hex, family="binomial",
offset_column = "AGE", standardize = False)
print "h2o residual: {0}".format(prostate_glm_h2o.residual_deviance())
print "r residual: {0}".format(1515.91815848623)
assert abs(1515.91815848623 - prostate_glm_h2o.residual_deviance()) < 0.1
print "Checking binomial model without offset..."
prostate_glm_h2o = h2o.glm(x=prostate_hex["RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON"],
y=prostate_hex["CAPSULE"], training_frame=prostate_hex, family="poisson", standardize=False)
print "h2o residual: {0}".format(prostate_glm_h2o.residual_deviance())
print "r residual: {0}".format(216.339989007507)
assert abs(216.339989007507 - prostate_glm_h2o.residual_deviance()) < 0.1
print "Checking binomial model with offset..."
prostate_glm_h2o = h2o.glm(x=prostate_hex["RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON", "AGE"],
y=prostate_hex["CAPSULE"], training_frame=prostate_hex, family="poisson",
offset_column = "AGE", standardize = False)
print "h2o residual: {0}".format(prostate_glm_h2o.residual_deviance())
print "r residual: {0}".format(2761.76218461138)
assert abs(2761.76218461138 - prostate_glm_h2o.residual_deviance()) < 0.1
if __name__ == "__main__":
h2o.run_test(sys.argv, offset_1897)
| apache-2.0 |
dennex/DJMarlin | createTemperatureLookupMarlin.py | 56 | 4662 | #!/usr/bin/python
#
# Creates a C code lookup table for doing ADC to temperature conversion
# on a microcontroller
# based on: http://hydraraptor.blogspot.com/2007/10/measuring-temperature-easy-way.html
"""Thermistor Value Lookup Table Generator
Generates lookup to temperature values for use in a microcontroller in C format based on:
http://hydraraptor.blogspot.com/2007/10/measuring-temperature-easy-way.html
The main use is for Arduino programs that read data from the circuit board described here:
http://make.rrrf.org/ts-1.0
Usage: python createTemperatureLookup.py [options]
Options:
-h, --help show this help
--rp=... pull-up resistor
--t0=ttt:rrr low temperature temperature:resistance point (around 25C)
--t1=ttt:rrr middle temperature temperature:resistance point (around 150C)
--t2=ttt:rrr high temperature temperature:resistance point (around 250C)
--num-temps=... the number of temperature points to calculate (default: 20)
"""
from math import *
import sys
import getopt
class Thermistor:
    "Class to do the thermistor maths"

    def __init__(self, rp, t1, r1, t2, r2, t3, r3):
        """Fit Steinhart-Hart coefficients through three calibration points.

        rp is the pull-up resistance; (t1,r1), (t2,r2), (t3,r3) are
        temperature(C)/resistance(ohm) calibration pairs.
        """
        # Convert the calibration temperatures to Kelvin.
        k1 = t1 + 273.15                  # low temperature (25C)
        k2 = t2 + 273.15                  # middle temperature (150C)
        k3 = t3 + 273.15                  # high temperature (250C)
        self.rp = rp                      # pull-up resistance
        self.vadc = 5.0                   # ADC reference
        self.vcc = 5.0                    # supply voltage to potential divider
        # Solve the 3x3 system for the extended Steinhart-Hart model
        # 1/T = c1 + c2*ln(R) + c3*ln(R)^3.
        a1, a2, a3 = log(r1), log(r2), log(r3)
        z = a1 - a2
        y = a1 - a3
        x = 1 / k1 - 1 / k2
        w = 1 / k1 - 1 / k3
        v = pow(a1, 3) - pow(a2, 3)
        u = pow(a1, 3) - pow(a3, 3)
        c3 = (x - z * w / y) / (v - z * u / y)
        c2 = (x - c3 * v) / z
        self.c1 = 1 / k1 - c3 * pow(a1, 3) - c2 * a1
        self.c2 = c2
        self.c3 = c3

    def temp(self, adc):
        "Convert ADC reading into a temperature in Celcius"
        # 14-bit (oversampled) ADC value -> divider voltage -> resistance.
        volts = adc * self.vadc / (1024 * 16)
        r = self.rp * volts / (self.vcc - volts)
        lnr = log(r)
        inv_t = self.c1 + self.c2 * lnr + self.c3 * pow(lnr, 3)
        return 1 / inv_t - 273.15

    def adc(self, temp):
        "Convert temperature into a ADC reading"
        # Invert the cubic in ln(R) via Cardano's formula, then map the
        # resistance back through the voltage divider to an ADC count.
        y = (self.c1 - 1 / (temp + 273.15)) / (2 * self.c3)
        x = sqrt(pow(self.c2 / (3 * self.c3), 3) + pow(y, 2))
        r = exp(pow(x - y, 1.0 / 3) - pow(x + y, 1.0 / 3))
        return (r / (self.rp + r)) * (1024 * 16)
def main(argv):
rp = 4700;
t1 = 25;
r1 = 100000;
t2 = 150;
r2 = 1641.9;
t3 = 250;
r3 = 226.15;
num_temps = int(36);
try:
opts, args = getopt.getopt(argv, "h", ["help", "rp=", "t1=", "t2=", "t3=", "num-temps="])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ("-h", "--help"):
usage()
sys.exit()
elif opt == "--rp":
rp = int(arg)
elif opt == "--t1":
arg = arg.split(':')
t1 = float( arg[0])
r1 = float( arg[1])
elif opt == "--t2":
arg = arg.split(':')
t2 = float( arg[0])
r2 = float( arg[1])
elif opt == "--t3":
arg = arg.split(':')
t3 = float( arg[0])
r3 = float( arg[1])
elif opt == "--num-temps":
num_temps = int(arg)
max_adc = (1024 * 16) - 1
min_temp = 0
max_temp = 350
increment = int(max_adc/(num_temps-1));
t = Thermistor(rp, t1, r1, t2, r2, t3, r3)
tmp = (min_temp - max_temp) / (num_temps-1)
print tmp
temps = range(max_temp, min_temp + tmp, tmp);
print "// Thermistor lookup table for Marlin"
print "// ./createTemperatureLookup.py --rp=%s --t1=%s:%s --t2=%s:%s --t3=%s:%s --num-temps=%s" % (rp, t1, r1, t2, r2, t3, r3, num_temps)
print "#define NUMTEMPS %s" % (len(temps))
print "short temptable[NUMTEMPS][2] = {"
counter = 0
for temp in temps:
counter = counter +1
if counter == len(temps):
print " {%s, %s}" % (int(t.adc(temp)), temp)
else:
print " {%s, %s}," % (int(t.adc(temp)), temp)
print "};"
# Print the module docstring (the usage text at the top of this file).
def usage():
print __doc__
if __name__ == "__main__":
main(sys.argv[1:])
| gpl-3.0 |
sillydan1/WhatEverEngine | packages/IronPython.StdLib.2.7.5/content/Lib/pkgutil.py | 74 | 20013 | """Utilities to support packages."""
# NOTE: This module must remain compatible with Python 2.3, as it is shared
# by setuptools for distribution with Python 2.3 and up.
import os
import sys
import imp
import os.path
from types import ModuleType
__all__ = [
'get_importer', 'iter_importers', 'get_loader', 'find_loader',
'walk_packages', 'iter_modules', 'get_data',
'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
]
def read_code(stream):
"""Read a code object from an open .pyc stream, or None on bad magic."""
# This helper is needed in order for the PEP 302 emulation to
# correctly handle compiled files
import marshal
magic = stream.read(4)
if magic != imp.get_magic():
return None
stream.read(4) # Skip timestamp
return marshal.load(stream)
def simplegeneric(func):
    """Make a trivial single-dispatch generic function"""
    dispatch_table = {}

    def wrapper(*args, **kw):
        # Dispatch on the type of the first positional argument, walking
        # its MRO from most to least specific.
        target = args[0]
        try:
            klass = target.__class__
        except AttributeError:
            klass = type(target)
        try:
            mro = klass.__mro__
        except AttributeError:
            # Old-style class: graft onto object to synthesize an MRO.
            try:
                class klass(klass, object):
                    pass
                mro = klass.__mro__[1:]
            except TypeError:
                mro = object,  # must be an ExtensionClass or some such :(
        for base in mro:
            if base in dispatch_table:
                return dispatch_table[base](*args, **kw)
        else:
            # No registered implementation matched: use the default.
            return func(*args, **kw)

    try:
        wrapper.__name__ = func.__name__
    except (TypeError, AttributeError):
        pass    # Python 2.3 doesn't allow functions to be renamed

    def register(typ, func=None):
        # Usable directly (register(T, impl)) or as a decorator
        # (@register(T)).
        if func is None:
            return lambda f: register(typ, f)
        dispatch_table[typ] = func
        return func

    wrapper.__dict__ = func.__dict__
    wrapper.__doc__ = func.__doc__
    wrapper.register = register
    return wrapper
def walk_packages(path=None, prefix='', onerror=None):
"""Yields (module_loader, name, ispkg) for all modules recursively
on path, or, if path is None, all accessible modules.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
Note that this function must import all *packages* (NOT all
modules!) on the given path, in order to access the __path__
attribute to find submodules.
'onerror' is a function which gets called with one argument (the
name of the package which was being imported) if any exception
occurs while trying to import a package. If no onerror function is
supplied, ImportErrors are caught and ignored, while all other
exceptions are propagated, terminating the search.
Examples:
# list all modules python can access
walk_packages()
# list all submodules of ctypes
walk_packages(ctypes.__path__, ctypes.__name__+'.')
"""
# The mutable default on 'm' is deliberate: it acts as a per-call memo
# shared by all invocations of this particular 'seen' closure.
def seen(p, m={}):
if p in m:
return True
m[p] = True
for importer, name, ispkg in iter_modules(path, prefix):
yield importer, name, ispkg
if ispkg:
try:
__import__(name)
except ImportError:
if onerror is not None:
onerror(name)
except Exception:
if onerror is not None:
onerror(name)
else:
raise
else:
path = getattr(sys.modules[name], '__path__', None) or []
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
for item in walk_packages(path, name+'.', onerror):
yield item
def iter_modules(path=None, prefix=''):
"""Yields (module_loader, name, ispkg) for all submodules on path,
or, if path is None, all top-level modules on sys.path.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
"""
if path is None:
importers = iter_importers()
else:
importers = map(get_importer, path)
# Track emitted names so a module shadowed later on the path is
# reported only once (first importer wins).
yielded = {}
for i in importers:
for name, ispkg in iter_importer_modules(i, prefix):
if name not in yielded:
yielded[name] = 1
yield i, name, ispkg
#@simplegeneric
# Generic function (wrapped below): per-importer-type module listing;
# the default implementation defers to the importer's own iter_modules().
def iter_importer_modules(importer, prefix=''):
if not hasattr(importer, 'iter_modules'):
return []
return importer.iter_modules(prefix)
iter_importer_modules = simplegeneric(iter_importer_modules)
class ImpImporter:
"""PEP 302 Importer that wraps Python's "classic" import algorithm
ImpImporter(dirname) produces a PEP 302 importer that searches that
directory. ImpImporter(None) produces a PEP 302 importer that searches
the current sys.path, plus any modules that are frozen or built-in.
Note that ImpImporter does not currently support being used by placement
on sys.meta_path.
"""
def __init__(self, path=None):
self.path = path
def find_module(self, fullname, path=None):
# Note: we ignore 'path' argument since it is only used via meta_path
subname = fullname.split(".")[-1]
# A dotted name with path=None would mean a submodule search on
# sys.path, which classic import does not support.
if subname != fullname and self.path is None:
return None
if self.path is None:
path = None
else:
path = [os.path.realpath(self.path)]
try:
file, filename, etc = imp.find_module(subname, path)
except ImportError:
return None
return ImpLoader(fullname, file, filename, etc)
def iter_modules(self, prefix=''):
# Yield (name, ispkg) for each module/package directly under self.path.
if self.path is None or not os.path.isdir(self.path):
return
yielded = {}
import inspect
filenames = os.listdir(self.path)
filenames.sort() # handle packages before same-named modules
for fn in filenames:
modname = inspect.getmodulename(fn)
if modname=='__init__' or modname in yielded:
continue
path = os.path.join(self.path, fn)
ispkg = False
if not modname and os.path.isdir(path) and '.' not in fn:
modname = fn
for fn in os.listdir(path):
subname = inspect.getmodulename(fn)
if subname=='__init__':
ispkg = True
break
else:
continue # not a package
if modname and '.' not in modname:
yielded[modname] = 1
yield prefix + modname, ispkg
class ImpLoader:
"""PEP 302 Loader that wraps Python's "classic" import algorithm
"""
# Lazily-populated caches for the compiled code object and the source text.
code = source = None
def __init__(self, fullname, file, filename, etc):
self.file = file
self.filename = filename
self.fullname = fullname
# etc is the (suffix, mode, type) triple from imp.find_module().
self.etc = etc
def load_module(self, fullname):
self._reopen()
try:
mod = imp.load_module(fullname, self.file, self.filename, self.etc)
finally:
if self.file:
self.file.close()
# Note: we don't set __loader__ because we want the module to look
# normal; i.e. this is just a wrapper for standard import machinery
return mod
def get_data(self, pathname):
return open(pathname, "rb").read()
def _reopen(self):
# Re-open self.file in the mode appropriate for the module type if a
# previous operation closed it.
if self.file and self.file.closed:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self.file = open(self.filename, 'rU')
elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
self.file = open(self.filename, 'rb')
def _fix_name(self, fullname):
# This loader is bound to exactly one module name; normalize None and
# reject requests for any other module.
if fullname is None:
fullname = self.fullname
elif fullname != self.fullname:
raise ImportError("Loader for module %s cannot handle "
"module %s" % (self.fullname, fullname))
return fullname
def is_package(self, fullname):
fullname = self._fix_name(fullname)
return self.etc[2]==imp.PKG_DIRECTORY
def get_code(self, fullname=None):
fullname = self._fix_name(fullname)
if self.code is None:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
source = self.get_source(fullname)
self.code = compile(source, self.filename, 'exec')
elif mod_type==imp.PY_COMPILED:
self._reopen()
try:
self.code = read_code(self.file)
finally:
self.file.close()
elif mod_type==imp.PKG_DIRECTORY:
self.code = self._get_delegate().get_code()
return self.code
def get_source(self, fullname=None):
fullname = self._fix_name(fullname)
if self.source is None:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self._reopen()
try:
self.source = self.file.read()
finally:
self.file.close()
elif mod_type==imp.PY_COMPILED:
# For a .pyc/.pyo, fall back to the sibling .py if it exists.
if os.path.exists(self.filename[:-1]):
f = open(self.filename[:-1], 'rU')
self.source = f.read()
f.close()
elif mod_type==imp.PKG_DIRECTORY:
self.source = self._get_delegate().get_source()
return self.source
def _get_delegate(self):
# For a package directory, delegate to a loader for its __init__.
return ImpImporter(self.filename).find_module('__init__')
def get_filename(self, fullname=None):
fullname = self._fix_name(fullname)
mod_type = self.etc[2]
if self.etc[2]==imp.PKG_DIRECTORY:
return self._get_delegate().get_filename()
elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
return self.filename
return None
try:
    import zipimport
    from zipimport import zipimporter
    def iter_zipimport_modules(importer, prefix=''):
        """Yield (name, ispkg) pairs for modules inside a zipimporter's
        archive, honouring the importer's internal path prefix."""
        # _zip_directory_cache maps archive path -> TOC of contained files.
        dirlist = zipimport._zip_directory_cache[importer.archive].keys()
        dirlist.sort()
        _prefix = importer.prefix
        plen = len(_prefix)
        yielded = {}
        import inspect
        for fn in dirlist:
            if not fn.startswith(_prefix):
                continue
            fn = fn[plen:].split(os.sep)
            # A two-component path ending in __init__.py marks a package.
            # NOTE(review): packages are yielded without 'prefix', unlike
            # plain modules below -- confirm whether that is intended.
            if len(fn)==2 and fn[1].startswith('__init__.py'):
                if fn[0] not in yielded:
                    yielded[fn[0]] = 1
                    yield fn[0], True
            if len(fn)!=1:
                continue
            modname = inspect.getmodulename(fn[0])
            if modname=='__init__':
                continue
            if modname and '.' not in modname and modname not in yielded:
                yielded[modname] = 1
                yield prefix + modname, False
    # Teach the generic module iterator how to handle zip archives.
    iter_importer_modules.register(zipimporter, iter_zipimport_modules)
except ImportError:
    pass
def get_importer(path_item):
    """Retrieve a PEP 302 importer for the given path item
    The returned importer is cached in sys.path_importer_cache
    if it was newly created by a path hook.
    If there is no importer, a wrapper around the basic import
    machinery is returned. This wrapper is never inserted into
    the importer cache (None is inserted instead).
    The cache (or part of it) can be cleared manually if a
    rescan of sys.path_hooks is necessary.
    """
    _missing = object()
    importer = sys.path_importer_cache.get(path_item, _missing)
    if importer is _missing:
        # Cache miss: ask each registered path hook in turn.
        importer = None
        for path_hook in sys.path_hooks:
            try:
                importer = path_hook(path_item)
                break
            except ImportError:
                continue
        # Record the outcome (possibly None) without clobbering an entry
        # stored concurrently by somebody else.
        sys.path_importer_cache.setdefault(path_item, importer)
    if importer is None:
        # No hook claimed the item: fall back on the classic emulation.
        try:
            importer = ImpImporter(path_item)
        except ImportError:
            importer = None
    return importer
def iter_importers(fullname=""):
    """Yield PEP 302 importers for the given module name
    If fullname contains a '.', the importers will be for the package
    containing fullname, otherwise they will be importers for sys.meta_path,
    sys.path, and Python's "classic" import machinery, in that order. If
    the named module is in a package, that package is imported as a side
    effect of invoking this function.
    Non PEP 302 mechanisms (e.g. the Windows registry) used by the
    standard import machinery to find files in alternative locations
    are partially supported, but are searched AFTER sys.path. Normally,
    these locations are searched BEFORE sys.path, preventing sys.path
    entries from shadowing them.
    For this to cause a visible difference in behaviour, there must
    be a module or package name that is accessible via both sys.path
    and one of the non PEP 302 file system mechanisms. In this case,
    the emulation will find the former version, while the builtin
    import mechanism will find the latter.
    Items of the following types can be affected by this discrepancy:
    imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
    """
    if fullname.startswith('.'):
        raise ImportError("Relative module names not supported")
    if '.' in fullname:
        # Get the containing package's __path__
        pkg = '.'.join(fullname.split('.')[:-1])
        if pkg not in sys.modules:
            # Importing the parent is the documented side effect: it is
            # required to obtain the package's __path__ below.
            __import__(pkg)
        path = getattr(sys.modules[pkg], '__path__', None) or []
    else:
        # Top-level names consult the meta path importers first.
        for importer in sys.meta_path:
            yield importer
        path = sys.path
    for item in path:
        yield get_importer(item)
    if '.' not in fullname:
        # Finally, fall back on the classic import machinery emulation.
        yield ImpImporter()
def get_loader(module_or_name):
    """Get a PEP 302 "loader" object for module_or_name
    If the module or package is accessible via the normal import
    mechanism, a wrapper around the relevant part of that machinery
    is returned. Returns None if the module cannot be found or imported.
    If the named module is not already imported, its containing package
    (if any) is imported, in order to establish the package __path__.
    This function uses iter_importers(), and is thus subject to the same
    limitations regarding platform-specific special import locations such
    as the Windows registry.
    """
    # A name that is already imported resolves to its module object.
    if module_or_name in sys.modules:
        module_or_name = sys.modules[module_or_name]
    if not isinstance(module_or_name, ModuleType):
        # A plain dotted name: defer straight to the importer search.
        return find_loader(module_or_name)
    # Module object: an explicit __loader__ attribute takes precedence.
    loader = getattr(module_or_name, '__loader__', None)
    if loader is not None:
        return loader
    return find_loader(module_or_name.__name__)
def find_loader(fullname):
    """Find a PEP 302 "loader" object for fullname
    If fullname contains dots, path must be the containing package's __path__.
    Returns None if the module cannot be found or imported. This function uses
    iter_importers(), and is thus subject to the same limitations regarding
    platform-specific special import locations such as the Windows registry.
    """
    # Probe each importer lazily; the first non-None answer wins.
    candidates = (imp_.find_module(fullname)
                  for imp_ in iter_importers(fullname))
    return next((loader for loader in candidates if loader is not None), None)
def extend_path(path, name):
    """Extend a package's path.
    Intended use is to place the following code in a package's __init__.py:
    from pkgutil import extend_path
    __path__ = extend_path(__path__, __name__)
    This will add to the package's __path__ all subdirectories of
    directories on sys.path named after the package. This is useful
    if one wants to distribute different parts of a single logical
    package as multiple directories.
    It also looks for *.pkg files beginning where * matches the name
    argument. This feature is similar to *.pth files (see site.py),
    except that it doesn't special-case lines starting with 'import'.
    A *.pkg file is trusted at face value: apart from checking for
    duplicates, all entries found in a *.pkg file are added to the
    path, regardless of whether they are exist the filesystem. (This
    is a feature.)
    If the input path is not a list (as is the case for frozen
    packages) it is returned unchanged. The input path is not
    modified; an extended copy is returned. Items are only appended
    to the copy at the end.
    It is assumed that sys.path is a sequence. Items of sys.path that
    are not (unicode or 8-bit) strings referring to existing
    directories are ignored. Unicode items of sys.path that cause
    errors when used as filenames may cause this function to raise an
    exception (in line with os.path.isdir() behavior).
    """
    if not isinstance(path, list):
        # This could happen e.g. when this is called from inside a
        # frozen package. Return the path unchanged in that case.
        return path
    pname = os.path.join(*name.split('.')) # Reconstitute as relative path
    # Just in case os.extsep != '.'
    sname = os.extsep.join(name.split('.'))
    sname_pkg = sname + os.extsep + "pkg"
    init_py = "__init__" + os.extsep + "py"
    path = path[:] # Start with a copy of the existing path
    # NOTE: ``basestring`` and the ``except IOError, msg`` syntax below are
    # Python 2 constructs; this function predates Python 3.
    for dir in sys.path:
        if not isinstance(dir, basestring) or not os.path.isdir(dir):
            continue
        subdir = os.path.join(dir, pname)
        # XXX This may still add duplicate entries to path on
        # case-insensitive filesystems
        initfile = os.path.join(subdir, init_py)
        if subdir not in path and os.path.isfile(initfile):
            path.append(subdir)
        # XXX Is this the right thing for subpackages like zope.app?
        # It looks for a file named "zope.app.pkg"
        pkgfile = os.path.join(dir, sname_pkg)
        if os.path.isfile(pkgfile):
            try:
                f = open(pkgfile)
            except IOError, msg:
                sys.stderr.write("Can't open %s: %s\n" %
                                 (pkgfile, msg))
            else:
                # Every non-blank, non-comment line names one path entry.
                for line in f:
                    line = line.rstrip('\n')
                    if not line or line.startswith('#'):
                        continue
                    path.append(line) # Don't check for existence!
                f.close()
    return path
def get_data(package, resource):
    """Get a resource from a package.
    This is a wrapper round the PEP 302 loader get_data API. The package
    argument should be the name of a package, in standard module format
    (foo.bar). The resource argument should be in the form of a relative
    filename, using '/' as the path separator. The parent directory name '..'
    is not allowed, and nor is a rooted name (starting with a '/').
    The function returns a binary string, which is the contents of the
    specified resource.
    For packages located in the filesystem, which have already been imported,
    this is the rough equivalent of
    d = os.path.dirname(sys.modules[package].__file__)
    data = open(os.path.join(d, resource), 'rb').read()
    If the package cannot be located or loaded, or it uses a PEP 302 loader
    which does not support get_data(), then None is returned.
    """
    loader = get_loader(package)
    if loader is None or not hasattr(loader, 'get_data'):
        return None
    mod = sys.modules.get(package) or loader.load_module(package)
    if mod is None or not hasattr(mod, '__file__'):
        return None
    # Rebuild the '/'-separated resource path as an os.path-style filename
    # rooted at the package's own directory, as loader.get_data() expects.
    base = os.path.dirname(mod.__file__)
    return loader.get_data(os.path.join(base, *resource.split('/')))
| apache-2.0 |
ChrisYammine/ChrisYammine.github.io | node_modules/grunt-docker/node_modules/docker/node_modules/pygmentize-bundled/vendor/pygments/pygments/styles/borland.py | 364 | 1562 | # -*- coding: utf-8 -*-
"""
pygments.styles.borland
~~~~~~~~~~~~~~~~~~~~~~~
Style similar to the style used in the Borland IDEs.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class BorlandStyle(Style):
    """
    Style similar to the style used in the borland IDEs.
    """
    # Styling applied to token types not listed in ``styles`` below.
    default_style = ''
    # Token type -> Pygments style-definition string (colour as #RRGGBB,
    # optionally combined with 'bold'/'italic'/'bg:' modifiers).
    styles = {
        Whitespace: '#bbbbbb',
        Comment: 'italic #008800',
        Comment.Preproc: 'noitalic #008080',
        Comment.Special: 'noitalic bold',
        String: '#0000FF',
        String.Char: '#800080',
        Number: '#0000FF',
        Keyword: 'bold #000080',
        Operator.Word: 'bold',
        Name.Tag: 'bold #000080',
        Name.Attribute: '#FF0000',
        Generic.Heading: '#999999',
        Generic.Subheading: '#aaaaaa',
        # Diff colouring: deletions on a red tint, insertions on green.
        Generic.Deleted: 'bg:#ffdddd #000000',
        Generic.Inserted: 'bg:#ddffdd #000000',
        Generic.Error: '#aa0000',
        Generic.Emph: 'italic',
        Generic.Strong: 'bold',
        Generic.Prompt: '#555555',
        Generic.Output: '#888888',
        Generic.Traceback: '#aa0000',
        Error: 'bg:#e3d2d2 #a61717'
    }
| mit |
zathras777/pywind | pywind/elexon/cmd.py | 1 | 11805 | from datetime import timedelta, time, datetime, date
from pywind.elexon.api import B1420, B1330, B1320, FUELINST, \
DERSYSDATA, DERBMDATA, BMUNITSEARCH, \
B1610, B1630, UOU2T52W
from pywind.elexon.unit import BalancingData
from pywind.utils import StdoutFormatter, args_get_datetime
def check_api_key(args):
    """Return True when ``args.apikey`` is set; otherwise print
    registration guidance and return False."""
    if args.apikey is not None:
        return True
    print("You MUST supply an API key to access Elexon data.")
    print("Registration is free, but you need to go to the URL below and register.")
    print("https://www.elexonportal.co.uk/registration/newuser")
    return False
def get_check_data(api, params):
    """Run ``api.get_data`` with *params*; report and return False when
    nothing comes back, True otherwise."""
    if api.get_data(**params):
        return True
    print("No data returned.")
    return False
def elexon_generation_inst(args):
    """ Generation Data at 5 minute intervals from the Elexon Data Portal """
    if not check_api_key(args):
        return None
    api = FUELINST(args.apikey)
    # Convert any --fromdatetime/--todatetime strings on args to datetimes.
    args_get_datetime(args)
    params = {}
    if args.fromdatetime is not None or args.todatetime is not None:
        # When only one endpoint is supplied, derive the other so the
        # request always covers a 24 hour window.
        params['FromDateTime'] = args.fromdatetime if args.fromdatetime else args.todatetime - timedelta(days=1)
        params['ToDateTime'] = args.todatetime if args.todatetime else args.fromdatetime + timedelta(days=1)
    else:
        print("Getting data for yesterday as no dates specified.")
        # Default window: 23:59 two days ago through 23:59 yesterday.
        params['FromDateTime'] = datetime.combine(date.today() - timedelta(days=2), time(23, 59))
        params['ToDateTime'] = datetime.combine(date.today() - timedelta(days=1), time(23, 59))
    if get_check_data(api, params) is False:
        return None
    # One fixed-width column per fuel/interconnector category in FUELINST.
    fmt = StdoutFormatter("10s", "6s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s", "7s")
    print("\n" + fmt.titles('Date', 'Time', 'Period', 'CCGT', 'Oil', 'Coal', 'Nuclear', 'Wind', 'PS', 'NPSHYD', 'OCGT',
                            'Other', 'Int Fr', 'Int Irl', 'Int Ned', 'Int E/W'))
    for item in api.items:
        print(fmt.row(item['date'].strftime("%Y-%m-%d"),
                      item['time'].strftime("%H:%M"),
                      item['settlementperiod'],
                      item['ccgt'],
                      item['oil'],
                      item['coal'],
                      item['nuclear'],
                      item['wind'],
                      item['ps'],
                      item['npshyd'],
                      item['ocgt'],
                      item['other'],
                      item['intfr'],
                      item['intirl'],
                      item['intned'],
                      item['intew'],
                      ))
    return api
def elexon_b1320(args):
    """Print the B1320 congestion-management countertrading report and
    return the populated API object, or None on any error."""
    if not check_api_key(args):
        return None
    print("This report has *VERY* sparse data.")
    api = B1320(args.apikey)
    if args.date is None:
        print("You MUST supply a date for this report.")
        return None
    if args.period is None:
        print("You MUST supply a period for this report, from 1 to 50")
        return None
    query = {'SettlementDate': args.date, 'Period': args.period}
    if not get_check_data(api, query):
        return None
    table = StdoutFormatter("12s", "8s", "10.4f", "9s", "6s", "20s", "10s")
    print("\n" + table.titles('Date', 'Period', 'Quantity', 'Direction', 'Active', 'Reason', 'Resolution'))
    for rec in api.items:
        print(table.row(rec['settlementdate'],
                        rec['settlementperiod'],
                        rec['quantity'],
                        rec['flowdirection'],
                        str(rec['activeflag']),
                        rec['reasoncode'],
                        rec['resolution']))
    return api
def elexon_b1330(args):
    """ Congestion Management Measures Costs of Congestion Management Service

    Requires --apikey, --year and --month (1..12).  Prints one row per
    returned document and returns the B1330 API object, or None on error.
    """
    # Use the shared helper so the API-key message matches the other commands.
    if not check_api_key(args):
        return None
    if args.year is None:
        print("You MUST supply a year for this report.")
        return None
    if args.month is None:
        print("You MUST supply a month for this report.")
        return None
    MONTHS = [
        'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'
    ]
    # Guard the list lookup so an out-of-range month cannot raise IndexError
    # (or silently wrap around via negative indexing).
    if not 1 <= args.month <= len(MONTHS):
        print("Month must be in the range 1 to 12.")
        return None
    api = B1330(args.apikey)
    # BUG FIX: this used to be ``MONTHS[args.month - 1 or 8]``, which sent
    # September whenever January (month == 1) was requested because
    # ``1 - 1 == 0`` is falsy.  The ``or 2016`` / ``or 8`` defaults were dead
    # code anyway: year and month are verified non-None above.
    params = {'Year': args.year,
              'Month': MONTHS[args.month - 1]}
    if get_check_data(api, params) is False:
        return None
    fmt = StdoutFormatter("4d", "5s", "40s", "8s")
    print("\n" + fmt.titles('Year', 'Mon', 'Document Id', 'Rev. Num'))
    for item in api.items:
        print(fmt.row(item['year'], item['month'], item['documentid'], item['documentrevnum']))
    return api
def elexon_b1420(args):
    """Print installed generation capacity per unit (Elexon B1420) and
    return the populated API object, or None on error."""
    if not check_api_key(args):
        return None
    api = B1420(args.apikey)
    # get_check_data prints "No data returned." on failure, matching the
    # inline check this replaces.
    if not get_check_data(api, {'Year': args.year or 2016}):
        return None
    table = StdoutFormatter("30s", "8s", "10s", "6s", "10.1f", "20s")
    print("\n" + table.titles('Resource Name', 'NGC Id', 'BM Unit Id', 'Active', 'Output', 'Type'))
    for unit in sorted(api.items, key=lambda rec: rec['ngcbmunitid']):
        print(table.row(unit['registeredresourcename'],
                        unit['ngcbmunitid'],
                        unit['bmunitid'],
                        str(unit['activeflag']),
                        float(unit['nominal']),
                        unit.get('powersystemresourcetype', 'n/a')))
    return api
def elexon_b1610(args):
    """ Generated output by generator """
    if not check_api_key(args):
        return None
    api = B1610(args.apikey)
    # These are hints only: missing values fall back to defaults below.
    if args.settlement_period is None:
        print("A settlement period should be supplied using the --settlement-period flag (range 1 to 50)."
              "Defaulting to 1")
    if args.date is None:
        print("A date should be supplied using the --date flag. Format is YYYY-MM-DD. Defaulting to today")
    # Defaults: today's date and settlement period 1.
    if not api.get_data(**{'SettlementDate': args.date or date.today().strftime("%Y-%m-%d"),
                           'Period': args.settlement_period or 1}):
        print("No data returned.")
        return None
    fmt = StdoutFormatter("8s", "10s", "6s", "6s", "10.1f", "20s", "30s")
    print("\n" + fmt.titles('NGC Unit', 'Date', 'Period', 'Active', 'Output', 'Type', 'Reference'))
    # Rows are ordered by NGC balancing-mechanism unit id.
    for item in sorted(api.items, key=lambda xxx: xxx['ngcbmunitid']):
        print(fmt.row(item['ngcbmunitid'],
                      item['settlementdate'],
                      str(item['settlementperiod']),
                      str(item['activeflag']),
                      float(item['quantity']),
                      item.get('powersystemresourcetype', 'n/a'),
                      item['documentid'] + " - " + item['documentrevnum']))
    return api
def elexon_b1630(args):
    """Print actual or forecast wind & solar generation (Elexon B1630)
    and return the populated API object, or None on error."""
    if not check_api_key(args):
        return None
    api = B1630(args.apikey)
    # Hints only; defaults are applied in the query below.
    if args.settlement_period is None:
        print("A settlement period should be supplied using the --settlement-period flag (range 1 to 50)."
              "Defaulting to 1")
    if args.date is None:
        print("A date should be supplied using the --date flag. Format is YYYY-MM-DD. Defaulting to today")
    query = {'SettlementDate': args.date or date.today().strftime("%Y-%m-%d"),
             'Period': args.settlement_period or 1}
    # get_check_data prints "No data returned." on failure, matching the
    # inline check this replaces.
    if not get_check_data(api, query):
        return None
    table = StdoutFormatter("10s", "6s", "6s", "10.1f", "20s", "30s")
    print("\n" + table.titles('Date', 'Period', 'Active', 'Output', 'Type', 'Reference'))
    for rec in sorted(api.items, key=lambda row: row['documentid']):
        print(table.row(rec['settlementdate'],
                        str(rec['settlementperiod']),
                        str(rec['activeflag']),
                        float(rec['quantity']),
                        rec.get('powersystemresourcetype', 'n/a'),
                        rec['documentid'] + " - " + rec['documentrevnum']))
    return api
def elexon_sbp(args):
    """ Derived System Prices from Elexon """
    if not check_api_key(args):
        return None
    api = DERSYSDATA(args.apikey)
    # Date window defaults to yesterday; a lone --fromdate is reused as the
    # end date so a single day is requested.
    params = {
        'FromSettlementDate': args.fromdate or date.today() - timedelta(days=1),
        'ToSettlementDate': args.todate or args.fromdate or (date.today()) - timedelta(days=1)
    }
    if args.period is not None:
        params['SettlementPeriod'] = args.period
    if args.all_periods:
        # '*' asks the API for every settlement period of each day.
        params['SettlementPeriod'] = '*'
    if get_check_data(api, params) is False:
        return None
    fmt = StdoutFormatter("15s", "^20d", "15.4f", "15.4f", "4s")
    print("\nSystem adjustments are included in the figures shown below where '*' is shown.\n")
    print("\n" + fmt.titles('Date', 'Settlement Period', 'Sell Price', 'Buy Price', 'Adj?'))
    for item in api.items:
        # Prices are shown with their adjustments folded in; the trailing
        # column flags rows where any adjustment was applied.
        print(fmt.row(item['settlementdate'].strftime("%Y %b %d"),
                      item['settlementperiod'],
                      item['systemsellprice'] + item['sellpriceadjustment'],
                      item['systembuyprice'] + item['buypriceadjustment'],
                      "*" if item['sellpriceadjustment'] + item['buypriceadjustment'] > 0 else ''
                      ))
    return api
def elexon_bm_data(args):
    """ Derived balancing mechanism bid/offer data from Elexon """
    if not check_api_key(args):
        return None
    # BalancingData aggregates the raw feed into per-unit, per-period
    # bid/offer volumes, cashflows and rates.
    bd = BalancingData(args.apikey)
    params = {
        'SettlementDate': args.date or date.today() - timedelta(days=1),
        'SettlementPeriod': args.period or 1
    }
    if args.all_periods:
        # '*' asks the API for every settlement period of the day.
        params['SettlementPeriod'] = '*'
    if not bd.get_data(**params):
        return None
    fmt = StdoutFormatter('12s', '^7d', '16.4f', '16.4f', '18.4f', '18.4f', '12.4f', '12.4f')
    print("\n" + fmt.titles('Unit Name', 'Period', 'Bid Volume', 'Offer Volume',
                            'Bid Cashflow', 'Offer Cashflow', 'Bid Rate', 'Offer Rate'))
    # Units (and each unit's periods) are printed in sorted order.
    for unit_name in sorted(bd.units):
        unit = bd.units[unit_name]
        for period in sorted(unit.periods):
            pd = unit.periods[period]
            print(fmt.row(unit.unit,
                          period,
                          pd.bid_volume,
                          pd.offer_volume,
                          pd.bid_cashflow,
                          pd.offer_cashflow,
                          pd.bid_rate,
                          pd.offer_rate))
    return bd.api
def elexon_bm_unit(args):
    """List the balancing-mechanism units registered with Elexon and
    return the populated API object, or None on error."""
    if not check_api_key(args):
        return None
    api = BMUNITSEARCH(args.apikey)
    # '*' matches every unit type when no filter was requested.
    if not get_check_data(api, {'BMUnitType': args.unit_type or '*'}):
        return None
    print("Total of {} units\n".format(len(api.items)))
    table = StdoutFormatter('12s', '12s', '^8s', '30s', '50s')
    print("\n" + table.titles('NGC ID', 'BM ID', 'Active ?', 'BM Type', 'Lead Party Name'))
    for unit in sorted(api.items, key=lambda rec: rec['ngcbmunitname']):
        print(table.row(unit['ngcbmunitname'],
                        unit['bmunitid'],
                        'Y' if unit['activeflag'] else 'N',
                        "{}, {}".format(unit['bmunittype'], unit['category']),
                        unit['leadpartyname']))
    return api
def elexon_uou2t52w(args):
    """Fetch 52 weeks of generator output by unit and fuel type
    (UOU2T52W); returns the API object, or None on error."""
    if not check_api_key(args):
        return None
    api = UOU2T52W(args.apikey)
    return api if get_check_data(api, {}) else None
| unlicense |
dkodnik/arp | addons/document/odt2txt.py | 435 | 2110 | #!/usr/bin/python
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sys, zipfile, xml.dom.minidom
import StringIO
class OpenDocumentTextFile:
    """Minimal reader that pulls the plain text out of an ODT document."""

    # Top-level text-bearing elements, collected in this order.
    _TEXT_TAGS = ("text:p", "text:h", "text:list")

    def __init__(self, filepath):
        # 'filepath' may be a path or any file-like object ZipFile accepts.
        archive = zipfile.ZipFile(filepath)
        self.content = xml.dom.minidom.parseString(archive.read("content.xml"))

    def toString(self):
        """ Converts the document to a string. """
        chunks = []
        for tag in self._TEXT_TAGS:
            for element in self.content.getElementsByTagName(tag):
                chunks.append(self.textToString(element) + "\n")
        return u"".join(chunks)

    def textToString(self, element):
        """Recursively concatenate every text node below *element*."""
        pieces = []
        for child in element.childNodes:
            if child.nodeType == xml.dom.Node.TEXT_NODE:
                pieces.append(child.nodeValue)
            elif child.nodeType == xml.dom.Node.ELEMENT_NODE:
                pieces.append(self.textToString(child))
        return u"".join(pieces)
if __name__ == "__main__" :
    # CLI usage (Python 2): ``odt2txt.py <file.odt>`` -- read the whole ODT
    # into memory and print its text content as ASCII (unmappable
    # characters replaced).
    s =StringIO.StringIO(file(sys.argv[1]).read())
    odt = OpenDocumentTextFile(s)
    # NOTE(review): ``file()`` and the print statement are Python 2 only.
    print odt.toString().encode('ascii','replace')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
LLNL/spack | lib/spack/spack/cmd/configure.py | 2 | 3097 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import argparse
import llnl.util.tty as tty
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.cmd.install as inst
from spack.build_systems.autotools import AutotoolsPackage
from spack.build_systems.cmake import CMakePackage
from spack.build_systems.qmake import QMakePackage
from spack.build_systems.waf import WafPackage
from spack.build_systems.perl import PerlPackage
from spack.build_systems.intel import IntelPackage
from spack.build_systems.meson import MesonPackage
from spack.build_systems.sip import SIPPackage
# Metadata consumed by Spack's command framework.
description = 'DEPRECATED: stage and configure a package but do not install'
section = "build"
level = "long"
# Maps each supported build-system base class to the name of the phase at
# which ``spack configure`` stops the installation.
build_system_to_phase = {
    AutotoolsPackage: 'configure',
    CMakePackage: 'cmake',
    QMakePackage: 'qmake',
    WafPackage: 'configure',
    PerlPackage: 'configure',
    IntelPackage: 'configure',
    MesonPackage: 'meson',
    SIPPackage: 'configure',
}
def setup_parser(subparser):
    """Attach the ``spack configure`` command-line options to *subparser*."""
    verbose_opts = {
        'action': 'store_true',
        'help': "print additional output during builds",
    }
    subparser.add_argument('-v', '--verbose', **verbose_opts)
    arguments.add_common_arguments(subparser, ['spec'])
def _stop_at_phase_during_install(args, calling_fn, phase_mapping):
    """Install the spec in ``args`` but stop after the phase mapped for its
    build system.

    ``calling_fn`` is only used in the error message; ``phase_mapping`` maps
    build-system base classes to the name of the phase to stop at.
    """
    if not args.package:
        tty.die("configure requires at least one package argument")
    # TODO: to be refactored with code in install
    specs = spack.cmd.parse_specs(args.package, concretize=True)
    if len(specs) != 1:
        tty.error('only one spec can be installed at a time.')
        # NOTE(review): tty.error does not abort, so execution continues
        # below with the last parsed spec -- confirm whether tty.die was
        # intended here.
    spec = specs.pop()
    pkg = spec.package
    try:
        # .pop() raises IndexError when no supported build system matches;
        # that is handled by the except clause at the bottom.
        key = [cls for cls in phase_mapping if isinstance(pkg, cls)].pop()
        phase = phase_mapping[key]
        # Install package dependencies if needed
        parser = argparse.ArgumentParser()
        inst.setup_parser(parser)
        tty.msg('Checking dependencies for {0}'.format(args.spec[0]))
        cli_args = ['-v'] if args.verbose else []
        install_args = parser.parse_args(cli_args + ['--only=dependencies'])
        install_args.spec = args.spec
        inst.install(parser, install_args)
        # Install package and stop at the given phase
        cli_args = ['-v'] if args.verbose else []
        install_args = parser.parse_args(cli_args + ['--only=package'])
        install_args.spec = args.spec
        inst.install(parser, install_args, stop_at=phase)
    except IndexError:
        tty.error(
            'Package {0} has no {1} phase, or its {1} phase is not separated from install'.format( # NOQA: ignore=E501
                spec.name, calling_fn.__name__)
        )
def configure(parser, args):
    """Deprecated command entry point: install *args.spec* but stop at the
    build system's configure-equivalent phase."""
    deprecation_msg = ("This command is deprecated. Use `spack install --until` to"
                       " select an end phase instead. The `spack configure` command will"
                       " be removed in a future version of Spack.")
    tty.warn(deprecation_msg)
    _stop_at_phase_during_install(args, configure, build_system_to_phase)
| lgpl-2.1 |
amghost/myblog | node_modules/pygmentize-bundled/vendor/pygments/pygments/lexers/_phpbuiltins.py | 274 | 122046 | # -*- coding: utf-8 -*-
"""
pygments.lexers._phpbuiltins
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This file loads the function names and their modules from the
php webpage and generates itself.
Do not alter the MODULES dict by hand!
WARNING: the generation transfers quite much data over your
internet connection. don't run that at home, use
a server ;-)
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
MODULES = {'.NET': ['dotnet_load'],
'APC': ['apc_add',
'apc_bin_dump',
'apc_bin_dumpfile',
'apc_bin_load',
'apc_bin_loadfile',
'apc_cache_info',
'apc_cas',
'apc_clear_cache',
'apc_compile_file',
'apc_dec',
'apc_define_constants',
'apc_delete_file',
'apc_delete',
'apc_exists',
'apc_fetch',
'apc_inc',
'apc_load_constants',
'apc_sma_info',
'apc_store'],
'APD': ['apd_breakpoint',
'apd_callstack',
'apd_clunk',
'apd_continue',
'apd_croak',
'apd_dump_function_table',
'apd_dump_persistent_resources',
'apd_dump_regular_resources',
'apd_echo',
'apd_get_active_symbols',
'apd_set_pprof_trace',
'apd_set_session_trace_socket',
'apd_set_session_trace',
'apd_set_session',
'override_function',
'rename_function'],
'Aliases and deprecated Mysqli': ['mysqli_bind_param',
'mysqli_bind_result',
'mysqli_client_encoding',
'mysqli_connect',
'mysqli_disable_reads_from_master',
'mysqli_disable_rpl_parse',
'mysqli_enable_reads_from_master',
'mysqli_enable_rpl_parse',
'mysqli_escape_string',
'mysqli_execute',
'mysqli_fetch',
'mysqli_get_metadata',
'mysqli_master_query',
'mysqli_param_count',
'mysqli_report',
'mysqli_rpl_parse_enabled',
'mysqli_rpl_probe',
'mysqli_rpl_query_type',
'mysqli_send_long_data',
'mysqli_send_query',
'mysqli_set_opt',
'mysqli_slave_query'],
'Apache': ['apache_child_terminate',
'apache_get_modules',
'apache_get_version',
'apache_getenv',
'apache_lookup_uri',
'apache_note',
'apache_request_headers',
'apache_reset_timeout',
'apache_response_headers',
'apache_setenv',
'getallheaders',
'virtual'],
'Array': ['array_change_key_case',
'array_chunk',
'array_combine',
'array_count_values',
'array_diff_assoc',
'array_diff_key',
'array_diff_uassoc',
'array_diff_ukey',
'array_diff',
'array_fill_keys',
'array_fill',
'array_filter',
'array_flip',
'array_intersect_assoc',
'array_intersect_key',
'array_intersect_uassoc',
'array_intersect_ukey',
'array_intersect',
'array_key_exists',
'array_keys',
'array_map',
'array_merge_recursive',
'array_merge',
'array_multisort',
'array_pad',
'array_pop',
'array_product',
'array_push',
'array_rand',
'array_reduce',
'array_replace_recursive',
'array_replace',
'array_reverse',
'array_search',
'array_shift',
'array_slice',
'array_splice',
'array_sum',
'array_udiff_assoc',
'array_udiff_uassoc',
'array_udiff',
'array_uintersect_assoc',
'array_uintersect_uassoc',
'array_uintersect',
'array_unique',
'array_unshift',
'array_values',
'array_walk_recursive',
'array_walk',
'array',
'arsort',
'asort',
'compact',
'count',
'current',
'each',
'end',
'extract',
'in_array',
'key',
'krsort',
'ksort',
'list',
'natcasesort',
'natsort',
'next',
'pos',
'prev',
'range',
'reset',
'rsort',
'shuffle',
'sizeof',
'sort',
'uasort',
'uksort',
'usort'],
'BBCode': ['bbcode_add_element',
'bbcode_add_smiley',
'bbcode_create',
'bbcode_destroy',
'bbcode_parse',
'bbcode_set_arg_parser',
'bbcode_set_flags'],
'BC Math': ['bcadd',
'bccomp',
'bcdiv',
'bcmod',
'bcmul',
'bcpow',
'bcpowmod',
'bcscale',
'bcsqrt',
'bcsub'],
'Bzip2': ['bzclose',
'bzcompress',
'bzdecompress',
'bzerrno',
'bzerror',
'bzerrstr',
'bzflush',
'bzopen',
'bzread',
'bzwrite'],
'COM': ['com_addref',
'com_create_guid',
'com_event_sink',
'com_get_active_object',
'com_get',
'com_invoke',
'com_isenum',
'com_load_typelib',
'com_load',
'com_message_pump',
'com_print_typeinfo',
'com_propget',
'com_propput',
'com_propset',
'com_release',
'com_set',
'variant_abs',
'variant_add',
'variant_and',
'variant_cast',
'variant_cat',
'variant_cmp',
'variant_date_from_timestamp',
'variant_date_to_timestamp',
'variant_div',
'variant_eqv',
'variant_fix',
'variant_get_type',
'variant_idiv',
'variant_imp',
'variant_int',
'variant_mod',
'variant_mul',
'variant_neg',
'variant_not',
'variant_or',
'variant_pow',
'variant_round',
'variant_set_type',
'variant_set',
'variant_sub',
'variant_xor'],
'CUBRID': ['cubrid_affected_rows',
'cubrid_bind',
'cubrid_close_prepare',
'cubrid_close_request',
'cubrid_col_get',
'cubrid_col_size',
'cubrid_column_names',
'cubrid_column_types',
'cubrid_commit',
'cubrid_connect_with_url',
'cubrid_connect',
'cubrid_current_oid',
'cubrid_disconnect',
'cubrid_drop',
'cubrid_error_code_facility',
'cubrid_error_code',
'cubrid_error_msg',
'cubrid_execute',
'cubrid_fetch',
'cubrid_free_result',
'cubrid_get_charset',
'cubrid_get_class_name',
'cubrid_get_client_info',
'cubrid_get_db_parameter',
'cubrid_get_server_info',
'cubrid_get',
'cubrid_insert_id',
'cubrid_is_instance',
'cubrid_lob_close',
'cubrid_lob_export',
'cubrid_lob_get',
'cubrid_lob_send',
'cubrid_lob_size',
'cubrid_lock_read',
'cubrid_lock_write',
'cubrid_move_cursor',
'cubrid_num_cols',
'cubrid_num_rows',
'cubrid_prepare',
'cubrid_put',
'cubrid_rollback',
'cubrid_schema',
'cubrid_seq_drop',
'cubrid_seq_insert',
'cubrid_seq_put',
'cubrid_set_add',
'cubrid_set_drop',
'cubrid_version'],
'Cairo': ['cairo_create',
'cairo_font_face_get_type',
'cairo_font_face_status',
'cairo_font_options_create',
'cairo_font_options_equal',
'cairo_font_options_get_antialias',
'cairo_font_options_get_hint_metrics',
'cairo_font_options_get_hint_style',
'cairo_font_options_get_subpixel_order',
'cairo_font_options_hash',
'cairo_font_options_merge',
'cairo_font_options_set_antialias',
'cairo_font_options_set_hint_metrics',
'cairo_font_options_set_hint_style',
'cairo_font_options_set_subpixel_order',
'cairo_font_options_status',
'cairo_format_stride_for_width',
'cairo_image_surface_create_for_data',
'cairo_image_surface_create_from_png',
'cairo_image_surface_create',
'cairo_image_surface_get_data',
'cairo_image_surface_get_format',
'cairo_image_surface_get_height',
'cairo_image_surface_get_stride',
'cairo_image_surface_get_width',
'cairo_matrix_create_scale',
'cairo_matrix_create_translate',
'cairo_matrix_invert',
'cairo_matrix_multiply',
'cairo_matrix_rotate',
'cairo_matrix_transform_distance',
'cairo_matrix_transform_point',
'cairo_matrix_translate',
'cairo_pattern_add_color_stop_rgb',
'cairo_pattern_add_color_stop_rgba',
'cairo_pattern_create_for_surface',
'cairo_pattern_create_linear',
'cairo_pattern_create_radial',
'cairo_pattern_create_rgb',
'cairo_pattern_create_rgba',
'cairo_pattern_get_color_stop_count',
'cairo_pattern_get_color_stop_rgba',
'cairo_pattern_get_extend',
'cairo_pattern_get_filter',
'cairo_pattern_get_linear_points',
'cairo_pattern_get_matrix',
'cairo_pattern_get_radial_circles',
'cairo_pattern_get_rgba',
'cairo_pattern_get_surface',
'cairo_pattern_get_type',
'cairo_pattern_set_extend',
'cairo_pattern_set_filter',
'cairo_pattern_set_matrix',
'cairo_pattern_status',
'cairo_pdf_surface_create',
'cairo_pdf_surface_set_size',
'cairo_ps_get_levels',
'cairo_ps_level_to_string',
'cairo_ps_surface_create',
'cairo_ps_surface_dsc_begin_page_setup',
'cairo_ps_surface_dsc_begin_setup',
'cairo_ps_surface_dsc_comment',
'cairo_ps_surface_get_eps',
'cairo_ps_surface_restrict_to_level',
'cairo_ps_surface_set_eps',
'cairo_ps_surface_set_size',
'cairo_scaled_font_create',
'cairo_scaled_font_extents',
'cairo_scaled_font_get_ctm',
'cairo_scaled_font_get_font_face',
'cairo_scaled_font_get_font_matrix',
'cairo_scaled_font_get_font_options',
'cairo_scaled_font_get_scale_matrix',
'cairo_scaled_font_get_type',
'cairo_scaled_font_glyph_extents',
'cairo_scaled_font_status',
'cairo_scaled_font_text_extents',
'cairo_surface_copy_page',
'cairo_surface_create_similar',
'cairo_surface_finish',
'cairo_surface_flush',
'cairo_surface_get_content',
'cairo_surface_get_device_offset',
'cairo_surface_get_font_options',
'cairo_surface_get_type',
'cairo_surface_mark_dirty_rectangle',
'cairo_surface_mark_dirty',
'cairo_surface_set_device_offset',
'cairo_surface_set_fallback_resolution',
'cairo_surface_show_page',
'cairo_surface_status',
'cairo_surface_write_to_png',
'cairo_svg_surface_create',
'cairo_svg_surface_restrict_to_version',
'cairo_svg_version_to_string'],
'Calendar': ['cal_days_in_month',
'cal_from_jd',
'cal_info',
'cal_to_jd',
'easter_date',
'easter_days',
'FrenchToJD',
'GregorianToJD',
'JDDayOfWeek',
'JDMonthName',
'JDToFrench',
'JDToGregorian',
'jdtojewish',
'JDToJulian',
'jdtounix',
'JewishToJD',
'JulianToJD',
'unixtojd'],
'Classes/Object': ['call_user_method_array',
'call_user_method',
'class_alias',
'class_exists',
'get_called_class',
'get_class_methods',
'get_class_vars',
'get_class',
'get_declared_classes',
'get_declared_interfaces',
'get_object_vars',
'get_parent_class',
'interface_exists',
'is_a',
'is_subclass_of',
'method_exists',
'property_exists'],
'Classkit': ['classkit_import',
'classkit_method_add',
'classkit_method_copy',
'classkit_method_redefine',
'classkit_method_remove',
'classkit_method_rename'],
'Crack': ['crack_check',
'crack_closedict',
'crack_getlastmessage',
'crack_opendict'],
'Ctype': ['ctype_alnum',
'ctype_alpha',
'ctype_cntrl',
'ctype_digit',
'ctype_graph',
'ctype_lower',
'ctype_print',
'ctype_punct'],
'Cyrus': ['cyrus_authenticate',
'cyrus_bind',
'cyrus_close',
'cyrus_connect',
'cyrus_query',
'cyrus_unbind'],
'DB++': ['dbplus_add',
'dbplus_aql',
'dbplus_chdir',
'dbplus_close',
'dbplus_curr',
'dbplus_errcode',
'dbplus_errno',
'dbplus_find',
'dbplus_first',
'dbplus_flush',
'dbplus_freealllocks',
'dbplus_freelock',
'dbplus_freerlocks',
'dbplus_getlock',
'dbplus_getunique',
'dbplus_info',
'dbplus_last',
'dbplus_lockrel',
'dbplus_next',
'dbplus_open',
'dbplus_prev',
'dbplus_rchperm',
'dbplus_rcreate',
'dbplus_rcrtexact',
'dbplus_rcrtlike',
'dbplus_resolve',
'dbplus_restorepos',
'dbplus_rkeys',
'dbplus_ropen',
'dbplus_rquery',
'dbplus_rrename',
'dbplus_rsecindex',
'dbplus_runlink',
'dbplus_rzap',
'dbplus_savepos',
'dbplus_setindex',
'dbplus_setindexbynumber',
'dbplus_sql',
'dbplus_tcl',
'dbplus_tremove',
'dbplus_undo',
'dbplus_undoprepare',
'dbplus_unlockrel',
'dbplus_unselect',
'dbplus_update',
'dbplus_xlockrel',
'dbplus_xunlockrel'],
'DBA': ['dba_close',
'dba_delete',
'dba_exists',
'dba_fetch',
'dba_firstkey',
'dba_handlers',
'dba_insert',
'dba_key_split',
'dba_list',
'dba_nextkey',
'dba_open',
'dba_optimize',
'dba_popen',
'dba_replace',
'dba_sync'],
'DOM': ['dom_import_simplexml'],
'DOM XML (PHP 4)': ['domxml_new_doc',
'domxml_open_file',
'domxml_open_mem',
'domxml_version',
'domxml_xmltree',
'domxml_xslt_stylesheet_doc',
'domxml_xslt_stylesheet_file',
'domxml_xslt_stylesheet',
'domxml_xslt_version',
'xpath_eval_expression',
'xpath_eval',
'xpath_new_context',
'xpath_register_ns_auto',
'xpath_register_ns',
'xptr_eval',
'xptr_new_context'],
'Date/Time': ['checkdate',
'date_add',
'date_create_from_format',
'date_create',
'date_date_set',
'date_default_timezone_get',
'date_default_timezone_set',
'date_diff',
'date_format',
'date_get_last_errors',
'date_interval_create_from_date_string',
'date_interval_format',
'date_isodate_set',
'date_modify',
'date_offset_get',
'date_parse_from_format',
'date_parse',
'date_sub',
'date_sun_info',
'date_sunrise',
'date_sunset',
'date_time_set',
'date_timestamp_get',
'date_timestamp_set',
'date_timezone_get',
'date_timezone_set',
'date',
'getdate',
'gettimeofday',
'gmdate',
'gmmktime',
'gmstrftime',
'idate',
'localtime',
'microtime',
'mktime',
'strftime',
'strptime',
'strtotime',
'time',
'timezone_abbreviations_list',
'timezone_identifiers_list',
'timezone_location_get',
'timezone_name_from_abbr',
'timezone_name_get',
'timezone_offset_get',
'timezone_open',
'timezone_transitions_get',
'timezone_version_get'],
'Direct IO': ['dio_close', 'dio_fcntl', 'dio_open'],
'Directory': ['chdir',
'chroot',
'closedir',
'getcwd',
'opendir',
'readdir',
'rewinddir',
'scandir'],
'Enchant': ['enchant_broker_describe',
'enchant_broker_dict_exists',
'enchant_broker_free_dict',
'enchant_broker_free',
'enchant_broker_get_error',
'enchant_broker_init',
'enchant_broker_list_dicts',
'enchant_broker_request_dict',
'enchant_broker_request_pwl_dict',
'enchant_broker_set_ordering',
'enchant_dict_add_to_personal',
'enchant_dict_add_to_session',
'enchant_dict_check',
'enchant_dict_describe',
'enchant_dict_get_error',
'enchant_dict_is_in_session',
'enchant_dict_quick_check',
'enchant_dict_store_replacement',
'enchant_dict_suggest'],
'Error Handling': ['debug_backtrace',
'debug_print_backtrace',
'error_get_last',
'error_log',
'error_reporting',
'restore_error_handler',
'restore_exception_handler',
'set_error_handler',
'set_exception_handler',
'trigger_error',
'user_error'],
'Exif': ['exif_imagetype',
'exif_read_data',
'exif_tagname',
'exif_thumbnail',
'read_exif_data'],
'Expect': ['expect_expectl'],
'FAM': ['fam_cancel_monitor',
'fam_close',
'fam_monitor_collection',
'fam_monitor_directory',
'fam_monitor_file',
'fam_next_event',
'fam_open',
'fam_pending',
'fam_resume_monitor',
'fam_suspend_monitor'],
'FDF': ['fdf_add_doc_javascript',
'fdf_add_template',
'fdf_close',
'fdf_create',
'fdf_enum_values',
'fdf_errno',
'fdf_error',
'fdf_get_ap',
'fdf_get_attachment',
'fdf_get_encoding',
'fdf_get_file',
'fdf_get_flags',
'fdf_get_opt',
'fdf_get_status',
'fdf_get_value',
'fdf_get_version',
'fdf_header',
'fdf_next_field_name',
'fdf_open_string',
'fdf_open',
'fdf_remove_item',
'fdf_save_string',
'fdf_save',
'fdf_set_ap',
'fdf_set_encoding',
'fdf_set_file',
'fdf_set_flags',
'fdf_set_javascript_action',
'fdf_set_on_import_javascript',
'fdf_set_opt',
'fdf_set_status',
'fdf_set_submit_form_action',
'fdf_set_target_frame',
'fdf_set_value',
'fdf_set_version'],
'FTP': ['ftp_alloc',
'ftp_cdup',
'ftp_chdir',
'ftp_chmod',
'ftp_close',
'ftp_connect',
'ftp_delete',
'ftp_exec',
'ftp_fget',
'ftp_fput',
'ftp_get_option',
'ftp_get',
'ftp_login',
'ftp_mdtm',
'ftp_mkdir',
'ftp_nb_continue',
'ftp_nb_fget',
'ftp_nb_fput',
'ftp_nb_get',
'ftp_nb_put',
'ftp_nlist',
'ftp_pasv',
'ftp_put',
'ftp_pwd',
'ftp_quit',
'ftp_raw',
'ftp_rawlist',
'ftp_rename',
'ftp_rmdir',
'ftp_set_option',
'ftp_site',
'ftp_size',
'ftp_ssl_connect',
'ftp_systype'],
'Fileinfo': ['finfo_buffer',
'finfo_close',
'finfo_file',
'finfo_open',
'finfo_set_flags',
'mime_content_type'],
'Filesystem': ['basename',
'chgrp',
'chmod',
'chown',
'clearstatcache',
'copy',
'dirname',
'disk_free_space',
'disk_total_space',
'diskfreespace',
'fclose',
'feof',
'fflush',
'fgetc',
'fgetcsv',
'fgets',
'fgetss',
'file_exists',
'file_get_contents',
'file_put_contents',
'file',
'fileatime',
'filectime',
'filegroup',
'fileinode',
'filemtime',
'fileowner',
'fileperms',
'filesize',
'filetype',
'flock',
'fnmatch',
'fopen',
'fpassthru',
'fputcsv',
'fputs',
'fread',
'fscanf',
'fseek',
'fstat',
'ftell',
'ftruncate',
'fwrite',
'glob',
'is_dir',
'is_executable',
'is_file',
'is_link',
'is_readable',
'is_uploaded_file',
'is_writable',
'is_writeable',
'lchgrp',
'lchown',
'link',
'linkinfo',
'lstat',
'mkdir',
'move_uploaded_file',
'parse_ini_file',
'parse_ini_string',
'pathinfo',
'pclose',
'popen',
'readfile',
'readlink',
'realpath_cache_get',
'realpath_cache_size',
'realpath',
'rename',
'rewind',
'rmdir',
'set_file_buffer',
'stat',
'symlink',
'tempnam',
'tmpfile',
'touch',
'umask',
'unlink'],
'Filter': ['filter_has_var',
'filter_id',
'filter_input_array',
'filter_input',
'filter_list',
'filter_var_array',
'filter_var'],
'Firebird/InterBase': ['ibase_add_user',
'ibase_affected_rows',
'ibase_backup',
'ibase_blob_add',
'ibase_blob_cancel',
'ibase_blob_close',
'ibase_blob_create',
'ibase_blob_echo',
'ibase_blob_get',
'ibase_blob_import',
'ibase_blob_info',
'ibase_blob_open',
'ibase_close',
'ibase_commit_ret',
'ibase_commit',
'ibase_connect',
'ibase_db_info',
'ibase_delete_user',
'ibase_drop_db',
'ibase_errcode',
'ibase_errmsg',
'ibase_execute',
'ibase_fetch_assoc',
'ibase_fetch_object',
'ibase_fetch_row',
'ibase_field_info',
'ibase_free_event_handler',
'ibase_free_query',
'ibase_free_result',
'ibase_gen_id',
'ibase_maintain_db',
'ibase_modify_user',
'ibase_name_result',
'ibase_num_fields',
'ibase_num_params',
'ibase_param_info',
'ibase_pconnect',
'ibase_prepare',
'ibase_query',
'ibase_restore',
'ibase_rollback_ret',
'ibase_rollback',
'ibase_server_info',
'ibase_service_attach',
'ibase_service_detach',
'ibase_set_event_handler',
'ibase_timefmt',
'ibase_trans',
'ibase_wait_event'],
'FriBiDi': ['fribidi_log2vis'],
'FrontBase': ['fbsql_affected_rows',
'fbsql_autocommit',
'fbsql_blob_size',
'fbsql_change_user',
'fbsql_clob_size',
'fbsql_close',
'fbsql_commit',
'fbsql_connect',
'fbsql_create_blob',
'fbsql_create_clob',
'fbsql_create_db',
'fbsql_data_seek',
'fbsql_database_password',
'fbsql_database',
'fbsql_db_query',
'fbsql_db_status',
'fbsql_drop_db',
'fbsql_errno',
'fbsql_error',
'fbsql_fetch_array',
'fbsql_fetch_assoc',
'fbsql_fetch_field',
'fbsql_fetch_lengths',
'fbsql_fetch_object',
'fbsql_fetch_row',
'fbsql_field_flags',
'fbsql_field_len',
'fbsql_field_name',
'fbsql_field_seek',
'fbsql_field_table',
'fbsql_field_type',
'fbsql_free_result',
'fbsql_get_autostart_info',
'fbsql_hostname',
'fbsql_insert_id',
'fbsql_list_dbs',
'fbsql_list_fields',
'fbsql_list_tables',
'fbsql_next_result',
'fbsql_num_fields',
'fbsql_num_rows',
'fbsql_password',
'fbsql_pconnect',
'fbsql_query',
'fbsql_read_blob',
'fbsql_read_clob',
'fbsql_result',
'fbsql_rollback',
'fbsql_rows_fetched',
'fbsql_select_db',
'fbsql_set_characterset',
'fbsql_set_lob_mode',
'fbsql_set_password',
'fbsql_set_transaction',
'fbsql_start_db',
'fbsql_stop_db',
'fbsql_table_name',
'fbsql_tablename',
'fbsql_username',
'fbsql_warnings'],
'Function handling': ['call_user_func_array',
'call_user_func',
'create_function',
'forward_static_call_array',
'forward_static_call',
'func_get_arg',
'func_get_args',
'func_num_args',
'function_exists',
'get_defined_functions',
'register_shutdown_function',
'register_tick_function',
'unregister_tick_function'],
'GD and Image': ['gd_info',
'getimagesize',
'image_type_to_extension',
'image_type_to_mime_type'],
'GMP': ['gmp_abs',
'gmp_add',
'gmp_and',
'gmp_clrbit',
'gmp_cmp',
'gmp_com',
'gmp_div_q',
'gmp_div_qr',
'gmp_div_r',
'gmp_div',
'gmp_divexact',
'gmp_fact',
'gmp_gcd',
'gmp_gcdext',
'gmp_hamdist',
'gmp_init',
'gmp_intval',
'gmp_invert',
'gmp_jacobi',
'gmp_legendre',
'gmp_mod',
'gmp_mul',
'gmp_neg',
'gmp_nextprime',
'gmp_or',
'gmp_perfect_square',
'gmp_popcount',
'gmp_pow',
'gmp_powm',
'gmp_prob_prime',
'gmp_random',
'gmp_scan0',
'gmp_scan1',
'gmp_setbit',
'gmp_sign',
'gmp_sqrt',
'gmp_sqrtrem',
'gmp_strval',
'gmp_sub',
'gmp_testbit',
'gmp_xor'],
'GeoIP': ['geoip_continent_code_by_name',
'geoip_country_code_by_name',
'geoip_country_code3_by_name',
'geoip_country_name_by_name',
'geoip_database_info',
'geoip_db_avail',
'geoip_db_filename',
'geoip_db_get_all_info',
'geoip_id_by_name',
'geoip_isp_by_name',
'geoip_org_by_name',
'geoip_record_by_name',
'geoip_region_by_name',
'geoip_region_name_by_code',
'geoip_time_zone_by_country_and_region'],
'Gettext': ['bind_textdomain_codeset',
'bindtextdomain',
'dcgettext',
'dcngettext',
'dgettext',
'dngettext',
'gettext',
'ngettext',
'textdomain'],
'GnuPG': ['gnupg_adddecryptkey',
'gnupg_addencryptkey',
'gnupg_addsignkey',
'gnupg_cleardecryptkeys',
'gnupg_clearencryptkeys',
'gnupg_clearsignkeys',
'gnupg_decrypt',
'gnupg_decryptverify',
'gnupg_encrypt',
'gnupg_encryptsign',
'gnupg_export',
'gnupg_geterror',
'gnupg_getprotocol',
'gnupg_import',
'gnupg_init',
'gnupg_keyinfo',
'gnupg_setarmor',
'gnupg_seterrormode',
'gnupg_setsignmode',
'gnupg_sign',
'gnupg_verify'],
'Gopher': ['gopher_parsedir'],
'Grapheme': ['grapheme_extract',
'grapheme_stripos',
'grapheme_stristr',
'grapheme_strlen',
'grapheme_strpos',
'grapheme_strripos',
'grapheme_strrpos',
'grapheme_strstr',
'grapheme_substr'],
'Gupnp': ['gupnp_context_get_host_ip',
'gupnp_context_get_port',
'gupnp_context_get_subscription_timeout',
'gupnp_context_host_path',
'gupnp_context_new',
'gupnp_context_set_subscription_timeout',
'gupnp_context_timeout_add',
'gupnp_context_unhost_path',
'gupnp_control_point_browse_start',
'gupnp_control_point_browse_stop',
'gupnp_control_point_callback_set',
'gupnp_control_point_new',
'gupnp_device_action_callback_set',
'gupnp_device_info_get_service',
'gupnp_device_info_get',
'gupnp_root_device_get_available',
'gupnp_root_device_get_relative_location',
'gupnp_root_device_new',
'gupnp_root_device_set_available',
'gupnp_root_device_start',
'gupnp_root_device_stop',
'gupnp_service_action_get',
'gupnp_service_action_return_error',
'gupnp_service_action_return',
'gupnp_service_action_set',
'gupnp_service_freeze_notify',
'gupnp_service_info_get_introspection',
'gupnp_service_info_get',
'gupnp_service_introspection_get_state_variable',
'gupnp_service_notify',
'gupnp_service_proxy_action_get',
'gupnp_service_proxy_action_set',
'gupnp_service_proxy_add_notify',
'gupnp_service_proxy_callback_set',
'gupnp_service_proxy_get_subscribed',
'gupnp_service_proxy_remove_notify',
'gupnp_service_proxy_set_subscribed',
'gupnp_service_thaw_notify'],
'HTTP': ['http_cache_etag',
'http_cache_last_modified',
'http_chunked_decode',
'http_deflate',
'http_inflate',
'http_build_cookie',
'http_date',
'http_get_request_body_stream',
'http_get_request_body',
'http_get_request_headers',
'http_match_etag',
'http_match_modified',
'http_match_request_header',
'http_support',
'http_negotiate_charset',
'http_negotiate_content_type',
'http_negotiate_language',
'ob_deflatehandler',
'ob_etaghandler',
'ob_inflatehandler',
'http_parse_cookie',
'http_parse_headers',
'http_parse_message',
'http_parse_params',
'http_persistent_handles_clean',
'http_persistent_handles_count',
'http_persistent_handles_ident',
'http_get',
'http_head',
'http_post_data',
'http_post_fields',
'http_put_data',
'http_put_file',
'http_put_stream',
'http_request_body_encode',
'http_request_method_exists',
'http_request_method_name',
'http_request_method_register',
'http_request_method_unregister',
'http_request',
'http_redirect',
'http_send_content_disposition',
'http_send_content_type',
'http_send_data',
'http_send_file',
'http_send_last_modified',
'http_send_status',
'http_send_stream',
'http_throttle',
'http_build_str',
'http_build_url'],
'Hash': ['hash_algos',
'hash_copy',
'hash_file',
'hash_final',
'hash_hmac_file',
'hash_hmac',
'hash_init',
'hash_update_file',
'hash_update_stream',
'hash_update',
'hash'],
'Hyperwave': ['hw_Array2Objrec',
'hw_changeobject',
'hw_Children',
'hw_ChildrenObj',
'hw_Close',
'hw_Connect',
'hw_connection_info',
'hw_cp',
'hw_Deleteobject',
'hw_DocByAnchor',
'hw_DocByAnchorObj',
'hw_Document_Attributes',
'hw_Document_BodyTag',
'hw_Document_Content',
'hw_Document_SetContent',
'hw_Document_Size',
'hw_dummy',
'hw_EditText',
'hw_Error',
'hw_ErrorMsg',
'hw_Free_Document',
'hw_GetAnchors',
'hw_GetAnchorsObj',
'hw_GetAndLock',
'hw_GetChildColl',
'hw_GetChildCollObj',
'hw_GetChildDocColl',
'hw_GetChildDocCollObj',
'hw_GetObject',
'hw_GetObjectByQuery',
'hw_GetObjectByQueryColl',
'hw_GetObjectByQueryCollObj',
'hw_GetObjectByQueryObj',
'hw_GetParents',
'hw_GetParentsObj',
'hw_getrellink',
'hw_GetRemote',
'hw_getremotechildren',
'hw_GetSrcByDestObj',
'hw_GetText',
'hw_getusername',
'hw_Identify',
'hw_InCollections',
'hw_Info',
'hw_InsColl',
'hw_InsDoc',
'hw_insertanchors',
'hw_InsertDocument',
'hw_InsertObject',
'hw_mapid',
'hw_Modifyobject',
'hw_mv',
'hw_New_Document',
'hw_objrec2array',
'hw_Output_Document',
'hw_pConnect',
'hw_PipeDocument',
'hw_Root',
'hw_setlinkroot',
'hw_stat',
'hw_Unlock',
'hw_Who'],
'Hyperwave API': ['hw_api_attribute',
'hwapi_hgcsp',
'hw_api_content',
'hw_api_object'],
'IBM DB2': ['db2_autocommit',
'db2_bind_param',
'db2_client_info',
'db2_close',
'db2_column_privileges',
'db2_columns',
'db2_commit',
'db2_conn_error',
'db2_conn_errormsg',
'db2_connect',
'db2_cursor_type',
'db2_escape_string',
'db2_exec',
'db2_execute',
'db2_fetch_array',
'db2_fetch_assoc',
'db2_fetch_both',
'db2_fetch_object',
'db2_fetch_row',
'db2_field_display_size',
'db2_field_name',
'db2_field_num',
'db2_field_precision',
'db2_field_scale',
'db2_field_type',
'db2_field_width',
'db2_foreign_keys',
'db2_free_result',
'db2_free_stmt',
'db2_get_option',
'db2_last_insert_id'],
'ID3': ['id3_get_frame_long_name',
'id3_get_frame_short_name',
'id3_get_genre_id',
'id3_get_genre_list',
'id3_get_genre_name',
'id3_get_tag',
'id3_get_version',
'id3_remove_tag',
'id3_set_tag'],
'IDN': ['idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'],
'IIS': ['iis_add_server',
'iis_get_dir_security',
'iis_get_script_map',
'iis_get_server_by_comment',
'iis_get_server_by_path',
'iis_get_server_rights',
'iis_get_service_state',
'iis_remove_server',
'iis_set_app_settings',
'iis_set_dir_security',
'iis_set_script_map',
'iis_set_server_rights',
'iis_start_server',
'iis_start_service',
'iis_stop_server',
'iis_stop_service'],
'IMAP': ['imap_8bit',
'imap_alerts',
'imap_append',
'imap_base64',
'imap_binary',
'imap_body',
'imap_bodystruct',
'imap_check',
'imap_clearflag_full',
'imap_close',
'imap_createmailbox',
'imap_delete',
'imap_deletemailbox',
'imap_errors',
'imap_expunge',
'imap_fetch_overview',
'imap_fetchbody',
'imap_fetchheader',
'imap_fetchmime',
'imap_fetchstructure',
'imap_gc',
'imap_get_quota',
'imap_get_quotaroot',
'imap_getacl',
'imap_getmailboxes',
'imap_getsubscribed',
'imap_header',
'imap_headerinfo',
'imap_headers',
'imap_last_error',
'imap_list',
'imap_listmailbox',
'imap_listscan',
'imap_listsubscribed',
'imap_lsub',
'imap_mail_compose',
'imap_mail_copy',
'imap_mail_move',
'imap_mail',
'imap_mailboxmsginfo',
'imap_mime_header_decode',
'imap_msgno',
'imap_num_msg',
'imap_num_recent',
'imap_open',
'imap_ping',
'imap_qprint',
'imap_renamemailbox',
'imap_reopen',
'imap_rfc822_parse_adrlist',
'imap_rfc822_parse_headers',
'imap_rfc822_write_address',
'imap_savebody',
'imap_scanmailbox',
'imap_search',
'imap_set_quota',
'imap_setacl',
'imap_setflag_full',
'imap_sort',
'imap_status',
'imap_subscribe',
'imap_thread',
'imap_timeout',
'imap_uid',
'imap_undelete',
'imap_unsubscribe',
'imap_utf7_decode',
'imap_utf7_encode',
'imap_utf8'],
'Informix': ['ifx_affected_rows',
'ifx_blobinfile_mode',
'ifx_byteasvarchar',
'ifx_close',
'ifx_connect',
'ifx_copy_blob',
'ifx_create_blob',
'ifx_create_char',
'ifx_do',
'ifx_error',
'ifx_errormsg',
'ifx_fetch_row',
'ifx_fieldproperties',
'ifx_fieldtypes',
'ifx_free_blob',
'ifx_free_char',
'ifx_free_result',
'ifx_get_blob',
'ifx_get_char',
'ifx_getsqlca',
'ifx_htmltbl_result',
'ifx_nullformat',
'ifx_num_fields',
'ifx_num_rows',
'ifx_pconnect',
'ifx_prepare',
'ifx_query',
'ifx_textasvarchar',
'ifx_update_blob',
'ifx_update_char',
'ifxus_close_slob',
'ifxus_create_slob',
'ifxus_free_slob',
'ifxus_open_slob',
'ifxus_read_slob',
'ifxus_seek_slob',
'ifxus_tell_slob',
'ifxus_write_slob'],
'Ingres': ['ingres_autocommit_state',
'ingres_autocommit',
'ingres_charset',
'ingres_close',
'ingres_commit',
'ingres_connect',
'ingres_cursor',
'ingres_errno',
'ingres_error',
'ingres_errsqlstate',
'ingres_escape_string',
'ingres_execute',
'ingres_fetch_array',
'ingres_fetch_assoc',
'ingres_fetch_object',
'ingres_fetch_proc_return',
'ingres_fetch_row',
'ingres_field_length',
'ingres_field_name',
'ingres_field_nullable',
'ingres_field_precision',
'ingres_field_scale',
'ingres_field_type',
'ingres_free_result',
'ingres_next_error',
'ingres_num_fields',
'ingres_num_rows',
'ingres_pconnect',
'ingres_prepare',
'ingres_query',
'ingres_result_seek',
'ingres_rollback',
'ingres_set_environment',
'ingres_unbuffered_query'],
'Inotify': ['inotify_add_watch',
'inotify_init',
'inotify_queue_len',
'inotify_read',
'inotify_rm_watch'],
'JSON': ['json_decode', 'json_encode', 'json_last_error'],
'Java': ['java_last_exception_clear', 'java_last_exception_get'],
'Judy': ['judy_type', 'judy_version'],
'KADM5': ['kadm5_chpass_principal',
'kadm5_create_principal',
'kadm5_delete_principal',
'kadm5_destroy',
'kadm5_flush',
'kadm5_get_policies',
'kadm5_get_principal',
'kadm5_get_principals',
'kadm5_init_with_password',
'kadm5_modify_principal'],
'LDAP': ['ldap_8859_to_t61',
'ldap_add',
'ldap_bind',
'ldap_close',
'ldap_compare',
'ldap_connect',
'ldap_count_entries',
'ldap_delete',
'ldap_dn2ufn',
'ldap_err2str',
'ldap_errno',
'ldap_error',
'ldap_explode_dn',
'ldap_first_attribute',
'ldap_first_entry',
'ldap_first_reference',
'ldap_free_result',
'ldap_get_attributes',
'ldap_get_dn',
'ldap_get_entries',
'ldap_get_option',
'ldap_get_values_len',
'ldap_get_values',
'ldap_list',
'ldap_mod_add',
'ldap_mod_del',
'ldap_mod_replace',
'ldap_modify',
'ldap_next_attribute',
'ldap_next_entry',
'ldap_next_reference',
'ldap_parse_reference',
'ldap_parse_result',
'ldap_read',
'ldap_rename',
'ldap_sasl_bind',
'ldap_search',
'ldap_set_option',
'ldap_set_rebind_proc',
'ldap_sort',
'ldap_start_tls',
'ldap_t61_to_8859',
'ldap_unbind'],
'LZF': ['lzf_compress', 'lzf_decompress', 'lzf_optimized_for'],
'Libevent': ['event_add',
'event_base_free',
'event_base_loop',
'event_base_loopbreak',
'event_base_loopexit',
'event_base_new',
'event_base_priority_init',
'event_base_set',
'event_buffer_base_set',
'event_buffer_disable',
'event_buffer_enable',
'event_buffer_fd_set',
'event_buffer_free',
'event_buffer_new',
'event_buffer_priority_set',
'event_buffer_read',
'event_buffer_set_callback',
'event_buffer_timeout_set',
'event_buffer_watermark_set',
'event_buffer_write',
'event_del',
'event_free',
'event_new',
'event_set'],
'Lotus Notes': ['notes_body',
'notes_copy_db',
'notes_create_db',
'notes_create_note',
'notes_drop_db',
'notes_find_note',
'notes_header_info',
'notes_list_msgs',
'notes_mark_read',
'notes_mark_unread',
'notes_nav_create',
'notes_search',
'notes_unread',
'notes_version'],
'MCVE': ['m_checkstatus',
'm_completeauthorizations',
'm_connect',
'm_connectionerror',
'm_deletetrans',
'm_destroyconn',
'm_destroyengine',
'm_getcell',
'm_getcellbynum',
'm_getcommadelimited',
'm_getheader',
'm_initconn',
'm_initengine',
'm_iscommadelimited',
'm_maxconntimeout',
'm_monitor',
'm_numcolumns',
'm_numrows',
'm_parsecommadelimited',
'm_responsekeys'],
'Mail': ['ezmlm_hash', 'mail'],
'Mailparse': ['mailparse_determine_best_xfer_encoding',
'mailparse_msg_create',
'mailparse_msg_extract_part_file',
'mailparse_msg_extract_part',
'mailparse_msg_extract_whole_part_file',
'mailparse_msg_free',
'mailparse_msg_get_part_data',
'mailparse_msg_get_part',
'mailparse_msg_get_structure',
'mailparse_msg_parse_file',
'mailparse_msg_parse',
'mailparse_rfc822_parse_addresses',
'mailparse_stream_encode',
'mailparse_uudecode_all'],
'Math': ['abs',
'acos',
'acosh',
'asin',
'asinh',
'atan2',
'atan',
'atanh',
'base_convert',
'bindec',
'ceil',
'cos',
'cosh',
'decbin',
'dechex',
'decoct',
'deg2rad',
'exp',
'expm1'],
'MaxDB': ['maxdb_affected_rows',
'maxdb_autocommit',
'maxdb_bind_param',
'maxdb_bind_result',
'maxdb_change_user',
'maxdb_character_set_name',
'maxdb_client_encoding',
'maxdb_close_long_data',
'maxdb_close',
'maxdb_commit',
'maxdb_connect_errno',
'maxdb_connect_error',
'maxdb_connect',
'maxdb_data_seek',
'maxdb_debug',
'maxdb_disable_reads_from_master',
'maxdb_disable_rpl_parse',
'maxdb_dump_debug_info',
'maxdb_embedded_connect',
'maxdb_enable_reads_from_master',
'maxdb_enable_rpl_parse',
'maxdb_errno',
'maxdb_error',
'maxdb_escape_string',
'maxdb_execute',
'maxdb_fetch_array',
'maxdb_fetch_assoc',
'maxdb_fetch_field_direct',
'maxdb_fetch_field',
'maxdb_fetch_fields',
'maxdb_fetch_lengths',
'maxdb_fetch_object',
'maxdb_fetch_row',
'maxdb_fetch',
'maxdb_field_count',
'maxdb_field_seek',
'maxdb_field_tell',
'maxdb_free_result',
'maxdb_get_client_info',
'maxdb_get_client_version',
'maxdb_get_host_info',
'maxdb_get_metadata',
'maxdb_get_proto_info',
'maxdb_get_server_info',
'maxdb_get_server_version',
'maxdb_info',
'maxdb_init',
'maxdb_insert_id',
'maxdb_kill',
'maxdb_master_query',
'maxdb_more_results',
'maxdb_multi_query',
'maxdb_next_result',
'maxdb_num_fields',
'maxdb_num_rows',
'maxdb_options',
'maxdb_param_count',
'maxdb_ping',
'maxdb_prepare',
'maxdb_query',
'maxdb_real_connect',
'maxdb_real_escape_string',
'maxdb_real_query',
'maxdb_report',
'maxdb_rollback',
'maxdb_rpl_parse_enabled',
'maxdb_rpl_probe',
'maxdb_rpl_query_type',
'maxdb_select_db',
'maxdb_send_long_data',
'maxdb_send_query',
'maxdb_server_end',
'maxdb_server_init',
'maxdb_set_opt',
'maxdb_sqlstate',
'maxdb_ssl_set',
'maxdb_stat',
'maxdb_stmt_affected_rows'],
'Mcrypt': ['mcrypt_cbc',
'mcrypt_cfb',
'mcrypt_create_iv',
'mcrypt_decrypt',
'mcrypt_ecb',
'mcrypt_enc_get_algorithms_name',
'mcrypt_enc_get_block_size',
'mcrypt_enc_get_iv_size',
'mcrypt_enc_get_key_size',
'mcrypt_enc_get_modes_name',
'mcrypt_enc_get_supported_key_sizes',
'mcrypt_enc_is_block_algorithm_mode',
'mcrypt_enc_is_block_algorithm',
'mcrypt_enc_is_block_mode',
'mcrypt_enc_self_test',
'mcrypt_encrypt',
'mcrypt_generic_deinit',
'mcrypt_generic_end',
'mcrypt_generic_init',
'mcrypt_generic',
'mcrypt_get_block_size',
'mcrypt_get_cipher_name',
'mcrypt_get_iv_size',
'mcrypt_get_key_size',
'mcrypt_list_algorithms',
'mcrypt_list_modes',
'mcrypt_module_close',
'mcrypt_module_get_algo_block_size',
'mcrypt_module_get_algo_key_size',
'mcrypt_module_get_supported_key_sizes',
'mcrypt_module_is_block_algorithm_mode',
'mcrypt_module_is_block_algorithm',
'mcrypt_module_is_block_mode',
'mcrypt_module_open',
'mcrypt_module_self_test',
'mcrypt_ofb',
'mdecrypt_generic'],
'Memcache': ['memcache_debug'],
'Mhash': ['mhash_count',
'mhash_get_block_size',
'mhash_get_hash_name',
'mhash_keygen_s2k',
'mhash'],
'Ming': ['ming_keypress',
'ming_setcubicthreshold',
'ming_setscale',
'ming_setswfcompression',
'ming_useconstants',
'ming_useswfversion'],
'Misc.': ['connection_aborted',
'connection_status',
'connection_timeout',
'constant',
'define',
'defined',
'die',
'eval',
'exit',
'get_browser',
'__halt_compiler',
'highlight_file',
'highlight_string',
'ignore_user_abort',
'pack',
'php_check_syntax',
'php_strip_whitespace',
'show_source',
'sleep',
'sys_getloadavg',
'time_nanosleep',
'time_sleep_until',
'uniqid',
'unpack',
'usleep'],
'Mongo': ['bson_decode', 'bson_encode'],
'Msession': ['msession_connect',
'msession_count',
'msession_create',
'msession_destroy',
'msession_disconnect',
'msession_find',
'msession_get_array',
'msession_get_data',
'msession_get',
'msession_inc',
'msession_list',
'msession_listvar',
'msession_lock',
'msession_plugin',
'msession_randstr',
'msession_set_array',
'msession_set_data',
'msession_set',
'msession_timeout',
'msession_uniq',
'msession_unlock'],
'Mssql': ['mssql_bind',
'mssql_close',
'mssql_connect',
'mssql_data_seek',
'mssql_execute',
'mssql_fetch_array',
'mssql_fetch_assoc',
'mssql_fetch_batch',
'mssql_fetch_field',
'mssql_fetch_object',
'mssql_fetch_row',
'mssql_field_length',
'mssql_field_name',
'mssql_field_seek',
'mssql_field_type',
'mssql_free_result',
'mssql_free_statement',
'mssql_get_last_message',
'mssql_guid_string',
'mssql_init',
'mssql_min_error_severity',
'mssql_min_message_severity',
'mssql_next_result',
'mssql_num_fields',
'mssql_num_rows',
'mssql_pconnect',
'mssql_query',
'mssql_result',
'mssql_rows_affected',
'mssql_select_db'],
'Multibyte String': ['mb_check_encoding',
'mb_convert_case',
'mb_convert_encoding',
'mb_convert_kana',
'mb_convert_variables',
'mb_decode_mimeheader',
'mb_decode_numericentity',
'mb_detect_encoding',
'mb_detect_order',
'mb_encode_mimeheader',
'mb_encode_numericentity',
'mb_encoding_aliases',
'mb_ereg_match',
'mb_ereg_replace',
'mb_ereg_search_getpos',
'mb_ereg_search_getregs',
'mb_ereg_search_init',
'mb_ereg_search_pos',
'mb_ereg_search_regs',
'mb_ereg_search_setpos',
'mb_ereg_search',
'mb_ereg',
'mb_eregi_replace',
'mb_eregi',
'mb_get_info',
'mb_http_input',
'mb_http_output',
'mb_internal_encoding',
'mb_language',
'mb_list_encodings',
'mb_output_handler',
'mb_parse_str',
'mb_preferred_mime_name',
'mb_regex_encoding',
'mb_regex_set_options',
'mb_send_mail',
'mb_split',
'mb_strcut',
'mb_strimwidth',
'mb_stripos',
'mb_stristr',
'mb_strlen',
'mb_strpos',
'mb_strrchr',
'mb_strrichr',
'mb_strripos',
'mb_strrpos',
'mb_strstr',
'mb_strtolower',
'mb_strtoupper',
'mb_strwidth',
'mb_substitute_character',
'mb_substr_count',
'mb_substr'],
'MySQL': ['mysql_affected_rows',
'mysql_client_encoding',
'mysql_close',
'mysql_connect',
'mysql_create_db',
'mysql_data_seek',
'mysql_db_name',
'mysql_db_query',
'mysql_drop_db',
'mysql_errno',
'mysql_error',
'mysql_escape_string',
'mysql_fetch_array',
'mysql_fetch_assoc',
'mysql_fetch_field',
'mysql_fetch_lengths',
'mysql_fetch_object',
'mysql_fetch_row',
'mysql_field_flags',
'mysql_field_len',
'mysql_field_name',
'mysql_field_seek',
'mysql_field_table',
'mysql_field_type',
'mysql_free_result',
'mysql_get_client_info',
'mysql_get_host_info',
'mysql_get_proto_info',
'mysql_get_server_info',
'mysql_info',
'mysql_insert_id',
'mysql_list_dbs',
'mysql_list_fields',
'mysql_list_processes',
'mysql_list_tables',
'mysql_num_fields',
'mysql_num_rows',
'mysql_pconnect',
'mysql_ping',
'mysql_query',
'mysql_real_escape_string',
'mysql_result',
'mysql_select_db',
'mysql_set_charset',
'mysql_stat',
'mysql_tablename',
'mysql_thread_id',
'mysql_unbuffered_query'],
'NSAPI': ['nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'],
'Ncurses': ['ncurses_addch',
'ncurses_addchnstr',
'ncurses_addchstr',
'ncurses_addnstr',
'ncurses_addstr',
'ncurses_assume_default_colors',
'ncurses_attroff',
'ncurses_attron',
'ncurses_attrset',
'ncurses_baudrate',
'ncurses_beep',
'ncurses_bkgd',
'ncurses_bkgdset',
'ncurses_border',
'ncurses_bottom_panel',
'ncurses_can_change_color',
'ncurses_cbreak',
'ncurses_clear',
'ncurses_clrtobot',
'ncurses_clrtoeol',
'ncurses_color_content',
'ncurses_color_set',
'ncurses_curs_set',
'ncurses_def_prog_mode',
'ncurses_def_shell_mode',
'ncurses_define_key',
'ncurses_del_panel',
'ncurses_delay_output',
'ncurses_delch',
'ncurses_deleteln',
'ncurses_delwin',
'ncurses_doupdate',
'ncurses_echo',
'ncurses_echochar',
'ncurses_end',
'ncurses_erase',
'ncurses_erasechar',
'ncurses_filter',
'ncurses_flash',
'ncurses_flushinp',
'ncurses_getch',
'ncurses_getmaxyx',
'ncurses_getmouse',
'ncurses_getyx',
'ncurses_halfdelay',
'ncurses_has_colors',
'ncurses_has_ic',
'ncurses_has_il',
'ncurses_has_key',
'ncurses_hide_panel',
'ncurses_hline',
'ncurses_inch',
'ncurses_init_color',
'ncurses_init_pair',
'ncurses_init',
'ncurses_insch',
'ncurses_insdelln',
'ncurses_insertln',
'ncurses_insstr',
'ncurses_instr',
'ncurses_isendwin',
'ncurses_keyok',
'ncurses_keypad',
'ncurses_killchar',
'ncurses_longname',
'ncurses_meta',
'ncurses_mouse_trafo',
'ncurses_mouseinterval',
'ncurses_mousemask',
'ncurses_move_panel',
'ncurses_move',
'ncurses_mvaddch',
'ncurses_mvaddchnstr',
'ncurses_mvaddchstr',
'ncurses_mvaddnstr',
'ncurses_mvaddstr',
'ncurses_mvcur',
'ncurses_mvdelch',
'ncurses_mvgetch',
'ncurses_mvhline',
'ncurses_mvinch',
'ncurses_mvvline',
'ncurses_mvwaddstr',
'ncurses_napms',
'ncurses_new_panel',
'ncurses_newpad',
'ncurses_newwin',
'ncurses_nl',
'ncurses_nocbreak',
'ncurses_noecho',
'ncurses_nonl',
'ncurses_noqiflush',
'ncurses_noraw',
'ncurses_pair_content',
'ncurses_panel_above',
'ncurses_panel_below',
'ncurses_panel_window',
'ncurses_pnoutrefresh',
'ncurses_prefresh',
'ncurses_putp',
'ncurses_qiflush',
'ncurses_raw',
'ncurses_refresh',
'ncurses_replace_panel',
'ncurses_reset_prog_mode',
'ncurses_reset_shell_mode',
'ncurses_resetty',
'ncurses_savetty',
'ncurses_scr_dump',
'ncurses_scr_init',
'ncurses_scr_restore',
'ncurses_scr_set',
'ncurses_scrl',
'ncurses_show_panel',
'ncurses_slk_attr',
'ncurses_slk_attroff',
'ncurses_slk_attron',
'ncurses_slk_attrset',
'ncurses_slk_clear',
'ncurses_slk_color',
'ncurses_slk_init',
'ncurses_slk_noutrefresh',
'ncurses_slk_refresh',
'ncurses_slk_restore',
'ncurses_slk_set',
'ncurses_slk_touch',
'ncurses_standend',
'ncurses_standout',
'ncurses_start_color',
'ncurses_termattrs',
'ncurses_termname',
'ncurses_timeout',
'ncurses_top_panel',
'ncurses_typeahead',
'ncurses_ungetch',
'ncurses_ungetmouse',
'ncurses_update_panels',
'ncurses_use_default_colors',
'ncurses_use_env',
'ncurses_use_extended_names',
'ncurses_vidattr',
'ncurses_vline',
'ncurses_waddch',
'ncurses_waddstr',
'ncurses_wattroff',
'ncurses_wattron',
'ncurses_wattrset',
'ncurses_wborder',
'ncurses_wclear',
'ncurses_wcolor_set',
'ncurses_werase',
'ncurses_wgetch',
'ncurses_whline',
'ncurses_wmouse_trafo',
'ncurses_wmove',
'ncurses_wnoutrefresh',
'ncurses_wrefresh',
'ncurses_wstandend',
'ncurses_wstandout',
'ncurses_wvline'],
'Network': ['checkdnsrr',
'closelog',
'define_syslog_variables',
'dns_check_record',
'dns_get_mx',
'dns_get_record',
'fsockopen',
'gethostbyaddr',
'gethostbyname',
'gethostbynamel'],
'Newt': ['newt_bell',
'newt_button_bar',
'newt_button',
'newt_centered_window',
'newt_checkbox_get_value',
'newt_checkbox_set_flags',
'newt_checkbox_set_value',
'newt_checkbox_tree_add_item',
'newt_checkbox_tree_find_item',
'newt_checkbox_tree_get_current',
'newt_checkbox_tree_get_entry_value',
'newt_checkbox_tree_get_multi_selection',
'newt_checkbox_tree_get_selection',
'newt_checkbox_tree_multi',
'newt_checkbox_tree_set_current',
'newt_checkbox_tree_set_entry_value',
'newt_checkbox_tree_set_entry',
'newt_checkbox_tree_set_width',
'newt_checkbox_tree',
'newt_checkbox',
'newt_clear_key_buffer'],
'OAuth': ['oauth_get_sbs', 'oauth_urlencode'],
'OCI8': ['oci_bind_array_by_name',
'oci_bind_by_name',
'oci_cancel',
'oci_close',
'oci_commit',
'oci_connect',
'oci_define_by_name',
'oci_error',
'oci_execute',
'oci_fetch_all',
'oci_fetch_array',
'oci_fetch_assoc',
'oci_fetch_object',
'oci_fetch_row',
'oci_fetch',
'oci_field_is_null',
'oci_field_name',
'oci_field_precision',
'oci_field_scale',
'oci_field_size',
'oci_field_type_raw',
'oci_field_type',
'oci_free_statement',
'oci_internal_debug',
'oci_lob_copy',
'oci_lob_is_equal',
'oci_new_collection',
'oci_new_connect',
'oci_new_cursor',
'oci_new_descriptor',
'oci_num_fields',
'oci_num_rows',
'oci_parse',
'oci_password_change',
'oci_pconnect',
'oci_result',
'oci_rollback',
'oci_server_version',
'oci_set_action',
'oci_set_client_identifier',
'oci_set_client_info',
'oci_set_edition',
'oci_set_module_name',
'oci_set_prefetch',
'oci_statement_type'],
'ODBC': ['odbc_autocommit',
'odbc_binmode',
'odbc_close_all',
'odbc_close',
'odbc_columnprivileges',
'odbc_columns',
'odbc_commit',
'odbc_connect',
'odbc_cursor',
'odbc_data_source',
'odbc_do',
'odbc_error',
'odbc_errormsg',
'odbc_exec',
'odbc_execute',
'odbc_fetch_array',
'odbc_fetch_into',
'odbc_fetch_object',
'odbc_fetch_row',
'odbc_field_len',
'odbc_field_name',
'odbc_field_num',
'odbc_field_precision',
'odbc_field_scale',
'odbc_field_type',
'odbc_foreignkeys',
'odbc_free_result',
'odbc_gettypeinfo',
'odbc_longreadlen',
'odbc_next_result',
'odbc_num_fields',
'odbc_num_rows',
'odbc_pconnect',
'odbc_prepare',
'odbc_primarykeys',
'odbc_procedurecolumns',
'odbc_procedures',
'odbc_result_all',
'odbc_result',
'odbc_rollback',
'odbc_setoption',
'odbc_specialcolumns',
'odbc_statistics',
'odbc_tableprivileges',
'odbc_tables'],
'Object Aggregation': ['aggregate_info',
'aggregate_methods_by_list',
'aggregate_methods_by_regexp'],
'Object overloading': ['overload'],
'OpenAL': ['openal_buffer_create',
'openal_buffer_data',
'openal_buffer_destroy',
'openal_buffer_get',
'openal_buffer_loadwav',
'openal_context_create',
'openal_context_current',
'openal_context_destroy',
'openal_context_process',
'openal_context_suspend',
'openal_device_close',
'openal_device_open',
'openal_listener_get',
'openal_listener_set',
'openal_source_create',
'openal_source_destroy',
'openal_source_get',
'openal_source_pause',
'openal_source_play',
'openal_source_rewind',
'openal_source_set',
'openal_source_stop',
'openal_stream'],
'OpenSSL': ['openssl_csr_export_to_file',
'openssl_csr_export',
'openssl_csr_get_public_key',
'openssl_csr_get_subject',
'openssl_csr_new',
'openssl_csr_sign',
'openssl_decrypt',
'openssl_dh_compute_key',
'openssl_digest',
'openssl_encrypt',
'openssl_error_string',
'openssl_free_key',
'openssl_get_cipher_methods',
'openssl_get_md_methods',
'openssl_get_privatekey',
'openssl_get_publickey',
'openssl_open',
'openssl_pkcs12_export_to_file',
'openssl_pkcs12_export',
'openssl_pkcs12_read',
'openssl_pkcs7_decrypt',
'openssl_pkcs7_encrypt',
'openssl_pkcs7_sign',
'openssl_pkcs7_verify',
'openssl_pkey_export_to_file',
'openssl_pkey_export',
'openssl_pkey_free',
'openssl_pkey_get_details',
'openssl_pkey_get_private',
'openssl_pkey_get_public',
'openssl_pkey_new',
'openssl_private_decrypt',
'openssl_private_encrypt',
'openssl_public_decrypt',
'openssl_public_encrypt',
'openssl_random_pseudo_bytes',
'openssl_seal',
'openssl_sign',
'openssl_verify',
'openssl_x509_check_private_key',
'openssl_x509_checkpurpose',
'openssl_x509_export_to_file',
'openssl_x509_export',
'openssl_x509_free',
'openssl_x509_parse',
'openssl_x509_read'],
'Output Control': ['flush',
'ob_clean',
'ob_end_clean',
'ob_end_flush',
'ob_flush',
'ob_get_clean',
'ob_get_contents',
'ob_get_flush',
'ob_get_length',
'ob_get_level',
'ob_get_status',
'ob_gzhandler',
'ob_implicit_flush',
'ob_list_handlers',
'ob_start',
'output_add_rewrite_var',
'output_reset_rewrite_vars'],
'Ovrimos SQL': ['ovrimos_close',
'ovrimos_commit',
'ovrimos_connect',
'ovrimos_cursor',
'ovrimos_exec',
'ovrimos_execute',
'ovrimos_fetch_into',
'ovrimos_fetch_row',
'ovrimos_field_len',
'ovrimos_field_name',
'ovrimos_field_num',
'ovrimos_field_type',
'ovrimos_free_result',
'ovrimos_longreadlen',
'ovrimos_num_fields',
'ovrimos_num_rows',
'ovrimos_prepare',
'ovrimos_result_all',
'ovrimos_result',
'ovrimos_rollback'],
'PCNTL': ['pcntl_alarm',
'pcntl_exec',
'pcntl_fork',
'pcntl_getpriority',
'pcntl_setpriority',
'pcntl_signal_dispatch',
'pcntl_signal',
'pcntl_sigprocmask',
'pcntl_sigtimedwait',
'pcntl_sigwaitinfo',
'pcntl_wait',
'pcntl_waitpid',
'pcntl_wexitstatus',
'pcntl_wifexited',
'pcntl_wifsignaled',
'pcntl_wifstopped',
'pcntl_wstopsig',
'pcntl_wtermsig'],
'PCRE': ['preg_filter',
'preg_grep',
'preg_last_error',
'preg_match_all',
'preg_match',
'preg_quote',
'preg_replace_callback',
'preg_replace',
'preg_split'],
'PDF': ['PDF_activate_item',
'PDF_add_annotation',
'PDF_add_bookmark',
'PDF_add_launchlink',
'PDF_add_locallink',
'PDF_add_nameddest',
'PDF_add_note',
'PDF_add_outline',
'PDF_add_pdflink',
'PDF_add_table_cell',
'PDF_add_textflow',
'PDF_add_thumbnail',
'PDF_add_weblink',
'PDF_arc',
'PDF_arcn',
'PDF_attach_file',
'PDF_begin_document',
'PDF_begin_font',
'PDF_begin_glyph',
'PDF_begin_item',
'PDF_begin_layer',
'PDF_begin_page_ext',
'PDF_begin_page',
'PDF_begin_pattern',
'PDF_begin_template_ext',
'PDF_begin_template',
'PDF_circle',
'PDF_clip',
'PDF_close_image',
'PDF_close_pdi_page',
'PDF_close_pdi',
'PDF_close',
'PDF_closepath_fill_stroke',
'PDF_closepath_stroke',
'PDF_closepath',
'PDF_concat',
'PDF_continue_text',
'PDF_create_3dview',
'PDF_create_action',
'PDF_create_annotation',
'PDF_create_bookmark',
'PDF_create_field',
'PDF_create_fieldgroup',
'PDF_create_gstate',
'PDF_create_pvf',
'PDF_create_textflow',
'PDF_curveto',
'PDF_define_layer',
'PDF_delete_pvf',
'PDF_delete_table',
'PDF_delete_textflow',
'PDF_delete',
'PDF_encoding_set_char',
'PDF_end_document',
'PDF_end_font',
'PDF_end_glyph',
'PDF_end_item',
'PDF_end_layer',
'PDF_end_page_ext',
'PDF_end_page',
'PDF_end_pattern',
'PDF_end_template',
'PDF_endpath',
'PDF_fill_imageblock',
'PDF_fill_pdfblock',
'PDF_fill_stroke',
'PDF_fill_textblock',
'PDF_fill',
'PDF_findfont',
'PDF_fit_image',
'PDF_fit_pdi_page',
'PDF_fit_table',
'PDF_fit_textflow',
'PDF_fit_textline',
'PDF_get_apiname',
'PDF_get_buffer',
'PDF_get_errmsg',
'PDF_get_errnum',
'PDF_get_font',
'PDF_get_fontname',
'PDF_get_fontsize',
'PDF_get_image_height',
'PDF_get_image_width',
'PDF_get_majorversion',
'PDF_get_minorversion',
'PDF_get_parameter',
'PDF_get_pdi_parameter',
'PDF_get_pdi_value',
'PDF_get_value',
'PDF_info_font',
'PDF_info_matchbox',
'PDF_info_table',
'PDF_info_textflow',
'PDF_info_textline',
'PDF_initgraphics',
'PDF_lineto',
'PDF_load_3ddata',
'PDF_load_font',
'PDF_load_iccprofile',
'PDF_load_image',
'PDF_makespotcolor',
'PDF_moveto',
'PDF_new',
'PDF_open_ccitt',
'PDF_open_file',
'PDF_open_gif',
'PDF_open_image_file',
'PDF_open_image',
'PDF_open_jpeg',
'PDF_open_memory_image',
'PDF_open_pdi_document',
'PDF_open_pdi_page',
'PDF_open_pdi',
'PDF_open_tiff',
'PDF_pcos_get_number',
'PDF_pcos_get_stream',
'PDF_pcos_get_string',
'PDF_place_image',
'PDF_place_pdi_page',
'PDF_process_pdi',
'PDF_rect',
'PDF_restore',
'PDF_resume_page',
'PDF_rotate',
'PDF_save',
'PDF_scale',
'PDF_set_border_color',
'PDF_set_border_dash',
'PDF_set_border_style',
'PDF_set_char_spacing',
'PDF_set_duration',
'PDF_set_gstate',
'PDF_set_horiz_scaling',
'PDF_set_info_author',
'PDF_set_info_creator',
'PDF_set_info_keywords',
'PDF_set_info_subject',
'PDF_set_info_title',
'PDF_set_info',
'PDF_set_layer_dependency',
'PDF_set_leading',
'PDF_set_parameter',
'PDF_set_text_matrix',
'PDF_set_text_pos',
'PDF_set_text_rendering',
'PDF_set_text_rise',
'PDF_set_value',
'PDF_set_word_spacing',
'PDF_setcolor',
'PDF_setdash',
'PDF_setdashpattern',
'PDF_setflat',
'PDF_setfont',
'PDF_setgray_fill',
'PDF_setgray_stroke',
'PDF_setgray',
'PDF_setlinecap',
'PDF_setlinejoin',
'PDF_setlinewidth',
'PDF_setmatrix',
'PDF_setmiterlimit',
'PDF_setpolydash',
'PDF_setrgbcolor_fill',
'PDF_setrgbcolor_stroke',
'PDF_setrgbcolor',
'PDF_shading_pattern',
'PDF_shading',
'PDF_shfill',
'PDF_show_boxed',
'PDF_show_xy',
'PDF_show',
'PDF_skew',
'PDF_stringwidth',
'PDF_stroke',
'PDF_suspend_page',
'PDF_translate',
'PDF_utf16_to_utf8',
'PDF_utf32_to_utf16',
'PDF_utf8_to_utf16'],
'PHP Options/Info': ['assert_options',
'assert',
'dl',
'extension_loaded',
'gc_collect_cycles',
'gc_disable',
'gc_enable',
'gc_enabled',
'get_cfg_var',
'get_current_user',
'get_defined_constants',
'get_extension_funcs',
'get_include_path',
'get_included_files',
'get_loaded_extensions',
'get_magic_quotes_gpc',
'get_magic_quotes_runtime',
'get_required_files',
'getenv',
'getlastmod',
'getmygid',
'getmyinode',
'getmypid',
'getmyuid',
'getopt',
'getrusage',
'ini_alter',
'ini_get_all',
'ini_get',
'ini_restore',
'ini_set',
'magic_quotes_runtime',
'memory_get_peak_usage',
'memory_get_usage',
'php_ini_loaded_file',
'php_ini_scanned_files',
'php_logo_guid',
'php_sapi_name',
'php_uname',
'phpcredits',
'phpinfo',
'phpversion',
'putenv',
'restore_include_path',
'set_include_path',
'set_magic_quotes_runtime',
'set_time_limit',
'sys_get_temp_dir',
'version_compare',
'zend_logo_guid',
'zend_thread_id',
'zend_version'],
'POSIX': ['posix_access',
'posix_ctermid',
'posix_errno',
'posix_get_last_error',
'posix_getcwd',
'posix_getegid',
'posix_geteuid',
'posix_getgid',
'posix_getgrgid',
'posix_getgrnam',
'posix_getgroups',
'posix_getlogin',
'posix_getpgid',
'posix_getpgrp',
'posix_getpid',
'posix_getppid',
'posix_getpwnam',
'posix_getpwuid',
'posix_getrlimit',
'posix_getsid',
'posix_getuid',
'posix_initgroups',
'posix_isatty',
'posix_kill',
'posix_mkfifo',
'posix_mknod',
'posix_setegid',
'posix_seteuid',
'posix_setgid',
'posix_setpgid',
'posix_setsid',
'posix_setuid',
'posix_strerror',
'posix_times',
'posix_ttyname',
'posix_uname'],
'POSIX Regex': ['ereg_replace',
'ereg',
'eregi_replace',
'eregi',
'split',
'spliti',
'sql_regcase'],
'PS': ['ps_add_bookmark',
'ps_add_launchlink',
'ps_add_locallink',
'ps_add_note',
'ps_add_pdflink',
'ps_add_weblink',
'ps_arc',
'ps_arcn',
'ps_begin_page',
'ps_begin_pattern',
'ps_begin_template',
'ps_circle',
'ps_clip',
'ps_close_image',
'ps_close',
'ps_closepath_stroke',
'ps_closepath',
'ps_continue_text',
'ps_curveto',
'ps_delete',
'ps_end_page',
'ps_end_pattern',
'ps_end_template',
'ps_fill_stroke',
'ps_fill',
'ps_findfont',
'ps_get_buffer',
'ps_get_parameter',
'ps_get_value',
'ps_hyphenate',
'ps_include_file',
'ps_lineto',
'ps_makespotcolor',
'ps_moveto',
'ps_new',
'ps_open_file',
'ps_open_image_file',
'ps_open_image',
'ps_open_memory_image',
'ps_place_image',
'ps_rect',
'ps_restore',
'ps_rotate',
'ps_save',
'ps_scale',
'ps_set_border_color',
'ps_set_border_dash',
'ps_set_border_style',
'ps_set_info',
'ps_set_parameter',
'ps_set_text_pos',
'ps_set_value',
'ps_setcolor',
'ps_setdash',
'ps_setflat',
'ps_setfont',
'ps_setgray',
'ps_setlinecap',
'ps_setlinejoin',
'ps_setlinewidth',
'ps_setmiterlimit',
'ps_setoverprintmode',
'ps_setpolydash',
'ps_shading_pattern',
'ps_shading',
'ps_shfill',
'ps_show_boxed',
'ps_show_xy2',
'ps_show_xy',
'ps_show2',
'ps_show',
'ps_string_geometry',
'ps_stringwidth',
'ps_stroke',
'ps_symbol_name',
'ps_symbol_width',
'ps_symbol',
'ps_translate'],
'Paradox': ['px_close',
'px_create_fp',
'px_date2string',
'px_delete_record',
'px_delete',
'px_get_field',
'px_get_info',
'px_get_parameter',
'px_get_record',
'px_get_schema',
'px_get_value',
'px_insert_record',
'px_new',
'px_numfields',
'px_numrecords',
'px_open_fp',
'px_put_record',
'px_retrieve_record',
'px_set_blob_file',
'px_set_parameter',
'px_set_tablename',
'px_set_targetencoding',
'px_set_value',
'px_timestamp2string',
'px_update_record'],
'Parsekit': ['parsekit_compile_file',
'parsekit_compile_string',
'parsekit_func_arginfo'],
'PostgreSQL': ['pg_affected_rows',
'pg_cancel_query',
'pg_client_encoding',
'pg_close',
'pg_connect',
'pg_connection_busy',
'pg_connection_reset',
'pg_connection_status',
'pg_convert',
'pg_copy_from',
'pg_copy_to',
'pg_dbname',
'pg_delete',
'pg_end_copy',
'pg_escape_bytea',
'pg_escape_string',
'pg_execute',
'pg_fetch_all_columns',
'pg_fetch_all',
'pg_fetch_array',
'pg_fetch_assoc',
'pg_fetch_object',
'pg_fetch_result',
'pg_fetch_row',
'pg_field_is_null',
'pg_field_name',
'pg_field_num',
'pg_field_prtlen',
'pg_field_size',
'pg_field_table',
'pg_field_type_oid',
'pg_field_type',
'pg_free_result',
'pg_get_notify',
'pg_get_pid',
'pg_get_result',
'pg_host',
'pg_insert',
'pg_last_error',
'pg_last_notice',
'pg_last_oid',
'pg_lo_close',
'pg_lo_create',
'pg_lo_export',
'pg_lo_import',
'pg_lo_open',
'pg_lo_read_all',
'pg_lo_read',
'pg_lo_seek',
'pg_lo_tell',
'pg_lo_unlink',
'pg_lo_write',
'pg_meta_data',
'pg_num_fields',
'pg_num_rows',
'pg_options',
'pg_parameter_status',
'pg_pconnect',
'pg_ping',
'pg_port',
'pg_prepare'],
'Printer': ['printer_abort',
'printer_close',
'printer_create_brush',
'printer_create_dc',
'printer_create_font',
'printer_create_pen',
'printer_delete_brush',
'printer_delete_dc',
'printer_delete_font',
'printer_delete_pen',
'printer_draw_bmp',
'printer_draw_chord',
'printer_draw_elipse',
'printer_draw_line',
'printer_draw_pie',
'printer_draw_rectangle',
'printer_draw_roundrect',
'printer_draw_text',
'printer_end_doc',
'printer_end_page',
'printer_get_option',
'printer_list',
'printer_logical_fontheight',
'printer_open',
'printer_select_brush',
'printer_select_font',
'printer_select_pen',
'printer_set_option',
'printer_start_doc',
'printer_start_page',
'printer_write'],
'Program execution': ['escapeshellarg',
'escapeshellcmd',
'exec',
'passthru',
'proc_close',
'proc_get_status',
'proc_nice',
'proc_open',
'proc_terminate',
'shell_exec',
'system'],
'Pspell': ['pspell_add_to_personal',
'pspell_add_to_session',
'pspell_check',
'pspell_clear_session',
'pspell_config_create',
'pspell_config_data_dir',
'pspell_config_dict_dir',
'pspell_config_ignore',
'pspell_config_mode',
'pspell_config_personal',
'pspell_config_repl',
'pspell_config_runtogether',
'pspell_config_save_repl'],
'RPM Reader': ['rpm_close',
'rpm_get_tag',
'rpm_is_valid',
'rpm_open',
'rpm_version'],
'RRD': ['rrd_create',
'rrd_error',
'rrd_fetch',
'rrd_first',
'rrd_graph',
'rrd_info',
'rrd_last',
'rrd_lastupdate',
'rrd_restore',
'rrd_tune',
'rrd_update',
'rrd_xport'],
'Radius': ['radius_acct_open',
'radius_add_server',
'radius_auth_open',
'radius_close',
'radius_config',
'radius_create_request',
'radius_cvt_addr',
'radius_cvt_int',
'radius_cvt_string',
'radius_demangle_mppe_key',
'radius_demangle',
'radius_get_attr',
'radius_get_vendor_attr',
'radius_put_addr',
'radius_put_attr',
'radius_put_int',
'radius_put_string',
'radius_put_vendor_addr',
'radius_put_vendor_attr',
'radius_put_vendor_int',
'radius_put_vendor_string',
'radius_request_authenticator',
'radius_send_request',
'radius_server_secret',
'radius_strerror'],
'Rar': ['rar_wrapper_cache_stats'],
'Readline': ['readline_add_history',
'readline_callback_handler_install',
'readline_callback_handler_remove',
'readline_callback_read_char',
'readline_clear_history',
'readline_completion_function',
'readline_info',
'readline_list_history',
'readline_on_new_line',
'readline_read_history',
'readline_redisplay',
'readline_write_history',
'readline'],
'Recode': ['recode_file', 'recode_string', 'recode'],
'SNMP': ['snmp_get_quick_print',
'snmp_get_valueretrieval',
'snmp_read_mib',
'snmp_set_enum_print',
'snmp_set_oid_numeric_print',
'snmp_set_oid_output_format',
'snmp_set_quick_print',
'snmp_set_valueretrieval',
'snmp2_get',
'snmp2_getnext',
'snmp2_real_walk',
'snmp2_set',
'snmp2_walk',
'snmp3_get',
'snmp3_getnext',
'snmp3_real_walk',
'snmp3_set',
'snmp3_walk',
'snmpget',
'snmpgetnext',
'snmprealwalk',
'snmpset',
'snmpwalk',
'snmpwalkoid'],
'SOAP': ['is_soap_fault', 'use_soap_error_handler'],
'SPL': ['class_implements',
'class_parents',
'iterator_apply',
'iterator_count',
'iterator_to_array',
'spl_autoload_call',
'spl_autoload_extensions',
'spl_autoload_functions',
'spl_autoload_register',
'spl_autoload_unregister',
'spl_autoload',
'spl_classes',
'spl_object_hash'],
'SPPLUS': ['calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'],
'SQLite': ['sqlite_array_query', 'sqlite_busy_timeout', 'sqlite_changes'],
'SSH2': ['ssh2_auth_hostbased_file',
'ssh2_auth_none',
'ssh2_auth_password',
'ssh2_auth_pubkey_file',
'ssh2_connect',
'ssh2_exec',
'ssh2_fetch_stream',
'ssh2_fingerprint',
'ssh2_methods_negotiated',
'ssh2_publickey_add',
'ssh2_publickey_init',
'ssh2_publickey_list',
'ssh2_publickey_remove',
'ssh2_scp_recv',
'ssh2_scp_send',
'ssh2_sftp_lstat',
'ssh2_sftp_mkdir',
'ssh2_sftp_readlink',
'ssh2_sftp_realpath',
'ssh2_sftp_rename',
'ssh2_sftp_rmdir',
'ssh2_sftp_stat',
'ssh2_sftp_symlink',
'ssh2_sftp_unlink',
'ssh2_sftp',
'ssh2_shell',
'ssh2_tunnel'],
'SVN': ['svn_add',
'svn_auth_get_parameter',
'svn_auth_set_parameter',
'svn_blame',
'svn_cat',
'svn_checkout',
'svn_cleanup',
'svn_client_version',
'svn_commit',
'svn_delete',
'svn_diff',
'svn_export',
'svn_fs_abort_txn',
'svn_fs_apply_text',
'svn_fs_begin_txn2',
'svn_fs_change_node_prop',
'svn_fs_check_path',
'svn_fs_contents_changed',
'svn_fs_copy',
'svn_fs_delete',
'svn_fs_dir_entries',
'svn_fs_file_contents',
'svn_fs_file_length',
'svn_fs_is_dir',
'svn_fs_is_file',
'svn_fs_make_dir',
'svn_fs_make_file',
'svn_fs_node_created_rev',
'svn_fs_node_prop',
'svn_fs_props_changed',
'svn_fs_revision_prop',
'svn_fs_revision_root',
'svn_fs_txn_root',
'svn_fs_youngest_rev',
'svn_import',
'svn_log',
'svn_ls',
'svn_mkdir',
'svn_repos_create',
'svn_repos_fs_begin_txn_for_commit',
'svn_repos_fs_commit_txn',
'svn_repos_fs',
'svn_repos_hotcopy',
'svn_repos_open',
'svn_repos_recover',
'svn_revert',
'svn_status',
'svn_update'],
'SWF': ['swf_actiongeturl',
'swf_actiongotoframe',
'swf_actiongotolabel',
'swf_actionnextframe',
'swf_actionplay',
'swf_actionprevframe',
'swf_actionsettarget',
'swf_actionstop',
'swf_actiontogglequality',
'swf_actionwaitforframe',
'swf_addbuttonrecord',
'swf_addcolor',
'swf_closefile',
'swf_definebitmap',
'swf_definefont',
'swf_defineline',
'swf_definepoly',
'swf_definerect',
'swf_definetext',
'swf_endbutton',
'swf_enddoaction',
'swf_endshape',
'swf_endsymbol',
'swf_fontsize',
'swf_fontslant',
'swf_fonttracking',
'swf_getbitmapinfo',
'swf_getfontinfo',
'swf_getframe',
'swf_labelframe',
'swf_lookat',
'swf_modifyobject',
'swf_mulcolor',
'swf_nextid',
'swf_oncondition',
'swf_openfile',
'swf_ortho2',
'swf_ortho',
'swf_perspective',
'swf_placeobject',
'swf_polarview',
'swf_popmatrix',
'swf_posround',
'swf_pushmatrix',
'swf_removeobject',
'swf_rotate',
'swf_scale',
'swf_setfont',
'swf_setframe',
'swf_shapearc',
'swf_shapecurveto3',
'swf_shapecurveto',
'swf_shapefillbitmapclip',
'swf_shapefillbitmaptile',
'swf_shapefilloff',
'swf_shapefillsolid',
'swf_shapelinesolid',
'swf_shapelineto',
'swf_shapemoveto',
'swf_showframe',
'swf_startbutton',
'swf_startdoaction',
'swf_startshape',
'swf_startsymbol',
'swf_textwidth',
'swf_translate',
'swf_viewport'],
'Semaphore': ['ftok',
'msg_get_queue',
'msg_queue_exists',
'msg_receive',
'msg_remove_queue',
'msg_send',
'msg_set_queue',
'msg_stat_queue',
'sem_acquire',
'sem_get',
'sem_release',
'sem_remove',
'shm_attach',
'shm_detach',
'shm_get_var',
'shm_has_var',
'shm_put_var',
'shm_remove_var',
'shm_remove'],
'Session': ['session_cache_expire',
'session_cache_limiter',
'session_commit',
'session_decode',
'session_destroy',
'session_encode',
'session_get_cookie_params',
'session_id',
'session_is_registered',
'session_module_name',
'session_name',
'session_regenerate_id',
'session_register',
'session_save_path',
'session_set_cookie_params',
'session_set_save_handler',
'session_start',
'session_unregister',
'session_unset',
'session_write_close'],
'Session PgSQL': ['session_pgsql_add_error',
'session_pgsql_get_error',
'session_pgsql_get_field',
'session_pgsql_reset',
'session_pgsql_set_field',
'session_pgsql_status'],
'Shared Memory': ['shmop_close',
'shmop_delete',
'shmop_open',
'shmop_read',
'shmop_size',
'shmop_write'],
'SimpleXML': ['simplexml_import_dom',
'simplexml_load_file',
'simplexml_load_string'],
'Socket': ['socket_accept',
'socket_bind',
'socket_clear_error',
'socket_close',
'socket_connect',
'socket_create_listen',
'socket_create_pair',
'socket_create',
'socket_get_option',
'socket_getpeername',
'socket_getsockname',
'socket_last_error',
'socket_listen',
'socket_read',
'socket_recv',
'socket_recvfrom',
'socket_select',
'socket_send',
'socket_sendto',
'socket_set_block',
'socket_set_nonblock',
'socket_set_option',
'socket_shutdown',
'socket_strerror',
'socket_write'],
'Solr': ['solr_get_version'],
'Statistic': ['stats_absolute_deviation',
'stats_cdf_beta',
'stats_cdf_binomial',
'stats_cdf_cauchy',
'stats_cdf_chisquare',
'stats_cdf_exponential',
'stats_cdf_f',
'stats_cdf_gamma',
'stats_cdf_laplace',
'stats_cdf_logistic',
'stats_cdf_negative_binomial',
'stats_cdf_noncentral_chisquare',
'stats_cdf_noncentral_f',
'stats_cdf_poisson',
'stats_cdf_t',
'stats_cdf_uniform',
'stats_cdf_weibull',
'stats_covariance',
'stats_den_uniform',
'stats_dens_beta',
'stats_dens_cauchy',
'stats_dens_chisquare',
'stats_dens_exponential',
'stats_dens_f',
'stats_dens_gamma',
'stats_dens_laplace',
'stats_dens_logistic',
'stats_dens_negative_binomial',
'stats_dens_normal',
'stats_dens_pmf_binomial',
'stats_dens_pmf_hypergeometric',
'stats_dens_pmf_poisson',
'stats_dens_t',
'stats_dens_weibull',
'stats_harmonic_mean',
'stats_kurtosis',
'stats_rand_gen_beta',
'stats_rand_gen_chisquare',
'stats_rand_gen_exponential',
'stats_rand_gen_f',
'stats_rand_gen_funiform',
'stats_rand_gen_gamma',
'stats_rand_gen_ibinomial_negative',
'stats_rand_gen_ibinomial',
'stats_rand_gen_int',
'stats_rand_gen_ipoisson',
'stats_rand_gen_iuniform',
'stats_rand_gen_noncenral_chisquare',
'stats_rand_gen_noncentral_f',
'stats_rand_gen_noncentral_t',
'stats_rand_gen_normal',
'stats_rand_gen_t',
'stats_rand_get_seeds',
'stats_rand_phrase_to_seeds',
'stats_rand_ranf',
'stats_rand_setall',
'stats_skew',
'stats_standard_deviation',
'stats_stat_binomial_coef',
'stats_stat_correlation',
'stats_stat_gennch',
'stats_stat_independent_t',
'stats_stat_innerproduct',
'stats_stat_noncentral_t',
'stats_stat_paired_t',
'stats_stat_percentile',
'stats_stat_powersum',
'stats_variance'],
'Stomp': ['stomp_connect_error', 'stomp_version'],
'Stream': ['set_socket_blocking',
'stream_bucket_append',
'stream_bucket_make_writeable',
'stream_bucket_new',
'stream_bucket_prepend',
'stream_context_create',
'stream_context_get_default',
'stream_context_get_options',
'stream_context_get_params',
'stream_context_set_default',
'stream_context_set_option',
'stream_context_set_params',
'stream_copy_to_stream',
'stream_encoding',
'stream_filter_append',
'stream_filter_prepend',
'stream_filter_register',
'stream_filter_remove',
'stream_get_contents',
'stream_get_filters',
'stream_get_line',
'stream_get_meta_data',
'stream_get_transports',
'stream_get_wrappers',
'stream_is_local',
'stream_notification_callback',
'stream_register_wrapper',
'stream_resolve_include_path',
'stream_select'],
'String': ['addcslashes',
'addslashes',
'bin2hex',
'chop',
'chr',
'chunk_split',
'convert_cyr_string',
'convert_uudecode',
'convert_uuencode',
'count_chars',
'crc32',
'crypt',
'echo',
'explode',
'fprintf',
'get_html_translation_table',
'hebrev',
'hebrevc',
'html_entity_decode',
'htmlentities',
'htmlspecialchars_decode',
'htmlspecialchars',
'implode',
'join',
'lcfirst',
'levenshtein',
'localeconv',
'ltrim',
'md5_file',
'md5',
'metaphone',
'money_format',
'nl_langinfo',
'nl2br',
'number_format',
'ord',
'parse_str',
'print',
'printf',
'quoted_printable_decode',
'quoted_printable_encode',
'quotemeta',
'rtrim',
'setlocale',
'sha1_file',
'sha1',
'similar_text',
'soundex',
'sprintf',
'sscanf',
'str_getcsv',
'str_ireplace',
'str_pad',
'str_repeat',
'str_replace',
'str_rot13',
'str_shuffle',
'str_split',
'str_word_count',
'strcasecmp',
'strchr',
'strcmp',
'strcoll',
'strcspn',
'strip_tags',
'stripcslashes',
'stripos',
'stripslashes',
'stristr',
'strlen',
'strnatcasecmp',
'strnatcmp',
'strncasecmp',
'strncmp',
'strpbrk',
'strpos',
'strrchr',
'strrev',
'strripos',
'strrpos',
'strspn'],
'Sybase': ['sybase_affected_rows',
'sybase_close',
'sybase_connect',
'sybase_data_seek',
'sybase_deadlock_retry_count',
'sybase_fetch_array',
'sybase_fetch_assoc',
'sybase_fetch_field',
'sybase_fetch_object',
'sybase_fetch_row',
'sybase_field_seek',
'sybase_free_result',
'sybase_get_last_message',
'sybase_min_client_severity',
'sybase_min_error_severity',
'sybase_min_message_severity',
'sybase_min_server_severity',
'sybase_num_fields',
'sybase_num_rows',
'sybase_pconnect',
'sybase_query',
'sybase_result',
'sybase_select_db',
'sybase_set_message_handler',
'sybase_unbuffered_query'],
'TCP': ['tcpwrap_check'],
'Tidy': ['ob_tidyhandler',
'tidy_access_count',
'tidy_config_count',
'tidy_error_count',
'tidy_get_error_buffer',
'tidy_get_output',
'tidy_load_config',
'tidy_reset_config',
'tidy_save_config',
'tidy_set_encoding',
'tidy_setopt',
'tidy_warning_count'],
'Tokenizer': ['token_get_all', 'token_name'],
'URL': ['base64_decode',
'base64_encode',
'get_headers',
'get_meta_tags',
'http_build_query',
'parse_url',
'rawurldecode',
'rawurlencode',
'urldecode',
'urlencode'],
'Variable handling': ['debug_zval_dump',
'doubleval',
'empty',
'floatval',
'get_defined_vars',
'get_resource_type',
'gettype',
'import_request_variables',
'intval',
'is_array',
'is_bool',
'is_callable',
'is_double',
'is_float',
'is_int',
'is_integer',
'is_long',
'is_null',
'is_numeric',
'is_object',
'is_real',
'is_resource',
'is_scalar',
'is_string',
'isset',
'print_r',
'serialize',
'settype',
'strval',
'unserialize',
'unset',
'var_dump',
'var_export'],
'W32api': ['w32api_deftype',
'w32api_init_dtype',
'w32api_invoke_function',
'w32api_register_function',
'w32api_set_call_method'],
'WDDX': ['wddx_add_vars',
'wddx_deserialize',
'wddx_packet_end',
'wddx_packet_start',
'wddx_serialize_value',
'wddx_serialize_vars',
'wddx_unserialize'],
'WinCache': ['wincache_fcache_fileinfo',
'wincache_fcache_meminfo',
'wincache_lock',
'wincache_ocache_fileinfo',
'wincache_ocache_meminfo',
'wincache_refresh_if_changed',
'wincache_rplist_fileinfo',
'wincache_rplist_meminfo',
'wincache_scache_info',
'wincache_scache_meminfo',
'wincache_ucache_add',
'wincache_ucache_cas',
'wincache_ucache_clear',
'wincache_ucache_dec',
'wincache_ucache_delete',
'wincache_ucache_exists',
'wincache_ucache_get',
'wincache_ucache_inc',
'wincache_ucache_info',
'wincache_ucache_meminfo',
'wincache_ucache_set',
'wincache_unlock'],
'XML Parser': ['utf8_decode'],
'XML-RPC': ['xmlrpc_decode_request',
'xmlrpc_decode',
'xmlrpc_encode_request',
'xmlrpc_encode',
'xmlrpc_get_type',
'xmlrpc_is_fault',
'xmlrpc_parse_method_descriptions',
'xmlrpc_server_add_introspection_data',
'xmlrpc_server_call_method',
'xmlrpc_server_create',
'xmlrpc_server_destroy',
'xmlrpc_server_register_introspection_callback',
'xmlrpc_server_register_method',
'xmlrpc_set_type'],
'XSLT (PHP4)': ['xslt_backend_info',
'xslt_backend_name',
'xslt_backend_version',
'xslt_create',
'xslt_errno',
'xslt_error',
'xslt_free',
'xslt_getopt',
'xslt_process',
'xslt_set_base',
'xslt_set_encoding',
'xslt_set_error_handler',
'xslt_set_log',
'xslt_set_object',
'xslt_set_sax_handler',
'xslt_set_sax_handlers',
'xslt_set_scheme_handler',
'xslt_set_scheme_handlers',
'xslt_setopt'],
'YAZ': ['yaz_addinfo',
'yaz_ccl_conf',
'yaz_ccl_parse',
'yaz_close',
'yaz_connect',
'yaz_database',
'yaz_element',
'yaz_errno',
'yaz_error',
'yaz_es_result',
'yaz_es',
'yaz_get_option',
'yaz_hits',
'yaz_itemorder',
'yaz_present',
'yaz_range',
'yaz_record',
'yaz_scan_result',
'yaz_scan',
'yaz_schema',
'yaz_search',
'yaz_set_option',
'yaz_sort',
'yaz_syntax',
'yaz_wait'],
'YP/NIS': ['yp_all',
'yp_cat',
'yp_err_string',
'yp_errno',
'yp_first',
'yp_get_default_domain',
'yp_master',
'yp_match',
'yp_next',
'yp_order'],
'Yaml': ['yaml_emit_file',
'yaml_emit',
'yaml_parse_file',
'yaml_parse_url',
'yaml_parse'],
'Zip': ['zip_close',
'zip_entry_close',
'zip_entry_compressedsize',
'zip_entry_compressionmethod',
'zip_entry_filesize',
'zip_entry_name',
'zip_entry_open',
'zip_entry_read',
'zip_open',
'zip_read'],
'Zlib': ['gzclose',
'gzcompress',
'gzdecode',
'gzdeflate',
'gzencode',
'gzeof',
'gzfile',
'gzgetc',
'gzgets',
'gzgetss',
'gzinflate',
'gzopen',
'gzpassthru',
'gzputs',
'gzread',
'gzrewind',
'gzseek',
'gztell',
'gzuncompress',
'gzwrite',
'readgzfile',
'zlib_get_coding_type'],
'bcompiler': ['bcompiler_load_exe',
'bcompiler_load',
'bcompiler_parse_class',
'bcompiler_read',
'bcompiler_write_class',
'bcompiler_write_constant',
'bcompiler_write_exe_footer',
'bcompiler_write_file',
'bcompiler_write_footer',
'bcompiler_write_function',
'bcompiler_write_functions_from_file',
'bcompiler_write_header',
'bcompiler_write_included_filename'],
'cURL': ['curl_close',
'curl_copy_handle',
'curl_errno',
'curl_error',
'curl_exec',
'curl_getinfo',
'curl_init',
'curl_multi_add_handle',
'curl_multi_close',
'curl_multi_exec',
'curl_multi_getcontent',
'curl_multi_info_read',
'curl_multi_init',
'curl_multi_remove_handle',
'curl_multi_select',
'curl_setopt_array',
'curl_setopt',
'curl_version'],
'chdb': ['chdb_create'],
'dBase': ['dbase_add_record',
'dbase_close',
'dbase_create',
'dbase_delete_record',
'dbase_get_header_info',
'dbase_get_record_with_names',
'dbase_get_record',
'dbase_numfields',
'dbase_numrecords',
'dbase_open',
'dbase_pack',
'dbase_replace_record'],
'dbx': ['dbx_close',
'dbx_compare',
'dbx_connect',
'dbx_error',
'dbx_escape_string',
'dbx_fetch_row'],
'filePro': ['filepro_fieldcount',
'filepro_fieldname',
'filepro_fieldtype',
'filepro_fieldwidth',
'filepro_retrieve',
'filepro_rowcount',
'filepro'],
'iconv': ['iconv_get_encoding',
'iconv_mime_decode_headers',
'iconv_mime_decode',
'iconv_mime_encode',
'iconv_set_encoding',
'iconv_strlen',
'iconv_strpos',
'iconv_strrpos',
'iconv_substr',
'iconv',
'ob_iconv_handler'],
'inclued': ['inclued_get_data'],
'intl': ['intl_error_name',
'intl_get_error_code',
'intl_get_error_message',
'intl_is_failure'],
'libxml': ['libxml_clear_errors',
'libxml_disable_entity_loader',
'libxml_get_errors',
'libxml_get_last_error',
'libxml_set_streams_context',
'libxml_use_internal_errors'],
'mSQL': ['msql_affected_rows',
'msql_close',
'msql_connect',
'msql_create_db',
'msql_createdb',
'msql_data_seek',
'msql_db_query',
'msql_dbname',
'msql_drop_db',
'msql_error',
'msql_fetch_array',
'msql_fetch_field',
'msql_fetch_object',
'msql_fetch_row',
'msql_field_flags',
'msql_field_len',
'msql_field_name',
'msql_field_seek',
'msql_field_table',
'msql_field_type',
'msql_fieldflags',
'msql_fieldlen',
'msql_fieldname',
'msql_fieldtable',
'msql_fieldtype',
'msql_free_result',
'msql_list_dbs',
'msql_list_fields',
'msql_list_tables',
'msql_num_fields',
'msql_num_rows',
'msql_numfields',
'msql_numrows',
'msql_pconnect',
'msql_query',
'msql_regcase',
'msql_result',
'msql_select_db',
'msql_tablename',
'msql'],
'mnoGoSearch': ['udm_add_search_limit',
'udm_alloc_agent_array',
'udm_alloc_agent',
'udm_api_version',
'udm_cat_list',
'udm_cat_path',
'udm_check_charset',
'udm_check_stored',
'udm_clear_search_limits',
'udm_close_stored',
'udm_crc32',
'udm_errno',
'udm_error',
'udm_find',
'udm_free_agent',
'udm_free_ispell_data',
'udm_free_res',
'udm_get_doc_count',
'udm_get_res_field',
'udm_get_res_param',
'udm_hash32',
'udm_load_ispell_data',
'udm_open_stored',
'udm_set_agent_param'],
'mqseries': ['mqseries_back',
'mqseries_begin',
'mqseries_close',
'mqseries_cmit',
'mqseries_conn',
'mqseries_connx',
'mqseries_disc',
'mqseries_get',
'mqseries_inq',
'mqseries_open',
'mqseries_put1',
'mqseries_put',
'mqseries_set',
'mqseries_strerror'],
'mysqlnd_qc': ['mysqlnd_qc_change_handler',
'mysqlnd_qc_clear_cache',
'mysqlnd_qc_get_cache_info',
'mysqlnd_qc_get_core_stats',
'mysqlnd_qc_get_handler',
'mysqlnd_qc_get_query_trace_log',
'mysqlnd_qc_set_user_handlers'],
'qtdom': ['qdom_error', 'qdom_tree'],
'runkit': ['runkit_class_adopt',
'runkit_class_emancipate',
'runkit_constant_add',
'runkit_constant_redefine',
'runkit_constant_remove',
'runkit_function_add',
'runkit_function_copy',
'runkit_function_redefine',
'runkit_function_remove',
'runkit_function_rename',
'runkit_import',
'runkit_lint_file',
'runkit_lint',
'runkit_method_add',
'runkit_method_copy',
'runkit_method_redefine',
'runkit_method_remove',
'runkit_method_rename',
'runkit_return_value_used',
'runkit_sandbox_output_handler',
'runkit_superglobals'],
'ssdeep': ['ssdeep_fuzzy_compare',
'ssdeep_fuzzy_hash_filename',
'ssdeep_fuzzy_hash'],
'vpopmail': ['vpopmail_add_alias_domain_ex',
'vpopmail_add_alias_domain',
'vpopmail_add_domain_ex',
'vpopmail_add_domain',
'vpopmail_add_user',
'vpopmail_alias_add',
'vpopmail_alias_del_domain',
'vpopmail_alias_del',
'vpopmail_alias_get_all',
'vpopmail_alias_get',
'vpopmail_auth_user',
'vpopmail_del_domain_ex',
'vpopmail_del_domain',
'vpopmail_del_user',
'vpopmail_error',
'vpopmail_passwd',
'vpopmail_set_user_quota'],
'win32ps': ['win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'],
'win32service': ['win32_continue_service',
'win32_create_service',
'win32_delete_service',
'win32_get_last_control_message',
'win32_pause_service',
'win32_query_service_status',
'win32_set_service_status',
'win32_start_service_ctrl_dispatcher',
'win32_start_service',
'win32_stop_service'],
'xattr': ['xattr_get',
'xattr_list',
'xattr_remove',
'xattr_set',
'xattr_supported'],
'xdiff': ['xdiff_file_bdiff_size',
'xdiff_file_bdiff',
'xdiff_file_bpatch',
'xdiff_file_diff_binary',
'xdiff_file_diff',
'xdiff_file_merge3',
'xdiff_file_patch_binary',
'xdiff_file_patch',
'xdiff_file_rabdiff',
'xdiff_string_bdiff_size',
'xdiff_string_bdiff',
'xdiff_string_bpatch',
'xdiff_string_diff_binary',
'xdiff_string_diff',
'xdiff_string_merge3',
'xdiff_string_patch_binary',
'xdiff_string_patch',
'xdiff_string_rabdiff']}
if __name__ == '__main__':
import glob
import os
import pprint
import re
import shutil
import tarfile
import urllib
PHP_MANUAL_URL = 'http://us3.php.net/distributions/manual/php_manual_en.tar.gz'
PHP_MANUAL_DIR = './php-chunked-xhtml/'
PHP_REFERENCE_GLOB = 'ref.*'
PHP_FUNCTION_RE = '<a href="function\..*?\.html">(.*?)</a>'
PHP_MODULE_RE = '<title>(.*?) Functions</title>'
def get_php_functions():
function_re = re.compile(PHP_FUNCTION_RE)
module_re = re.compile(PHP_MODULE_RE)
modules = {}
for file in get_php_references():
module = ''
for line in open(file):
if not module:
search = module_re.search(line)
if search:
module = search.group(1)
modules[module] = []
elif '<h2>Table of Contents</h2>' in line:
for match in function_re.finditer(line):
fn = match.group(1)
if '->' not in fn and '::' not in fn:
modules[module].append(fn)
# These are dummy manual pages, not actual functions
if module == 'PHP Options/Info':
modules[module].remove('main')
elif module == 'Filesystem':
modules[module].remove('delete')
if not modules[module]:
del modules[module]
break
return modules
def get_php_references():
download = urllib.urlretrieve(PHP_MANUAL_URL)
tar = tarfile.open(download[0])
tar.extractall()
tar.close()
for file in glob.glob("%s%s" % (PHP_MANUAL_DIR, PHP_REFERENCE_GLOB)):
yield file
os.remove(download[0])
def regenerate(filename, modules):
f = open(filename)
try:
content = f.read()
finally:
f.close()
header = content[:content.find('MODULES = {')]
footer = content[content.find("if __name__ == '__main__':"):]
f = open(filename, 'w')
f.write(header)
f.write('MODULES = %s\n\n' % pprint.pformat(modules))
f.write(footer)
f.close()
def run():
print '>> Downloading Function Index'
modules = get_php_functions()
total = sum(len(v) for v in modules.itervalues())
print '%d functions found' % total
regenerate(__file__, modules)
shutil.rmtree(PHP_MANUAL_DIR)
run()
| mit |
txemi/ansible | lib/ansible/modules/cloud/amazon/s3_lifecycle.py | 71 | 15738 | #!/usr/bin/python
#
# This is a free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This Ansible library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'curated'}
DOCUMENTATION = '''
---
module: s3_lifecycle
short_description: Manage s3 bucket lifecycle rules in AWS
description:
- Manage s3 bucket lifecycle rules in AWS
version_added: "2.0"
author: "Rob White (@wimnat)"
notes:
- If specifying expiration time as days then transition time must also be specified in days
- If specifying expiration time as a date then transition time must also be specified as a date
requirements:
- python-dateutil
options:
name:
description:
- "Name of the s3 bucket"
required: true
expiration_date:
description:
- >
Indicates the lifetime of the objects that are subject to the rule by the date they will expire. The value must be ISO-8601 format, the time must
be midnight and a GMT timezone must be specified.
required: false
default: null
expiration_days:
description:
- "Indicates the lifetime, in days, of the objects that are subject to the rule. The value must be a non-zero positive integer."
required: false
default: null
prefix:
description:
- "Prefix identifying one or more objects to which the rule applies. If no prefix is specified, the rule will apply to the whole bucket."
required: false
default: null
rule_id:
description:
- "Unique identifier for the rule. The value cannot be longer than 255 characters. A unique value for the rule will be generated if no value is provided."
required: false
default: null
state:
description:
- "Create or remove the lifecycle rule"
required: false
default: present
choices: [ 'present', 'absent' ]
status:
description:
- "If 'enabled', the rule is currently being applied. If 'disabled', the rule is not currently being applied."
required: false
default: enabled
choices: [ 'enabled', 'disabled' ]
storage_class:
description:
- "The storage class to transition to. Currently there are two supported values - 'glacier' or 'standard_ia'."
- "The 'standard_ia' class is only being available from Ansible version 2.2."
required: false
default: glacier
choices: [ 'glacier', 'standard_ia']
transition_date:
description:
- >
Indicates the lifetime of the objects that are subject to the rule by the date they will transition to a different storage class.
The value must be ISO-8601 format, the time must be midnight and a GMT timezone must be specified. If transition_days is not specified,
this parameter is required."
required: false
default: null
transition_days:
description:
- "Indicates when, in days, an object transitions to a different storage class. If transition_date is not specified, this parameter is required."
required: false
default: null
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Configure a lifecycle rule on a bucket to expire (delete) items with a prefix of /logs/ after 30 days
- s3_lifecycle:
name: mybucket
expiration_days: 30
prefix: /logs/
status: enabled
state: present
# Configure a lifecycle rule to transition all items with a prefix of /logs/ to glacier after 7 days and then delete after 90 days
- s3_lifecycle:
name: mybucket
transition_days: 7
expiration_days: 90
prefix: /logs/
status: enabled
state: present
# Configure a lifecycle rule to transition all items with a prefix of /logs/ to glacier on 31 Dec 2020 and then delete on 31 Dec 2030.
# Note that midnight GMT must be specified.
# Be sure to quote your date strings
- s3_lifecycle:
name: mybucket
transition_date: "2020-12-30T00:00:00.000Z"
expiration_date: "2030-12-30T00:00:00.000Z"
prefix: /logs/
status: enabled
state: present
# Disable the rule created above
- s3_lifecycle:
name: mybucket
prefix: /logs/
status: disabled
state: present
# Delete the lifecycle rule created above
- s3_lifecycle:
name: mybucket
prefix: /logs/
state: absent
# Configure a lifecycle rule to transition all backup files older than 31 days in /backups/ to standard infrequent access class.
- s3_lifecycle:
name: mybucket
prefix: /backups/
storage_class: standard_ia
transition_days: 31
state: present
status: enabled
'''
import xml.etree.ElementTree as ET
import copy
import datetime
try:
import dateutil.parser
HAS_DATEUTIL = True
except ImportError:
HAS_DATEUTIL = False
try:
import boto
import boto.ec2
from boto.s3.connection import OrdinaryCallingFormat, Location
from boto.s3.lifecycle import Lifecycle, Rule, Expiration, Transition
from boto.exception import BotoServerError, S3CreateError, S3ResponseError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, ec2_argument_spec, get_aws_connection_info
def create_lifecycle_rule(connection, module):
    """Create or update a lifecycle rule on the named bucket (idempotent).

    Builds a boto ``Rule`` from the module parameters and merges it into the
    bucket's current lifecycle configuration.  An existing rule with the same
    id (or, failing an id match, the same prefix) is replaced only when it
    differs; otherwise ``changed`` stays False.  Always terminates the module
    via ``module.exit_json`` / ``module.fail_json``.
    """
    name = module.params.get("name")
    expiration_date = module.params.get("expiration_date")
    expiration_days = module.params.get("expiration_days")
    prefix = module.params.get("prefix")
    rule_id = module.params.get("rule_id")
    status = module.params.get("status")
    storage_class = module.params.get("storage_class")
    transition_date = module.params.get("transition_date")
    transition_days = module.params.get("transition_days")
    changed = False
    try:
        bucket = connection.get_bucket(name)
    except S3ResponseError as e:
        module.fail_json(msg=e.message)
    # Get the bucket's current lifecycle rules; a bucket with no lifecycle
    # configuration at all is treated as an empty Lifecycle.
    try:
        current_lifecycle_obj = bucket.get_lifecycle_config()
    except S3ResponseError as e:
        if e.error_code == "NoSuchLifecycleConfiguration":
            current_lifecycle_obj = Lifecycle()
        else:
            module.fail_json(msg=e.message)
    # Create expiration (days takes precedence; both unset means no expiration)
    if expiration_days is not None:
        expiration_obj = Expiration(days=expiration_days)
    elif expiration_date is not None:
        expiration_obj = Expiration(date=expiration_date)
    else:
        expiration_obj = None
    # Create transition (storage class is upper-cased for the S3 API)
    if transition_days is not None:
        transition_obj = Transition(days=transition_days, storage_class=storage_class.upper())
    elif transition_date is not None:
        transition_obj = Transition(date=transition_date, storage_class=storage_class.upper())
    else:
        transition_obj = None
    # Create the desired rule
    rule = Rule(rule_id, prefix, status.title(), expiration_obj, transition_obj)
    # Create the lifecycle configuration we will write back
    lifecycle_obj = Lifecycle()
    appended = False
    # If current_lifecycle_obj is not None then we have rules to compare, otherwise just add the rule
    if current_lifecycle_obj:
        # If rule ID exists, use that for comparison otherwise compare based on prefix
        for existing_rule in current_lifecycle_obj:
            if rule.id == existing_rule.id:
                if compare_rule(rule, existing_rule):
                    lifecycle_obj.append(rule)
                    appended = True
                else:
                    lifecycle_obj.append(rule)
                    changed = True
                    appended = True
            elif rule.prefix == existing_rule.prefix:
                # NOTE(review): this nulls the existing rule's id so that
                # compare_rule ignores ids when matching by prefix — the
                # mutation is on the in-memory copy only, but it does alter
                # current_lifecycle_obj for the rest of this loop.
                existing_rule.id = None
                if compare_rule(rule, existing_rule):
                    lifecycle_obj.append(rule)
                    appended = True
                else:
                    lifecycle_obj.append(rule)
                    changed = True
                    appended = True
            else:
                # Unrelated rule: keep it as-is.
                lifecycle_obj.append(existing_rule)
        # If nothing appended then append now as the rule must not exist
        if not appended:
            lifecycle_obj.append(rule)
            changed = True
    else:
        lifecycle_obj.append(rule)
        changed = True
    # Write lifecycle to bucket
    try:
        bucket.configure_lifecycle(lifecycle_obj)
    except S3ResponseError as e:
        module.fail_json(msg=e.message)
    module.exit_json(changed=changed)
def compare_rule(rule_a, rule_b):
    """Return True if two boto lifecycle rules are semantically identical.

    Each rule is deep-copied and normalised: a stray ``Rule`` attribute is
    dropped, the ``expiration``/``transition`` members are detached from the
    rule body, and missing members are replaced with empty ``Expiration`` /
    ``Transition`` instances so that ``None`` compares equal to "unset".
    """
    def _normalise(original):
        clone = copy.deepcopy(original)
        # Some boto objects carry a nested 'Rule' attribute; ignore it.
        try:
            del clone.Rule
        except AttributeError:
            pass
        expiration = clone.expiration
        transition = clone.transition
        del clone.expiration
        del clone.transition
        if expiration is None:
            expiration = Expiration()
        if transition is None:
            transition = Transition()
        return clone, expiration, transition

    body_a, expiration_a, transition_a = _normalise(rule_a)
    body_b, expiration_b, transition_b = _normalise(rule_b)
    return (body_a.__dict__ == body_b.__dict__ and
            expiration_a.__dict__ == expiration_b.__dict__ and
            transition_a.__dict__ == transition_b.__dict__)
def destroy_lifecycle_rule(connection, module):
    """Delete a lifecycle rule from a bucket, matching by id or by prefix.

    If a ``rule_id`` was given it selects the rule to remove; otherwise the
    ``prefix`` (empty string when unset) is used.  Writes back the remaining
    rules, or deletes the whole lifecycle configuration when none remain.
    Always terminates the module via ``module.exit_json``/``fail_json``.
    """
    bucket_name = module.params.get("name")
    prefix = module.params.get("prefix")
    rule_id = module.params.get("rule_id")
    changed = False
    if prefix is None:
        prefix = ""
    try:
        bucket = connection.get_bucket(bucket_name)
    except S3ResponseError as e:
        module.fail_json(msg=e.message)
    # Fetch the current rules; no configuration at all means nothing to do.
    try:
        current_rules = bucket.get_lifecycle_config()
    except S3ResponseError as e:
        if e.error_code != "NoSuchLifecycleConfiguration":
            module.fail_json(msg=e.message)
        module.exit_json(changed=changed)
    # Choose how the doomed rule is recognised.
    if rule_id is not None:
        def _is_target(candidate):
            return rule_id == candidate.id
    else:
        def _is_target(candidate):
            return prefix == candidate.prefix
    # Keep everything except the matching rule(s).
    remaining = Lifecycle()
    for existing_rule in current_rules:
        if _is_target(existing_rule):
            # We're not keeping the rule (i.e. deleting) so mark as changed
            changed = True
        else:
            remaining.append(existing_rule)
    # Write back the survivors, or drop the configuration if empty.
    try:
        if remaining:
            bucket.configure_lifecycle(remaining)
        else:
            bucket.delete_lifecycle_configuration()
    except BotoServerError as e:
        module.fail_json(msg=e.message)
    module.exit_json(changed=changed)
def main():
    """Module entry point: parse and validate parameters, connect to S3,
    and dispatch to create/destroy based on ``state``."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name = dict(required=True, type='str'),
            expiration_days = dict(default=None, required=False, type='int'),
            expiration_date = dict(default=None, required=False, type='str'),
            prefix = dict(default=None, required=False),
            # NOTE(review): requester_pays is accepted but never read by this
            # module; kept for backwards compatibility with existing playbooks.
            requester_pays = dict(default='no', type='bool'),
            rule_id = dict(required=False, type='str'),
            state = dict(default='present', choices=['present', 'absent']),
            status = dict(default='enabled', choices=['enabled', 'disabled']),
            storage_class = dict(default='glacier', type='str', choices=['glacier', 'standard_ia']),
            transition_days = dict(default=None, required=False, type='int'),
            transition_date = dict(default=None, required=False, type='str')
        )
    )
    # Days-based and date-based settings cannot be mixed (see module notes).
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive = [
                               [ 'expiration_days', 'expiration_date' ],
                               [ 'expiration_days', 'transition_date' ],
                               [ 'transition_days', 'transition_date' ],
                               [ 'transition_days', 'expiration_date' ]
                           ]
                           )
    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')
    if not HAS_DATEUTIL:
        module.fail_json(msg='dateutil required for this module')
    region, ec2_url, aws_connect_params = get_aws_connection_info(module)
    if region in ('us-east-1', '', None):
        # S3ism for the US Standard region
        location = Location.DEFAULT
    else:
        # Boto uses symbolic names for locations but region strings will
        # actually work fine for everything except us-east-1 (US Standard)
        location = region
    try:
        connection = boto.s3.connect_to_region(location, is_secure=True, calling_format=OrdinaryCallingFormat(), **aws_connect_params)
        # use this as fallback because connect_to_region seems to fail in boto + non 'classic' aws accounts in some cases
        if connection is None:
            connection = boto.connect_s3(**aws_connect_params)
    except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
        module.fail_json(msg=str(e))
    expiration_date = module.params.get("expiration_date")
    transition_date = module.params.get("transition_date")
    state = module.params.get("state")
    storage_class = module.params.get("storage_class")
    # If expiration_date is set, check the string is a valid ISO-8601
    # timestamp at midnight GMT (the only form S3 accepts).
    if expiration_date is not None:
        try:
            datetime.datetime.strptime(expiration_date, "%Y-%m-%dT%H:%M:%S.000Z")
        except ValueError:
            module.fail_json(msg="expiration_date is not a valid ISO-8601 format. The time must be midnight and a timezone of GMT must be included")
    if transition_date is not None:
        try:
            datetime.datetime.strptime(transition_date, "%Y-%m-%dT%H:%M:%S.000Z")
        except ValueError:
            # Bug fix: this message previously said "expiration_date",
            # pointing users at the wrong parameter.
            module.fail_json(msg="transition_date is not a valid ISO-8601 format. The time must be midnight and a timezone of GMT must be included")
    # The STANDARD_IA storage class only exists in boto >= 2.40.0.
    boto_required_version = (2, 40, 0)
    if storage_class == 'standard_ia' and tuple(map(int, (boto.__version__.split(".")))) < boto_required_version:
        module.fail_json(msg="'standard_ia' class requires boto >= 2.40.0")
    if state == 'present':
        create_lifecycle_rule(connection, module)
    elif state == 'absent':
        destroy_lifecycle_rule(connection, module)
if __name__ == '__main__':
main()
| gpl-3.0 |
spiceqa/virt-test | tests/dd_test.py | 3 | 3846 | """
Configurable on-guest dd test.
:author: Lukas Doktor <ldoktor@redhat.com>
:copyright: 2012 Red Hat, Inc.
"""
import logging
from autotest.client.shared import error
from virttest import aexpect
@error.context_aware
def run_dd_test(test, params, env):
    """
    Execute dd in the guest with configured parameters and check the
    exit status and output.

    Test steps:
    1). wait for the guest to boot up
    2). run a dd command in the guest with the configured params
        (e.g. oflag, bs and so on)
    3). check the command exit status and output against dd_stat/dd_output
    """
    def _get_file(filename, select):
        """ Picks the actual file based on select value.

        The symbolic names NULL/ZERO/RANDOM/URANDOM map to the usual
        /dev special files; anything else is treated as a glob pattern
        listed inside the guest, with *select* indexing the sorted matches
        (-1 picks the last one).
        """
        if filename == "NULL":
            return "/dev/null"
        elif filename == "ZERO":
            return "/dev/zero"
        elif filename == "RANDOM":
            return "/dev/random"
        elif filename == "URANDOM":
            return "/dev/urandom"
        else:
            # get all matching filenames from inside the guest
            try:
                disks = sorted(session.cmd("ls -1d %s" % filename).split('\n'))
            except aexpect.ShellCmdError:  # No matching file (creating new?)
                disks = [filename]
            # drop the trailing empty string left by the final newline
            if disks[-1] == '':
                disks = disks[:-1]
            try:
                return disks[select]
            except IndexError:
                err = ("Incorrect cfg: dd_select out of the range (disks=%s,"
                       " select=%s)" % (disks, select))
                logging.error(err)
                raise error.TestError(err)
    vm = env.get_vm(params['main_vm'])
    timeout = int(params.get("login_timeout", 360))
    error.context("Wait guest boot up", logging.info)
    session = vm.wait_for_login(timeout=timeout)
    # Gather all dd knobs from the test configuration (all optional).
    dd_if = params.get("dd_if")
    dd_if_select = int(params.get("dd_if_select", '-1'))
    dd_of = params.get("dd_of")
    dd_of_select = int(params.get("dd_of_select", '-1'))
    dd_bs = params.get("dd_bs")
    dd_count = params.get("dd_count")
    dd_iflag = params.get("dd_iflag")
    dd_oflag = params.get("dd_oflag")
    dd_skip = params.get("dd_skip")
    dd_seek = params.get("dd_seek")
    dd_timeout = int(params.get("dd_timeout", 60))
    dd_output = params.get("dd_output", "")
    dd_stat = int(params.get("dd_stat", 0))
    # Build the dd command line, appending only the options that were set.
    dd_cmd = "dd"
    if dd_if:
        dd_if = _get_file(dd_if, dd_if_select)
        dd_cmd += " if=%s" % dd_if
    if dd_of:
        dd_of = _get_file(dd_of, dd_of_select)
        dd_cmd += " of=%s" % dd_of
    if dd_bs:
        dd_cmd += " bs=%s" % dd_bs
    if dd_count:
        dd_cmd += " count=%s" % dd_count
    if dd_iflag:
        dd_cmd += " iflag=%s" % dd_iflag
    if dd_oflag:
        dd_cmd += " oflag=%s" % dd_oflag
    if dd_skip:
        dd_cmd += " skip=%s" % dd_skip
    if dd_seek:
        dd_cmd += " seek=%s" % dd_seek
    logging.info("Using '%s' cmd", dd_cmd)
    error.context("Execute dd in guest", logging.info)
    try:
        (stat, out) = session.cmd_status_output(dd_cmd, timeout=dd_timeout)
    except aexpect.ShellTimeoutError:
        err = ("dd command timed-out (cmd='%s', timeout=%d)"
               % (dd_cmd, dd_timeout))
        logging.error(err)
        raise error.TestFail(err)
    except aexpect.ShellCmdError, details:
        # A nonzero exit status may be the expected result (dd_stat != 0),
        # so capture it here instead of failing immediately.
        stat = details.status
        out = details.output
    error.context("Check command exit status and output", logging.info)
    logging.debug("Returned dd_status: %s\nReturned output:\n%s", stat, out)
    if stat != dd_stat:
        err = ("Return code doesn't match (expected=%s, actual=%s)\n"
               "Output:\n%s" % (dd_stat, stat, out))
        logging.error(err)
        raise error.TestFail(err)
    # dd_output is a substring match against the combined output.
    if dd_output not in out:
        err = ("Output doesn't match:\nExpected:\n%s\nActual:\n%s"
               % (dd_output, out))
        raise error.TestFail(err)
    logging.info("dd test succeeded.")
    return
| gpl-2.0 |
dracos/QGIS | python/plugins/processing/algs/lidar/lastools/las2dem.py | 9 | 3423 | # -*- coding: utf-8 -*-
"""
***************************************************************************
las2dem.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
---------------------
Date : September 2013
Copyright : (C) 2013 by Martin Isenburg
Email : martin near rapidlasso point com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from LAStoolsUtils import LAStoolsUtils
from LAStoolsAlgorithm import LAStoolsAlgorithm
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterBoolean
class las2dem(LAStoolsAlgorithm):
    """Processing wrapper around the LAStools 'las2dem' command-line tool."""
    # Keys used by Processing to store/retrieve the GUI parameter values.
    ATTRIBUTE = "ATTRIBUTE"
    PRODUCT = "PRODUCT"
    # Selection lists; index 0 is the default and, when selected, adds no
    # extra command-line flag (see processAlgorithm below).
    ATTRIBUTES = ["elevation", "slope", "intensity", "rgb", "edge_longest", "edge_shortest"]
    PRODUCTS = ["actual values", "hillshade", "gray", "false"]
    USE_TILE_BB = "USE_TILE_BB"
    def defineCharacteristics(self):
        """Declare the algorithm's name/group and register its parameters.

        The order of the addParameters* calls determines the GUI layout.
        """
        self.name = "las2dem"
        self.group = "LAStools"
        self.addParametersVerboseGUI()
        self.addParametersPointInputGUI()
        self.addParametersFilter1ReturnClassFlagsGUI()
        self.addParametersStepGUI()
        self.addParameter(ParameterSelection(las2dem.ATTRIBUTE,
                                             self.tr("Attribute"), las2dem.ATTRIBUTES, 0))
        self.addParameter(ParameterSelection(las2dem.PRODUCT,
                                             self.tr("Product"), las2dem.PRODUCTS, 0))
        self.addParameter(ParameterBoolean(las2dem.USE_TILE_BB,
                                           self.tr("use tile bounding box (after tiling with buffer)"), False))
        self.addParametersRasterOutputGUI()
        self.addParametersAdditionalGUI()
    def processAlgorithm(self, progress):
        """Assemble the las2dem command line from the parameters and run it."""
        commands = [os.path.join(LAStoolsUtils.LAStoolsPath(), "bin", "las2dem")]
        self.addParametersVerboseCommands(commands)
        self.addParametersPointInputCommands(commands)
        self.addParametersFilter1ReturnClassFlagsCommands(commands)
        self.addParametersStepCommands(commands)
        # Index 0 means "default"; only non-default choices add a flag.
        attribute = self.getParameterValue(las2dem.ATTRIBUTE)
        if attribute != 0:
            commands.append("-" + las2dem.ATTRIBUTES[attribute])
        product = self.getParameterValue(las2dem.PRODUCT)
        if product != 0:
            commands.append("-" + las2dem.PRODUCTS[product])
        if (self.getParameterValue(las2dem.USE_TILE_BB)):
            commands.append("-use_tile_bb")
        self.addParametersRasterOutputCommands(commands)
        self.addParametersAdditionalCommands(commands)
        LAStoolsUtils.runLAStools(commands, progress)
| gpl-2.0 |
qsnake/gpaw | gpaw/test/wannier_ethylene.py | 1 | 2008 | import os
from ase import Atom, Atoms
from gpaw import GPAW
from gpaw.test import equal
from gpaw.wannier import Wannier
import numpy as np
# GPAW wannier example for ethylene corresponding to the ASE Wannier
# tutorial.
a = 6.0 # Size of unit cell (Angstrom)
# Ethylene (C2H4) in its planar geometry, placed in a cubic periodic cell.
ethylene = Atoms([Atom('H', (-1.235,-0.936 , 0 )),
                  Atom('H', (-1.235, 0.936 , 0 )),
                  Atom('C', (-0.660, 0.000 , 0 )),
                  Atom('C', ( 0.660, 0.000 , 0 )),
                  Atom('H', ( 1.235,-0.936 , 0 )),
                  Atom('H', ( 1.235, 0.936 , 0 ))],
                 cell=(a, a, a), pbc=True)
ethylene.center()
calc = GPAW(nbands=8, gpts=(32, 32, 32), convergence={'eigenstates': 1e-6})
ethylene.set_calculator(calc)
e = ethylene.get_potential_energy()
niter = calc.get_number_of_iterations()
# Regression checks against previously recorded reference values.
energy_tolerance = 0.00003
niter_tolerance = 0
equal(e, -33.3232491, energy_tolerance)
equal(niter, 25, niter_tolerance)
def check(calc):
    """Localize 6 Wannier functions for *calc* and verify their centers.

    Every computed center must match one of the expected centers (squared
    distance <= 0.01); matched entries are consumed from the expected list
    so each expected center can be claimed only once.
    Raises RuntimeError if a computed center has no match.
    """
    wannier = Wannier(calc, nbands=6)
    wannier.localize()
    centers = wannier.get_centers()
    print centers
    # Reference Wannier centers (Angstrom) for ethylene in this cell.
    expected = [[1.950, 2.376, 3.000],
                [1.950, 3.624, 3.000],
                [3.000, 3.000, 2.671],
                [3.000, 3.000, 3.329],
                [4.050, 2.376, 3.000],
                [4.050, 3.624, 3.000]]
    # Regression check on the localization functional value.
    equal(13.7995, wannier.value, 0.016)
    for center in centers:
        i = 0
        # Scan the remaining expected centers for a close match.
        while np.sum((expected[i] - center)**2) > 0.01:
            i += 1
            if i == len(expected):
                raise RuntimeError, 'Correct center not found'
        expected.pop(i)
# Verify the in-memory calculation, then verify again after a
# write/read round-trip through a .gpw restart file.
check(calc)
calc.write('ethylene.gpw', 'all')
check(GPAW('ethylene.gpw', txt=None))
## for i in range(6):
## wannier.write_cube(i, 'ethylene%s.cube' % i, real=True)
## from ASE.Visualization.PrimiPlotter import PrimiPlotter, X11Window
## ethylene.extend(wannier.get_centers_as_atoms())
## plot = PrimiPlotter(ethylene)
## plot.set_output(X11Window())
## plot.set_radii(.2)
## plot.set_rotation([15, 0, 0])
## plot.plot()
| gpl-3.0 |
marratj/ansible | test/units/modules/cloud/amazon/test_s3.py | 22 | 1440 | import pytest
import unittest
try:
import ansible.modules.cloud.amazon.s3 as s3
except ImportError:
from nose.plugins.skip import SkipTest
raise SkipTest("This test requires the s3 Python libraries")
from ansible.module_utils.six.moves.urllib.parse import urlparse
boto3 = pytest.importorskip("boto3")
class TestUrlparse(unittest.TestCase):
    """Unit tests for URL parsing helpers in the Ansible s3 module."""
    def test_urlparse(self):
        # Sanity check of the stdlib urlparse behaviour the module relies on.
        actual = urlparse("http://test.com/here")
        self.assertEqual("http", actual.scheme)
        self.assertEqual("test.com", actual.netloc)
        self.assertEqual("/here", actual.path)
    def test_is_fakes3(self):
        # A fakes3:// scheme is recognised as a FakeS3 endpoint.
        actual = s3.is_fakes3("fakes3://bla.blubb")
        self.assertEqual(True, actual)
    def test_is_walrus(self):
        actual = s3.is_walrus("trulywalrus_but_invalid_url")
        # I don't know if this makes sense, but this is the current behaviour...
        self.assertEqual(True, actual)
        # Amazon-hosted endpoints must not be detected as Walrus/Eucalyptus.
        actual = s3.is_walrus("http://notwalrus.amazonaws.com")
        self.assertEqual(False, actual)
    def test_get_s3_connection(self):
        aws_connect_kwargs = dict(aws_access_key_id="access_key",
                                  aws_secret_access_key="secret_key")
        location = None
        rgw = True
        s3_url = "http://bla.blubb"
        # With rgw=True the custom s3_url must become the connection endpoint.
        actual = s3.get_s3_connection(None, aws_connect_kwargs, location, rgw, s3_url)
        self.assertEqual(bool("bla.blubb" in str(actual._endpoint)), True)
| gpl-3.0 |
spvkgn/youtube-dl | youtube_dl/extractor/digg.py | 28 | 1882 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import js_to_json
class DiggIE(InfoExtractor):
    """Extractor for digg.com video pages.

    Digg pages embed videos hosted by third parties; the page's
    ``video_info`` JS object names the provider, and extraction is
    delegated to the matching extractor (YouTube, JWPlatform) or to the
    generic extractor as a fallback.
    """
    _VALID_URL = r'https?://(?:www\.)?digg\.com/video/(?P<id>[^/?#&]+)'
    _TESTS = [{
        # JWPlatform via provider
        'url': 'http://digg.com/video/sci-fi-short-jonah-daniel-kaluuya-get-out',
        'info_dict': {
            'id': 'LcqvmS0b',
            'ext': 'mp4',
            'title': "'Get Out' Star Daniel Kaluuya Goes On 'Moby Dick'-Like Journey In Sci-Fi Short 'Jonah'",
            'description': 'md5:541bb847648b6ee3d6514bc84b82efda',
            'upload_date': '20180109',
            'timestamp': 1515530551,
        },
        'params': {
            'skip_download': True,
        },
    }, {
        # Youtube via provider
        'url': 'http://digg.com/video/dog-boat-seal-play',
        'only_matching': True,
    }, {
        # vimeo as regular embed
        'url': 'http://digg.com/video/dream-girl-short-film',
        'only_matching': True,
    }]
    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)
        # Pull the page's "video_info = {...};" JS object; missing info
        # falls through to the Generic extractor below.
        info = self._parse_json(
            self._search_regex(
                r'(?s)video_info\s*=\s*({.+?});\n', webpage, 'video info',
                default='{}'), display_id, transform_source=js_to_json,
            fatal=False)
        video_id = info.get('video_id')
        if video_id:
            provider = info.get('provider_name')
            if provider == 'youtube':
                return self.url_result(
                    video_id, ie='Youtube', video_id=video_id)
            elif provider == 'jwplayer':
                return self.url_result(
                    'jwplatform:%s' % video_id, ie='JWPlatform',
                    video_id=video_id)
        # Unknown/absent provider: let the generic extractor handle the page.
        return self.url_result(url, 'Generic')
| unlicense |
jpypi/dup-image-search | simple_hash/fast_simple_hash.py | 1 | 1638 | #!/usr/bin/env python2
"""
simple_hash.py
Generates a hash using the "simple" method outlined on:
http://www.hackerfactor.com/blog/index.php?/archives/432-Looks-Like-It.html
:author: Brandon Arrendondo
:author: James Jenkins
:license: MIT
"""
import sys
import argparse
import numpy
import glob
from PIL import Image
from multiprocessing import Pool
def calculate_simple_hash(image):
    """
    Calculates the simple hash of an image.

    The basic steps (verbatim from hackerfactor, see heading):
    1. Reduce size to 8x8
    2. Reduce color to greyscale
    3. Average the colors
    4. Compute the 64 bits - 1 if above average, 0 if not
    5. Construct the hash
    """
    # Shrink to 8x8 and drop colour information in one chained pass.
    reduced = image.resize((8, 8)).convert("L")
    pixels = reduced.getdata()
    mean_value = numpy.mean(pixels)
    # Bit i of the hash is set exactly when pixel i is brighter than the mean.
    bits = 0
    for position, value in enumerate(pixels):
        if value > mean_value:
            bits |= 1 << position
    return bits
def hash_directory(directory):
    """Hash every readable image directly inside *directory*.

    Appends one "hash,filepath" line per image to simple_hashes.txt in the
    current working directory.  Files that cannot be opened or hashed as
    images are skipped silently (best-effort scan).
    """
    with open("simple_hashes.txt", "a") as f:
        for filepath in glob.iglob("{0!s}/*".format(directory)):
            try:
                image = Image.open(filepath)
                image_hash = calculate_simple_hash(image)
                f.write("{0!s},{1!s}\n".format(image_hash, filepath))
            except Exception:
                # Not an image / unreadable file: skip it.  Narrowed from a
                # bare 'except' so KeyboardInterrupt/SystemExit still abort.
                pass
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("directory", help="directory to scan")
args = parser.parse_args()
hash_directory(args.directory)
| mit |
josephsnyder/VistA | Scripts/VistAMenuUtil.py | 2 | 8783 | #---------------------------------------------------------------------------
# Copyright 2013 The Open Source Electronic Health Record Agent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
from builtins import object
import re
import sys
# constant for reuse
# FileMan dialog prompts matched when navigating into the print/search/
# inquire options (output) and the enter-or-edit option (input).
DD_OUTPUT_FROM_WHAT_FILE = "OUTPUT FROM WHAT FILE:"
DD_INPUT_TO_WHAT_FILE = "INPUT TO WHAT FILE:"
"""
Utilitity Class to access VistA Menus System
"""
class VistAMenuUtil(object):
    """Drives VistA's roll-and-scroll menu system over a terminal client.

    Every goto***Menu/exit***Menu pair navigates into, respectively out of,
    one specific menu by sending keystrokes and pattern-matching prompts
    (pexpect-style send/expect on ``vistAClient.getConnection()``).
    Unless noted otherwise, goto***Menu expects the client at the M prompt
    ("ready" state) and exit***Menu returns it there.
    """
    def __init__(self, duz):
        # DUZ is the internal entry number identifying the signed-on VistA
        # user; it is set before calling ^XUP / Q^DI below.
        self._duz = duz
        pass
    """
    If not specified,
    in goto***Menu function call, vistAClient should be in ready state.
    after exit***Menu function call, vistAClient should also be in ready state.
    """
    """
    EVE System Menu
    """
    def gotoSystemMenu(self, vistAClient):
        """From the M prompt, sign on via ^XUP and enter EVE (Systems Manager Menu)."""
        connection = vistAClient.getConnection()
        vistAClient.waitForPrompt()
        connection.send("K \r")  # clear the symbol table before signing on
        vistAClient.waitForPrompt()
        connection.send("S DUZ=%s D ^XUP\r" % self._duz)
        connection.expect("Select OPTION NAME: ")
        connection.send("EVE\r")
        connection.expect("CHOOSE 1-")
        connection.send("1\r")
        # An informational screen may appear first; acknowledge it.
        index = connection.expect(["Select Systems Manager Menu ", "to continue"])
        if index == 1:
            connection.send("\r")
            connection.expect("Select Systems Manager Menu ")
    def exitSystemMenu(self, vistAClient):
        """Leave the Systems Manager Menu and return to the M prompt."""
        connection = vistAClient.getConnection()
        connection.expect("Select Systems Manager Menu ")
        connection.send("\r")
        connection.expect("Do you really want to halt\?")
        connection.send("\r")
        vistAClient.waitForPrompt()
        connection.send("\r")
    """ Programmer Options """
    def gotoProgrammerMenu(self, vistAClient):
        """Enter Systems Manager Menu -> Programmer Options."""
        connection = vistAClient.getConnection()
        self.gotoSystemMenu(vistAClient)
        connection.send("Programmer Options\r")
        connection.expect("Select Programmer Options ")
    def exitProgrammerMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select Programmer Options ")
        connection.send("\r")
        self.exitSystemMenu(vistAClient)
    """ KIDS Menu SubSection """
    def gotoKidsMainMenu(self, vistAClient):
        """Enter Programmer Options -> Kernel Installation & Distribution System."""
        connection = vistAClient.getConnection()
        self.gotoProgrammerMenu(vistAClient)
        connection.send("KIDS\r")
        connection.expect("Select Kernel Installation \& Distribution System ")
    def exitKidsMainMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select Kernel Installation \& Distribution System")
        connection.send("\r")
        self.exitProgrammerMenu(vistAClient)
    def gotoKidsUtilMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoKidsMainMenu(vistAClient)
        connection.send("Utilities\r")
        connection.expect("Select Utilities ")
    def exitKidsUtilMenu(self, vistAClient):
        # NOTE(review): unlike the sibling exit methods this does not
        # expect() the "Select Utilities" prompt before sending --
        # presumably the caller is already sitting at it; confirm.
        connection = vistAClient.getConnection()
        connection.send("\r")
        self.exitKidsMainMenu(vistAClient)
    """ Taskman Menu SubSection """
    def gotoTaskmanMainMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoSystemMenu(vistAClient)
        connection.send("Taskman Management\r")
        connection.expect("Select Taskman Management ")
    def exitTaskmanMainMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select Taskman Management ")
        connection.send("\r")
        self.exitSystemMenu(vistAClient)
    def gotoTaskmanMgrUtilMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoTaskmanMainMenu(vistAClient)
        connection.send("Taskman Management Utilities\r")
        connection.expect("Select Taskman Management Utilities ")
    def exitTaskmanMgrUtilMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select Taskman Management Utilities ")
        connection.send("\r")
        self.exitTaskmanMainMenu(vistAClient)
    def gotoTaskmanEditParamMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoTaskmanMgrUtilMenu(vistAClient)
        connection.send("Edit Taskman Parameters\r")
        connection.expect("Select Edit Taskman Parameters ")
    def exitTaskmanEditParamMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select Edit Taskman Parameters ")
        connection.send("\r")
        self.exitTaskmanMgrUtilMenu(vistAClient)
    """ HL7 Menu SubSection """
    def gotoHL7MainMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoSystemMenu(vistAClient)
        connection.send("HL7 Main Menu\r")
        connection.expect("Select HL7 Main Menu ")
    def exitHL7MainMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select HL7 Main Menu ")
        connection.send("\r")
        self.exitSystemMenu(vistAClient)
    def gotoHL7FilerLinkMgrMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoHL7MainMenu(vistAClient)
        connection.send("Filer and Link Management Options\r")
        connection.expect("Select Filer and Link Management Options ")
    def exitHL7FilerLinkMgrMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select Filer and Link Management Options ")
        connection.send("\r")
        self.exitHL7MainMenu(vistAClient)
    """ Mailman Menu Sub-Section """
    def gotoMailmanMasterMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoSystemMenu(vistAClient)
        connection.send("Mailman Master Menu\r")
        connection.expect("Select MailMan Master Menu ")
    def exitMailmanMasterMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select MailMan Master Menu ")
        connection.send("\r")
        self.exitSystemMenu(vistAClient)
    def gotoMailmanManageMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoMailmanMasterMenu(vistAClient)
        connection.send("Manage Mailman\r")
        # A pager screen may precede the prompt; "^" aborts the listing.
        index = connection.expect(["to continue","Select Manage Mailman "])
        if index == 0:
            connection.send("^\r")
            connection.expect("Select Manage Mailman ")
    def exitMailmanManageMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select Manage Mailman ")
        connection.send("\r")
        self.exitMailmanMasterMenu(vistAClient)
    def gotoMailmanLocalDeliveryMgrMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoMailmanManageMenu(vistAClient)
        connection.send("Local Delivery Management\r")
        connection.expect("Select Local Delivery Management ")
    def exitMailmanLocalDeliveryMgrMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        connection.expect("Select Local Delivery Management ")
        connection.send("\r")
        self.exitMailmanManageMenu(vistAClient)
    """
    FileMan Menu Section
    """
    def gotoFileManMenu(self, vistAClient):
        """Enter FileMan directly via D Q^DI (bypasses the menu tree)."""
        connection = vistAClient.getConnection()
        vistAClient.waitForPrompt()
        connection.send("S DUZ=%s D Q^DI\r" % self._duz)
        connection.expect("Select OPTION:")
    def exitFileManMenu(self, vistAClient, waitOption=True):
        """Leave FileMan; pass waitOption=False when already past the prompt."""
        connection = vistAClient.getConnection()
        if waitOption:
            connection.expect("Select OPTION: ")
        connection.send("\r")
        vistAClient.waitForPrompt()
        connection.send("\r")
    def gotoFileManEditEnterEntryMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoFileManMenu(vistAClient)
        connection.send("1\r")  # enter or edit entry
        connection.expect(DD_INPUT_TO_WHAT_FILE)
    def gotoFileManPrintFileEntryMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoFileManMenu(vistAClient)
        connection.send("2\r")  # print file entry
        connection.expect(DD_OUTPUT_FROM_WHAT_FILE)
    def gotoFileManSearchFileEntryMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoFileManMenu(vistAClient)
        connection.send("3\r")  # search file entry
        connection.expect(DD_OUTPUT_FROM_WHAT_FILE)
    def gotoFileManInquireFileEntryMenu(self, vistAClient):
        connection = vistAClient.getConnection()
        self.gotoFileManMenu(vistAClient)
        connection.send("5\r")  # inquiry file entry
        connection.expect(DD_OUTPUT_FROM_WHAT_FILE)
| apache-2.0 |
taget/node | scripts/password.py | 1 | 2111 | #!/usr/bin/python
# password.py - Copyright (C) 2010 Red Hat, Inc.
# Written by Joey Boggs <jboggs@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA. A copy of the GNU General Public License is
# also available at http://www.gnu.org/copyleft/gpl.html.
from ovirtnode.ovirtfunctions import *
import libuser
import random
import crypt
def cryptPassword(password):
    """Hash *password* with SHA-512 crypt ($6$) and a random 16-char salt.

    Returns the full crypt string suitable for /etc/shadow.
    """
    # Fixes: a dead ``saltlen = 2`` assignment and an unused ``algo``
    # variable were removed; ``string.letters`` (Python-2-only and
    # locale-dependent, so it could yield salt characters outside the
    # crypt alphabet) is replaced with ``string.ascii_letters``.
    saltlen = 16
    salt_chars = string.ascii_letters + string.digits + './'
    saltstr = '$6$' + ''.join(random.choice(salt_chars)
                              for _ in range(saltlen))
    return crypt.crypt(password, saltstr)
def set_password(password, user):
    """Set *user*'s login password to *password* (stored SHA-512 crypted).

    On oVirt Node /etc/shadow lives on a read-only layer, so the file is
    unbound first and persisted again afterwards (unmount_config /
    ovirt_store_config come from ovirtfunctions' star import).
    """
    admin = libuser.admin()
    # libuser entry for the target account (despite the name, not
    # necessarily "root").
    root = admin.lookupUserByName(user)
    passwd = cryptPassword(password)
    unmount_config("/etc/shadow")
    admin.setpassUser(root, passwd, "is_crypted")
    ovirt_store_config("/etc/shadow")
    return True
def check_ssh_password_auth():
    """Return sshd's PasswordAuthentication setting via augeas.

    Bug fix: the result of ``.get()`` was previously discarded and the raw
    Augeas handle was returned instead of the setting's value, making the
    "check" useless to callers.
    """
    aug = augeas.Augeas("root=/")
    return aug.get("/files/etc/ssh/sshd_config/PasswordAuthentication")
def toggle_ssh_access():
    """Apply OVIRT_VARS["ssh_pass_enabled"] to sshd_config and reload sshd.

    Returns the exit status of the ``service sshd reload`` command.
    """
    ssh_config = augeas.Augeas("root=/")
    # Bug fix: the value was previously written to the sshd_config *file*
    # node instead of its PasswordAuthentication child, so the setting
    # never changed.
    ssh_config.set("/files/etc/ssh/sshd_config/PasswordAuthentication",
                   OVIRT_VARS["ssh_pass_enabled"])
    ssh_config.save()
    ovirt_store_config("/etc/ssh/sshd_config")
    rc = system_closefds("service sshd reload")
    return rc
def set_sasl_password(user, password):
    """Register *user* in the libvirt SASL database.

    Bug fix: the ``%`` operator was applied to system_closefds()'s return
    value instead of the command string (``f("...%s") % user``), which
    raised a TypeError at runtime.
    """
    # NOTE(review): *password* is still unused -- ``saslpasswd2 -p`` reads
    # the password from stdin, so it must be supplied to the subprocess;
    # confirm how system_closefds handles stdin.
    system_closefds("saslpasswd2 -a libvirt -p %s" % user)
| gpl-2.0 |
jboeuf/grpc | tools/distrib/python/grpcio_tools/grpc_tools/protoc.py | 20 | 1198 | #!/usr/bin/env python
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pkg_resources
import sys
from grpc_tools import _protoc_compiler
def main(command_arguments):
    """Invoke the protocol buffer compiler and return its exit status.

    Args:
      command_arguments: list of strings -- the command-line arguments that
        would be given to a stand-alone ``protoc`` binary.
    """
    # The C extension expects bytes, not str.
    encoded_arguments = [argument.encode() for argument in command_arguments]
    return _protoc_compiler.run_main(encoded_arguments)
if __name__ == '__main__':
    # Put grpc_tools' bundled well-known-type protos on the include path.
    include_flag = '-I{}'.format(
        pkg_resources.resource_filename('grpc_tools', '_proto'))
    sys.exit(main(sys.argv + [include_flag]))
| apache-2.0 |
hendradarwin/ITK | Wrapping/Generators/Python/Tests/notYetUsable/itkCurvatureFlowTestPython2.py | 11 | 3693 | #==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
# Regression test: run CurvatureFlow + ShiftScale on cthead1.png and compare
# the result against the stored baseline image; on mismatch, write the
# test/diff/valid images and report them as Dart dashboard measurements.
from InsightToolkit import *
import itkTesting
import sys
import os
import shutil
# Test name is derived from this script's filename; baseline lives under
# <baseline>/Algorithms/<name>.png.
basename = os.path.basename(sys.argv[0])
name = os.path.splitext(basename)[0]
dir = "Algorithms"
testInput = itkTesting.ITK_TEST_INPUT
testOutput = itkTesting.ITK_TEST_OUTPUT
baseLine = itkTesting.ITK_TEST_BASELINE
# Pipeline: PNG reader (float 2D) -> curvature flow smoothing ->
# shift/scale -> unsigned short 2D.
reader = itkImageFileReaderF2_New()
reader.SetFileName(testInput + "/cthead1.png")
cf = itkCurvatureFlowImageFilterF2F2_New()
cf.SetInput(reader.GetOutput())
cf.SetTimeStep(0.25)
cf.SetNumberOfIterations(10)
cfss = itkShiftScaleImageFilterF2US2_New()
cfss.SetInput(cf.GetOutput())
cfss.SetShift(0.7)
cfss.SetScale(0.9)
# Compare against the baseline with a 1-pixel tolerance radius.
valid = itkImageFileReaderUS2_New()
valid.SetFileName(baseLine + "/" + dir + "/" + name + ".png")
diff = itkDifferenceImageFilterUS2_New()
diff.SetValidInput(valid.GetOutput())
diff.SetTestInput(cfss.GetOutput())
diff.SetToleranceRadius(1)
diff.SetDifferenceThreshold(0)
diff.Update()
meanDiff = diff.GetMeanDifference()
totalDiff = diff.GetTotalDifference()
print("MeanDifference = ", meanDiff)
print("TotalDifference = ", totalDiff)
print ("<DartMeasurement name=\"MeanDifference\" type=\"numeric/double\">",
       meanDiff, "</DartMeasurement>")
print ("<DartMeasurement name=\"TotalDifference\" type=\"numeric/double\">",
       totalDiff, "</DartMeasurement>")
# On failure (mean difference above threshold) emit the three images the
# dashboard links to: the test output, the rescaled diff, and the baseline.
if (meanDiff > 0.1):
    convert = itkCastImageFilterUS2UC2_New()
    rescale = itkRescaleIntensityImageFilterUS2UC2_New()
    rescale.SetInput(diff.GetOutput())
    rescale.SetOutputMinimum(0)
    rescale.SetOutputMaximum(255)
    io = itkPNGImageIO_New()
    io.SetUseCompression(1)
    io.SetCompressionLevel(9)
    writer = itkImageFileWriterUC2_New()
    writer.SetImageIO(io.GetPointer())
    writer.SetInput(convert.GetOutput())
    writer.SetFileName(testOutput + "/" + name + ".test.png")
    convert.SetInput(cfss.GetOutput())
    writer.Write()
    writer.SetFileName(testOutput + "/" + name + ".diff.png")
    writer.SetInput(rescale.GetOutput())
    writer.Write()
    shutil.copyfile(
        baseLine +
        "/" +
        dir +
        "/" +
        name +
        ".png",
        testOutput +
        "/" +
        name +
        ".valid.png")
    print ("<DartMeasurementFile name=\"TestImage\" type=\"image/png\">" +
           testOutput + "/" + name + ".test.png</DartMeasurementFile>")
    print ("<DartMeasurementFile name=\"DifferenceImage\" type=\"image/png\">"
           + testOutput + "/" + name + ".diff.png</DartMeasurementFile>")
    print ("<DartMeasurementFile name=\"ValidImage\" type=\"image/png\">" +
           testOutput + "/" + name + ".valid.png</DartMeasurementFile>")
    pr = "<DartMeasurement name=\"DifferenceShift\" type=\"numeric/double\">"
    print (pr, rescale.GetShift(), "</DartMeasurement>")
    pr = "<DartMeasurement name=\"DifferenceScale\" type=\"numeric/double\">"
    print (pr, rescale.GetScale(), "</DartMeasurement>")
# return 1
# return 0
vybstat/scikit-learn | examples/linear_model/plot_sgd_iris.py | 286 | 2202 | """
========================================
Plot multi-class SGD on the iris dataset
========================================
Plot decision surface of multi-class SGD on iris dataset.
The hyperplanes corresponding to the three one-versus-all (OVA) classifiers
are represented by the dashed lines.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.linear_model import SGDClassifier
# import some data to play with
iris = datasets.load_iris()
X = iris.data[:, :2]  # we only take the first two features. We could
                      # avoid this ugly slicing by using a two-dim dataset
y = iris.target
colors = "bry"  # one matplotlib color code per class
# shuffle (fixed seed so the plot is reproducible)
idx = np.arange(X.shape[0])
np.random.seed(13)
np.random.shuffle(idx)
X = X[idx]
y = y[idx]
# standardize to zero mean / unit variance per feature
mean = X.mean(axis=0)
std = X.std(axis=0)
X = (X - mean) / std
h = .02  # step size in the mesh
clf = SGDClassifier(alpha=0.001, n_iter=100).fit(X, y)
# create a mesh to plot in
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                     np.arange(y_min, y_max, h))
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, m_max]x[y_min, y_max].
Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
cs = plt.contourf(xx, yy, Z, cmap=plt.cm.Paired)
plt.axis('tight')
# Plot also the training points, one scatter call per class
for i, color in zip(clf.classes_, colors):
    idx = np.where(y == i)
    plt.scatter(X[idx, 0], X[idx, 1], c=color, label=iris.target_names[i],
                cmap=plt.cm.Paired)
plt.title("Decision surface of multi-class SGD")
plt.axis('tight')
# Plot the three one-against-all classifiers
xmin, xmax = plt.xlim()
ymin, ymax = plt.ylim()
coef = clf.coef_
intercept = clf.intercept_
def plot_hyperplane(c, color):
    """Draw class *c*'s OVA separating line (coef.x + intercept = 0)."""
    def line(x0):
        # Solve w0*x0 + w1*x1 + b = 0 for x1.
        return (-(x0 * coef[c, 0]) - intercept[c]) / coef[c, 1]
    plt.plot([xmin, xmax], [line(xmin), line(xmax)],
             ls="--", color=color)
for i, color in zip(clf.classes_, colors):
    plot_hyperplane(i, color)
plt.legend()
plt.show()
gamingrobot/SpockBot | spockbot/plugins/helpers/entities.py | 1 | 6663 | """
An entity tracker
"""
from spockbot.mcdata.utils import Info
from spockbot.plugins.base import PluginBase, pl_announce
class MCEntity(Info):
    """Base record for any entity the server has spawned.

    Class attributes are defaults; instances are populated via
    ``set_dict(packet.data)`` in the handlers below.
    """
    eid = 0       # server-assigned entity id
    status = 0
    nbt = None
    metadata = None
class MovementEntity(MCEntity):
    """Entity that has a world position and orientation."""
    x = 0
    y = 0
    z = 0
    yaw = 0
    pitch = 0
    on_ground = True
class PlayerEntity(MovementEntity):
    """Another player in the world (from Spawn Player packets)."""
    uuid = 0
    current_item = 0
    metadata = None
class ObjectEntity(MovementEntity):
    """Non-mob object (from Spawn Object packets), e.g. dropped items."""
    obj_type = 0
    obj_data = 0
    speed_x = 0
    speed_y = 0
    speed_z = 0
class MobEntity(MovementEntity):
    """Mob (from Spawn Mob packets) with head orientation and velocity."""
    mob_type = 0
    head_pitch = 0
    head_yaw = 0
    velocity_x = 0
    velocity_y = 0
    velocity_z = 0
    metadata = None
class PaintingEntity(MCEntity):
    """Wall painting (from Spawn Painting packets)."""
    title = ""
    # NOTE(review): mutable class-level dict is shared by all instances
    # until set_dict replaces it -- verify Info assigns a fresh dict.
    location = {
        'x': 0,
        'y': 0,
        'z': 0,
    }
    direction = 0
class ExpEntity(MCEntity):
    """Experience orb (from Spawn Experience Orb packets)."""
    x = 0
    y = 0
    z = 0
    count = 0
class GlobalEntity(MCEntity):
    """Globally visible entity (from Spawn Global Entity packets)."""
    global_type = 0
    x = 0
    y = 0
    z = 0
class EntitiesCore(object):
    """Aggregated view of all tracked entities.

    ``entities`` maps every known eid to its record; the per-category
    dicts hold the same records bucketed by kind.
    """

    def __init__(self):
        self.client_player = MCEntity()
        self.entities = {}
        # One lookup dict per entity category, keyed by entity id.
        for bucket in ('players', 'mobs', 'objects',
                       'paintings', 'exp_orbs', 'global_entities'):
            setattr(self, bucket, {})
@pl_announce('Entities')
class EntitiesPlugin(PluginBase):
    """Mirrors server entity packets into EntitiesCore lookup tables.

    Also re-emits higher level ``entity_*`` events (spawn/move/destroy/...)
    for other plugins.
    """
    requires = 'Event'
    events = {
        'PLAY<Join Game': 'handle_join_game',
        'PLAY<Spawn Player': 'handle_spawn_player',
        'PLAY<Spawn Object': 'handle_spawn_object',
        'PLAY<Spawn Mob': 'handle_spawn_mob',
        'PLAY<Spawn Painting': 'handle_spawn_painting',
        'PLAY<Spawn Experience Orb': 'handle_spawn_experience_orb',
        'PLAY<Destroy Entities': 'handle_destroy_entities',
        'PLAY<Entity Equipment': 'handle_unhandled',
        'PLAY<Entity Velocity': 'handle_velocity',
        'PLAY<Entity Relative Move': 'handle_relative_move',
        'PLAY<Entity Look': 'handle_set_dict',
        'PLAY<Entity Look and Relative Move': 'handle_relative_move',
        'PLAY<Entity Teleport': 'handle_set_dict',
        'PLAY<Entity Head Look': 'handle_set_dict',
        'PLAY<Entity Status': 'handle_set_dict',
        'PLAY<Entity Metadata': 'handle_set_dict',
        'PLAY<Entity Effect': 'handle_unhandled',
        'PLAY<Remove Entity Effect': 'handle_unhandled',
        'PLAY<Entity Properties': 'handle_unhandled',
        'PLAY<Spawn Global Entity': 'handle_spawn_global_entity',
        'PLAY<Update Entity NBT': 'handle_set_dict',
    }
    def __init__(self, ploader, settings):
        super(EntitiesPlugin, self).__init__(ploader, settings)
        self.ec = EntitiesCore()
        ploader.provides('Entities', self.ec)
    # TODO: Implement all these things
    def handle_unhandled(self, event, packet):
        """Placeholder for entity packets that are not tracked yet."""
        pass
    def handle_join_game(self, event, packet):
        """Record our own player entity when joining the game."""
        self.ec.client_player.set_dict(packet.data)
        self.ec.entities[packet.data['eid']] = self.ec.client_player
    def handle_spawn_player(self, event, packet):
        """Track a newly spawned player and announce it."""
        entity = PlayerEntity()
        entity.set_dict(packet.data)
        self.ec.entities[packet.data['eid']] = entity
        self.ec.players[packet.data['eid']] = entity
        self.event.emit('entity_spawn', {'entity': entity})
        self.event.emit('entity_player_spawn', entity)
    def handle_spawn_object(self, event, packet):
        """Track a newly spawned object."""
        entity = ObjectEntity()
        entity.set_dict(packet.data)
        self.ec.entities[packet.data['eid']] = entity
        self.ec.objects[packet.data['eid']] = entity
        self.event.emit('entity_spawn', {'entity': entity})
    def handle_spawn_mob(self, event, packet):
        """Track a newly spawned mob and announce it."""
        entity = MobEntity()
        entity.set_dict(packet.data)
        self.ec.entities[packet.data['eid']] = entity
        self.ec.mobs[packet.data['eid']] = entity
        self.event.emit('entity_spawn', {'entity': entity})
        self.event.emit('entity_mob_spawn', entity)
    def handle_spawn_painting(self, event, packet):
        """Track a newly spawned painting."""
        entity = PaintingEntity()
        entity.set_dict(packet.data)
        self.ec.entities[packet.data['eid']] = entity
        self.ec.paintings[packet.data['eid']] = entity
        self.event.emit('entity_spawn', {'entity': entity})
    def handle_spawn_experience_orb(self, event, packet):
        """Track a newly spawned experience orb."""
        entity = ExpEntity()
        entity.set_dict(packet.data)
        self.ec.entities[packet.data['eid']] = entity
        self.ec.exp_orbs[packet.data['eid']] = entity
        self.event.emit('entity_spawn', {'entity': entity})
    def handle_spawn_global_entity(self, event, packet):
        """Track a newly spawned global entity (e.g. lightning)."""
        entity = GlobalEntity()
        entity.set_dict(packet.data)
        self.ec.entities[packet.data['eid']] = entity
        self.ec.global_entities[packet.data['eid']] = entity
        self.event.emit('entity_spawn', {'entity': entity})
    def handle_destroy_entities(self, event, packet):
        """Drop destroyed entities from every lookup table and announce."""
        for eid in packet.data['eids']:
            if eid in self.ec.entities:
                entity = self.ec.entities[eid]
                del self.ec.entities[eid]
                # Remove from whichever category bucket holds it.
                if eid in self.ec.players:
                    del self.ec.players[eid]
                elif eid in self.ec.objects:
                    del self.ec.objects[eid]
                elif eid in self.ec.mobs:
                    del self.ec.mobs[eid]
                elif eid in self.ec.paintings:
                    del self.ec.paintings[eid]
                elif eid in self.ec.exp_orbs:
                    del self.ec.exp_orbs[eid]
                elif eid in self.ec.global_entities:
                    del self.ec.global_entities[eid]
                self.event.emit('entity_destroy', {'entity': entity})
    def handle_relative_move(self, event, packet):
        """Apply a relative move: set_dict first, then add the deltas."""
        if packet.data['eid'] in self.ec.entities:
            entity = self.ec.entities[packet.data['eid']]
            old_pos = [entity.x, entity.y, entity.z]
            entity.set_dict(packet.data)
            entity.x = entity.x + packet.data['dx']
            entity.y = entity.y + packet.data['dy']
            entity.z = entity.z + packet.data['dz']
            self.event.emit('entity_move',
                            {'entity': entity, 'old_pos': old_pos})
    def handle_velocity(self, event, packet):
        """Update an entity's velocity; extra event for our own player."""
        if packet.data['eid'] in self.ec.entities:
            self.ec.entities[packet.data['eid']].set_dict(packet.data)
            if packet.data['eid'] == self.ec.client_player.eid:
                self.event.emit('entity_player_velocity', packet.data)
    def handle_set_dict(self, event, packet):
        """Generic handler: merge the packet fields into the entity."""
        if packet.data['eid'] in self.ec.entities:
            self.ec.entities[packet.data['eid']].set_dict(packet.data)
| mit |
netgroup/dreamer-ryu | ryu/services/protocols/vrrp/monitor_linux.py | 22 | 8398 | # Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2013 Isaku Yamahata <yamahata at private email ne jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import socket
import struct
from ryu.controller import handler
from ryu.ofproto import ether
from ryu.ofproto import inet
from ryu.lib import addrconv
from ryu.lib import hub
from ryu.lib.packet import arp
from ryu.lib.packet import vrrp
from ryu.services.protocols.vrrp import monitor
from ryu.services.protocols.vrrp import event as vrrp_event
from ryu.services.protocols.vrrp import utils
# Those are not defined in socket module; values taken from the Linux
# kernel/glibc headers -- TODO confirm against the target kernel headers.
SS_MAXSIZE = 128              # sizeof(struct sockaddr_storage)
MCAST_JOIN_GROUP = 42         # protocol-independent multicast join
MCAST_LEAVE_GROUP = 45        # protocol-independent multicast leave
PACKET_ADD_MEMBERSHIP = 1     # packet(7) membership setsockopt options
PACKET_DROP_MEMBERSHIP = 2
PACKET_MR_MULTICAST = 0       # packet_mreq.mr_type: multicast address
SOL_PACKET = 263              # socket level for AF_PACKET options
def if_nametoindex(ifname):
    """Return the kernel interface index of *ifname* (read from sysfs).

    Raises OSError/IOError if the interface does not exist and ValueError
    if the sysfs file is unexpectedly empty (the original silently
    returned None in that case).
    """
    filename = '/sys/class/net/' + ifname + '/ifindex'
    # open() is itself a context manager; contextlib.closing() and the
    # first-line loop were redundant -- the file holds a single integer.
    with open(filename) as f:
        return int(f.read())
@monitor.VRRPInterfaceMonitor.register(vrrp_event.VRRPInterfaceNetworkDevice)
class VRRPInterfaceMonitorNetworkDevice(monitor.VRRPInterfaceMonitor):
    """
    Monitors a network device for VRRP packets via raw sockets.

    This module uses raw socket so that privilege(CAP_NET_ADMIN capability)
    is required.
    """
    def __init__(self, *args, **kwargs):
        super(VRRPInterfaceMonitorNetworkDevice, self).__init__(*args,
                                                                **kwargs)
        self.__is_active = True
        config = self.config
        # Pick address family / ethertype / VRRP virtual source MAC based
        # on whether the configured instance runs over IPv4 or IPv6.
        if config.is_ipv6:
            family = socket.AF_INET6
            ether_type = ether.ETH_TYPE_IPV6
            mac_address = vrrp.vrrp_ipv6_src_mac_address(config.vrid)
        else:
            family = socket.AF_INET
            ether_type = ether.ETH_TYPE_IP
            mac_address = vrrp.vrrp_ipv4_src_mac_address(config.vrid)
        # socket module doesn't define IPPROTO_VRRP
        # ip_socket is used only for multicast group membership (below);
        # actual frames are sent/received on the AF_PACKET socket.
        self.ip_socket = socket.socket(family, socket.SOCK_RAW,
                                       inet.IPPROTO_VRRP)
        self.packet_socket = socket.socket(socket.AF_PACKET, socket.SOCK_RAW,
                                           socket.htons(ether_type))
        self.packet_socket.bind((self.interface.device_name, ether_type,
                                 socket.PACKET_MULTICAST,
                                 arp.ARP_HW_TYPE_ETHERNET,
                                 addrconv.mac.text_to_bin(mac_address)))
        self.ifindex = if_nametoindex(self.interface.device_name)
    def start(self):
        """Join the VRRP multicast groups and start the receive loop."""
        # discard received packets before joining multicast membership
        packet_socket = self.packet_socket
        packet_socket.setblocking(0)
        with hub.Timeout(0.1, False):
            while True:
                try:
                    packet_socket.recv(1500)
                except socket.error:
                    break
        packet_socket.setblocking(1)
        self._join_multicast_membership(True)
        self._join_vrrp_group(True)
        super(VRRPInterfaceMonitorNetworkDevice, self).start()
        self.threads.append(hub.spawn(self._recv_loop))
    def stop(self):
        # Signals _recv_loop to exit on its next timeout check.
        self.__is_active = False
        super(VRRPInterfaceMonitorNetworkDevice, self).stop()
    # we assume that the structures in the following two functions for
    # multicast are aligned in the same way on all the archtectures.
    def _join_multicast_membership(self, join_leave):
        """Join/leave the VRRP link-layer multicast group on packet_socket."""
        config = self.config
        if config.is_ipv6:
            mac_address = vrrp.vrrp_ipv6_src_mac_address(config.vrid)
        else:
            mac_address = vrrp.vrrp_ipv4_src_mac_address(config.vrid)
        if join_leave:
            add_drop = PACKET_ADD_MEMBERSHIP
        else:
            add_drop = PACKET_DROP_MEMBERSHIP
        # struct packet_mreq {
        #     int mr_ifindex;
        #     unsigned short mr_type;
        #     unsigned short mr_alen;
        #     unsigned char mr_mr_address[8];
        # };
        packet_mreq = struct.pack('IHH8s', self.ifindex,
                                  PACKET_MR_MULTICAST, 6,
                                  addrconv.mac.text_to_bin(mac_address))
        self.packet_socket.setsockopt(SOL_PACKET, add_drop, packet_mreq)
    def _join_vrrp_group(self, join_leave):
        """Join/leave the VRRP IP multicast group via MCAST_*_GROUP.

        Builds a struct group_req by hand (ifindex + padded
        sockaddr_storage holding the VRRP destination address).
        """
        if join_leave:
            join_leave = MCAST_JOIN_GROUP
        else:
            join_leave = MCAST_LEAVE_GROUP
        # struct group_req {
        #     __u32 gr_interface;  /* interface index */
        #     struct __kernel_sockaddr_storage gr_group;  /* group address */
        # };
        group_req = struct.pack('I', self.ifindex)
        # padding to gr_group. This is environment dependent
        group_req += '\x00' * (struct.calcsize('P') - struct.calcsize('I'))
        if self.config.is_ipv6:
            # struct sockaddr_in6 {
            #     sa_family_t sin6_family;   /* AF_INET6 */
            #     in_port_t sin6_port;       /* port number */
            #     uint32_t sin6_flowinfo;    /* IPv6 flow information */
            #     struct in6_addr sin6_addr; /* IPv6 address */
            #     uint32_t sin6_scope_id;    /* Scope ID (new in 2.4) */
            # };
            # struct in6_addr {
            #     unsigned char s6_addr[16];   /* IPv6 address */
            # };
            family = socket.IPPROTO_IPV6
            sockaddr = struct.pack('H', socket.AF_INET6)
            sockaddr += struct.pack('!H', 0)
            sockaddr += struct.pack('!I', 0)
            sockaddr += addrconv.ipv6.text_to_bin(vrrp.VRRP_IPV6_DST_ADDRESS)
            sockaddr += struct.pack('I', 0)
        else:
            # #define __SOCK_SIZE__ 16 /* sizeof(struct sockaddr) */
            # struct sockaddr_in {
            #     __kernel_sa_family_t sin_family;     /* Address family */
            #     __be16 sin_port;                     /* Port number */
            #     struct in_addr sin_addr;             /* Internet address */
            #     /* Pad to size of `struct sockaddr'. */
            #     unsigned char __pad[__SOCK_SIZE__ - sizeof(short int) -
            #         sizeof(unsigned short int) - sizeof(struct in_addr)];
            # };
            # struct in_addr {
            #     __be32 s_addr;
            # };
            family = socket.IPPROTO_IP
            sockaddr = struct.pack('H', socket.AF_INET)
            sockaddr += struct.pack('!H', 0)
            sockaddr += addrconv.ipv4.text_to_bin(vrrp.VRRP_IPV4_DST_ADDRESS)
        # Pad the sockaddr up to sockaddr_storage size.
        sockaddr += '\x00' * (SS_MAXSIZE - len(sockaddr))
        group_req += sockaddr
        self.ip_socket.setsockopt(family, join_leave, group_req)
        return
    def _recv_loop(self):
        """Receive VRRP frames until stopped; leaves the groups on exit."""
        packet_socket = self.packet_socket
        packet_socket.settimeout(1.3)  # to check activeness periodically
        try:
            while self.__is_active:
                try:
                    buf = packet_socket.recv(128)
                except socket.timeout:
                    self.logger.debug('timeout')
                    continue
                except:
                    self.logger.error('recv failed')
                    continue
                if len(buf) == 0:
                    # Peer closed / device gone -- stop monitoring.
                    self.__is_active = False
                    break
                self.logger.debug('recv buf')
                self._send_vrrp_packet_received(buf)
        finally:
            self._join_vrrp_group(False)
            self._join_multicast_membership(False)
    @handler.set_ev_handler(vrrp_event.EventVRRPTransmitRequest)
    def vrrp_transmit_request_handler(self, ev):
        """Send a pre-built VRRP frame out of the monitored device."""
        self.logger.debug('send')
        try:
            self.packet_socket.sendto(ev.data,
                                      (self.interface.device_name, 0))
        except:
            self.logger.error('send failed')
    def _initialize(self):
        # nothing
        pass
    def _shutdown(self):
        self.__is_active = False
OpenERPJeff/gooderp_addons | sell/report/sell_summary_goods.py | 6 | 6707 | # -*- coding: utf-8 -*-
from odoo import fields, models, api
import odoo.addons.decimal_precision as dp
import datetime
class SellSummaryGoods(models.Model):
_name = 'sell.summary.goods'
_inherit = 'report.base'
_description = u'销售汇总表(按商品)'
id_lists = fields.Text(u'移动明细行id列表')
goods_categ = fields.Char(u'商品类别')
goods_code = fields.Char(u'商品编码')
goods = fields.Char(u'商品名称')
attribute = fields.Char(u'属性')
warehouse = fields.Char(u'仓库')
qty_uos = fields.Float(u'辅助数量', digits=dp.get_precision('Quantity'))
uos = fields.Char(u'辅助单位')
qty = fields.Float(u'基本数量', digits=dp.get_precision('Quantity'))
uom = fields.Char(u'基本单位')
price = fields.Float(u'单价', digits=dp.get_precision('Price'))
amount = fields.Float(u'销售收入', digits=dp.get_precision('Amount'))
tax_amount = fields.Float(u'税额', digits=dp.get_precision('Amount'))
subtotal = fields.Float(u'价税合计', digits=dp.get_precision('Amount'))
margin = fields.Float(u'毛利', digits=dp.get_precision('Amount'))
def select_sql(self, sql_type='out'):
return '''
SELECT MIN(wml.id) as id,
array_agg(wml.id) AS id_lists,
categ.name AS goods_categ,
goods.code AS goods_code,
goods.name AS goods,
attr.name AS attribute,
wh.name AS warehouse,
SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_uos_qty
ELSE - wml.goods_uos_qty END) AS qty_uos,
uos.name AS uos,
SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_qty
ELSE - wml.goods_qty END) AS qty,
uom.name AS uom,
(CASE WHEN SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_qty
ELSE - wml.goods_qty END) = 0 THEN 0
ELSE
SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.amount
ELSE - wml.amount END)
/ SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_qty
ELSE - wml.goods_qty END)
END) AS price,
SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.amount
ELSE - wml.amount END) AS amount,
SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.tax_amount
ELSE - wml.tax_amount END) AS tax_amount,
SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.subtotal
ELSE - wml.subtotal END) AS subtotal,
(SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.amount
ELSE - wml.amount END) - SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_qty
ELSE - wml.goods_qty END) * wml.cost_unit) AS margin
'''
def from_sql(self, sql_type='out'):
return '''
FROM wh_move_line AS wml
LEFT JOIN wh_move wm ON wml.move_id = wm.id
LEFT JOIN partner ON wm.partner_id = partner.id
LEFT JOIN goods ON wml.goods_id = goods.id
LEFT JOIN core_category AS categ ON goods.category_id = categ.id
LEFT JOIN attribute AS attr ON wml.attribute_id = attr.id
LEFT JOIN warehouse AS wh ON wml.warehouse_id = wh.id
OR wml.warehouse_dest_id = wh.id
LEFT JOIN uom AS uos ON goods.uos_id = uos.id
LEFT JOIN uom ON goods.uom_id = uom.id
'''
def where_sql(self, sql_type='out'):
extra = ''
if self.env.context.get('partner_id'):
extra += 'AND partner.id = {partner_id}'
if self.env.context.get('goods_id'):
extra += 'AND goods.id = {goods_id}'
if self.env.context.get('goods_categ_id'):
extra += 'AND categ.id = {goods_categ_id}'
if self.env.context.get('warehouse_id'):
extra += 'AND wh.id = {warehouse_id}'
return '''
WHERE wml.state = 'done'
AND wml.date >= '{date_start}'
AND wml.date < '{date_end}'
AND wm.origin like 'sell.delivery%%'
AND wh.type = 'stock'
%s
''' % extra
def group_sql(self, sql_type='out'):
return '''
GROUP BY goods_categ,goods_code,goods,attribute,warehouse,uos,uom,wml.cost_unit
'''
def order_sql(self, sql_type='out'):
return '''
ORDER BY goods_code,goods,attribute,warehouse
'''
def get_context(self, sql_type='out', context=None):
    """Build the placeholder-value mapping used to format the SQL.

    The end date is pushed forward one day so the half-open range
    ``date >= date_start AND date < date_end`` covers the whole last
    day selected by the user.  Relational context values arrive as
    ``[id, name]`` pairs; only the id is kept, with '' as fallback.
    """
    end = datetime.datetime.strptime(context.get('date_end'), '%Y-%m-%d')
    end = (end + datetime.timedelta(days=1)).strftime('%Y-%m-%d')

    def _head(key):
        # first element of a many2one-style value, or '' when absent
        value = context.get(key)
        return (value and value[0]) or ''

    return {
        'date_start': context.get('date_start') or '',
        'date_end': end,
        'partner_id': _head('partner_id'),
        'goods_id': _head('goods_id'),
        'goods_categ_id': _head('goods_categ_id'),
        'warehouse_id': _head('warehouse_id'),
    }
def _compute_order(self, result, order):
    # default ordering for the report rows when the caller gives none
    order = order or 'goods_code ASC'
    return super(SellSummaryGoods, self)._compute_order(result, order)
def collect_data_by_sql(self, sql_type='out'):
    # NOTE(review): the incoming `sql_type` is ignored and 'out' is
    # always forwarded -- presumably intentional since only outbound
    # (delivery) moves are summarised here; confirm before relying on
    # the parameter.
    collection = self.execute_sql(sql_type='out')
    return collection
@api.multi
def view_detail(self):
    """Button handler: open the sell-order detail lines that make up
    this summary row (sales summary grouped by goods)."""
    self.ensure_one()

    # locate the cached move-line ids belonging to this summary row
    line_ids = []
    for row in self.get_data_from_cache():
        if row.get('id') == self.id:
            line_ids = row.get('id_lists')

    # collect the matching sell.order.detail records for every move line
    detail_obj = self.env['sell.order.detail']
    detail_ids = []
    for move_line in self.env['wh.move.line'].search([('id', 'in', line_ids)]):
        matched = detail_obj.search(
            [('order_name', '=', move_line.move_id.name),
             ('goods_id', '=', move_line.goods_id.id)])
        detail_ids.extend(detail.id for detail in matched)

    return {
        'name': u'销售明细表',
        'view_mode': 'tree',
        'view_id': False,
        'res_model': 'sell.order.detail',
        'type': 'ir.actions.act_window',
        'domain': [('id', 'in', detail_ids)],
    }
| agpl-3.0 |
0x46616c6b/ansible-modules-core | system/seboolean.py | 90 | 6637 | #!/usr/bin/python
# (c) 2012, Stephen Fromm <sfromm@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: seboolean
short_description: Toggles SELinux booleans.
description:
- Toggles SELinux booleans.
version_added: "0.7"
options:
name:
description:
- Name of the boolean to configure
required: true
default: null
persistent:
description:
- Set to C(yes) if the boolean setting should survive a reboot
required: false
default: no
choices: [ "yes", "no" ]
state:
description:
- Desired boolean value
required: true
default: null
choices: [ 'yes', 'no' ]
notes:
- Not tested on any debian based system
requirements: [ ]
author: Stephen Fromm
'''
EXAMPLES = '''
# Set (httpd_can_network_connect) flag on and keep it persistent across reboots
- seboolean: name=httpd_can_network_connect state=yes persistent=yes
'''
try:
import selinux
HAVE_SELINUX=True
except ImportError:
HAVE_SELINUX=False
try:
import semanage
HAVE_SEMANAGE=True
except ImportError:
HAVE_SEMANAGE=False
def has_boolean_value(module, name):
    """Return True when the loaded SELinux policy defines boolean `name`.

    Aborts the module run via fail_json() when the boolean list cannot
    be read at all.
    """
    bools = []
    try:
        rc, bools = selinux.security_get_boolean_names()
    # use the version-agnostic `except X:` form (the original py2-only
    # `except OSError, e:` bound an unused name and breaks on Python 3)
    except OSError:
        module.fail_json(msg="Failed to get list of boolean names")
    return name in bools
def get_boolean_value(module, name):
    """Return the current runtime state of boolean `name` as a bool.

    Aborts the module run via fail_json() when the state cannot be
    queried.
    """
    state = 0
    try:
        state = selinux.security_get_boolean_active(name)
    # version-agnostic exception syntax; the bound name was unused
    except OSError:
        module.fail_json(msg="Failed to determine current state for boolean %s" % name)
    return state == 1
# The following method implements what setsebool.c does to change
# a boolean and make it persist after reboot..
def semanage_boolean_value(module, name, state):
    """Persistently set SELinux boolean `name` through the semanage
    policy store.

    Mirrors the call sequence of setsebool.c: create a handle, open a
    transaction, build a boolean record, register it in the local store
    and as the active value, then commit.  Returns True on success; any
    failure aborts the play through module.fail_json().
    """
    rc = 0
    value = 0
    if state:
        value = 1
    # handle to the semanage (policy management) library
    handle = semanage.semanage_handle_create()
    if handle is None:
        module.fail_json(msg="Failed to create semanage library handle")
    try:
        managed = semanage.semanage_is_managed(handle)
        if managed < 0:
            module.fail_json(msg="Failed to determine whether policy is manage")
        if managed == 0:
            # unmanaged policy store: persisting is impossible; the hint
            # depends on whether we are already running as root
            if os.getuid() == 0:
                module.fail_json(msg="Cannot set persistent booleans without managed policy")
            else:
                module.fail_json(msg="Cannot set persistent booleans; please try as root")
        if semanage.semanage_connect(handle) < 0:
            module.fail_json(msg="Failed to connect to semanage")

        # everything below happens inside one transaction and only takes
        # effect at semanage_commit()
        if semanage.semanage_begin_transaction(handle) < 0:
            module.fail_json(msg="Failed to begin semanage transaction")

        # build the boolean record: name + desired value
        rc, sebool = semanage.semanage_bool_create(handle)
        if rc < 0:
            module.fail_json(msg="Failed to create seboolean with semanage")
        if semanage.semanage_bool_set_name(handle, sebool, name) < 0:
            module.fail_json(msg="Failed to set seboolean name with semanage")
        semanage.semanage_bool_set_value(sebool, value)

        # key identifying the boolean; used for both the persistent
        # (local) entry and the currently-active value
        rc, boolkey = semanage.semanage_bool_key_extract(handle, sebool)
        if rc < 0:
            module.fail_json(msg="Failed to extract boolean key with semanage")

        if semanage.semanage_bool_modify_local(handle, boolkey, sebool) < 0:
            module.fail_json(msg="Failed to modify boolean key with semanage")

        if semanage.semanage_bool_set_active(handle, boolkey, sebool) < 0:
            module.fail_json(msg="Failed to set boolean key active with semanage")

        semanage.semanage_bool_key_free(boolkey)
        semanage.semanage_bool_free(sebool)

        # do not trigger a full policy reload on commit
        semanage.semanage_set_reload(handle, 0)
        if semanage.semanage_commit(handle) < 0:
            module.fail_json(msg="Failed to commit changes to semanage")

        semanage.semanage_disconnect(handle)
        semanage.semanage_handle_destroy(handle)
    except Exception, e:
        module.fail_json(msg="Failed to manage policy for boolean %s: %s" % (name, str(e)))
    return True
def set_boolean_value(module, name, state):
    """Set the runtime (non-persistent) value of boolean `name`.

    Returns True when libselinux reports success (rc == 0), False
    otherwise; aborts via fail_json() when the call itself fails.
    """
    rc = 0
    value = 1 if state else 0
    try:
        rc = selinux.security_set_boolean(name, value)
    # version-agnostic exception syntax; the bound name was unused
    except OSError:
        module.fail_json(msg="Failed to set boolean %s to %s" % (name, value))
    return rc == 0
def main():
    """Ansible entry point: ensure the given SELinux boolean is in the
    requested state, optionally persisting it across reboots."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            persistent=dict(default='no', type='bool'),
            state=dict(required=True, type='bool')
        ),
        supports_check_mode=True
    )

    if not HAVE_SELINUX:
        module.fail_json(msg="This module requires libselinux-python support")

    if not HAVE_SEMANAGE:
        module.fail_json(msg="This module requires libsemanage-python support")

    if not selinux.is_selinux_enabled():
        module.fail_json(msg="SELinux is disabled on this host.")

    name = module.params['name']
    persistent = module.params['persistent']
    state = module.params['state']

    result = {}
    result['name'] = name

    if not has_boolean_value(module, name):
        module.fail_json(msg="SELinux boolean %s does not exist." % name)

    cur_value = get_boolean_value(module, name)
    if cur_value == state:
        # already in the requested state: report unchanged and bail out
        result['state'] = cur_value
        result['changed'] = False
        module.exit_json(**result)

    if module.check_mode:
        module.exit_json(changed=True)

    if persistent:
        r = semanage_boolean_value(module, name, state)
    else:
        r = set_boolean_value(module, name, state)

    result['changed'] = r
    if not r:
        # BUG FIX: the original formatted this message with an undefined
        # name `value`, raising NameError instead of reporting the failure
        module.fail_json(msg="Failed to set boolean %s to %s" % (name, state))

    try:
        selinux.security_commit_booleans()
    # narrowed from a bare `except:` which also swallowed SystemExit
    except Exception:
        module.fail_json(msg="Failed to commit pending boolean %s value" % name)

    module.exit_json(**result)

# import module snippets
from ansible.module_utils.basic import *

main()
| gpl-3.0 |
IsCoolEntertainment/debpkg_python-paramiko | paramiko/kex_group1.py | 8 | 5300 | # Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Standard SSH key exchange ("kex" if you wanna sound cool). Diffie-Hellman of
1024 bit key halves, using a known "p" prime and "g" generator.
"""
from Crypto.Hash import SHA
from paramiko.common import *
from paramiko import util
from paramiko.message import Message
from paramiko.ssh_exception import SSHException
_MSG_KEXDH_INIT, _MSG_KEXDH_REPLY = range(30, 32)
# draft-ietf-secsh-transport-09.txt, page 17
P = 0xFFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A637ED6B0BFF5CB6F406B7EDEE386BFB5A899FA5AE9F24117C4B1FE649286651ECE65381FFFFFFFFFFFFFFFFL
G = 2
class KexGroup1(object):
    """Fixed-group Diffie-Hellman key exchange (diffie-hellman-group1-sha1).

    Both sides use the well-known 1024-bit prime P and generator G = 2.
    The client sends e = g^x mod p; the server replies with its host
    key, f = g^x mod p and a signature over the exchange hash H.  The
    shared secret is K = f^x mod p (client) = e^x mod p (server).
    """

    name = 'diffie-hellman-group1-sha1'

    def __init__(self, transport):
        self.transport = transport
        # x: our random secret exponent; e/f: client/server public parts
        self.x = 0L
        self.e = 0L
        self.f = 0L

    def start_kex(self):
        # pick the secret exponent first; both roles need it
        self._generate_x()
        if self.transport.server_mode:
            # compute f = g^x mod p, but don't send it yet
            self.f = pow(G, self.x, P)
            self.transport._expect_packet(_MSG_KEXDH_INIT)
            return
        # compute e = g^x mod p (where g=2), and send it
        self.e = pow(G, self.x, P)
        m = Message()
        m.add_byte(chr(_MSG_KEXDH_INIT))
        m.add_mpint(self.e)
        self.transport._send_message(m)
        self.transport._expect_packet(_MSG_KEXDH_REPLY)

    def parse_next(self, ptype, m):
        # dispatch the single packet type each role is allowed to receive
        if self.transport.server_mode and (ptype == _MSG_KEXDH_INIT):
            return self._parse_kexdh_init(m)
        elif not self.transport.server_mode and (ptype == _MSG_KEXDH_REPLY):
            return self._parse_kexdh_reply(m)
        raise SSHException('KexGroup1 asked to handle packet type %d' % ptype)

    ###  internals...

    def _generate_x(self):
        # generate an "x" (1 < x < q), where q is (p-1)/2.
        # p is a 128-byte (1024-bit) number, where the first 64 bits are 1.
        # therefore q can be approximated as a 2^1023.  we drop the subset of
        # potential x where the first 63 bits are 1, because some of those will be
        # larger than q (but this is a tiny tiny subset of potential x).
        while 1:
            x_bytes = self.transport.rng.read(128)
            # clear the top bit so the value stays below 2^1023
            x_bytes = chr(ord(x_bytes[0]) & 0x7f) + x_bytes[1:]
            if (x_bytes[:8] != '\x7F\xFF\xFF\xFF\xFF\xFF\xFF\xFF') and \
                    (x_bytes[:8] != '\x00\x00\x00\x00\x00\x00\x00\x00'):
                break
        self.x = util.inflate_long(x_bytes)

    def _parse_kexdh_reply(self, m):
        # client mode
        host_key = m.get_string()
        self.f = m.get_mpint()
        # reject degenerate server values (RFC 4253 requires 1 < f < p-1)
        if (self.f < 1) or (self.f > P - 1):
            raise SSHException('Server kex "f" is out of range')
        sig = m.get_string()
        K = pow(self.f, self.x, P)
        # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || e || f || K)
        hm = Message()
        hm.add(self.transport.local_version, self.transport.remote_version,
               self.transport.local_kex_init, self.transport.remote_kex_init)
        hm.add_string(host_key)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        self.transport._set_K_H(K, SHA.new(str(hm)).digest())
        # verify the server's signature over H before activating keys
        self.transport._verify_key(host_key, sig)
        self.transport._activate_outbound()

    def _parse_kexdh_init(self, m):
        # server mode
        self.e = m.get_mpint()
        if (self.e < 1) or (self.e > P - 1):
            raise SSHException('Client kex "e" is out of range')
        K = pow(self.e, self.x, P)
        key = str(self.transport.get_server_key())
        # okay, build up the hash H of (V_C || V_S || I_C || I_S || K_S || e || f || K)
        hm = Message()
        hm.add(self.transport.remote_version, self.transport.local_version,
               self.transport.remote_kex_init, self.transport.local_kex_init)
        hm.add_string(key)
        hm.add_mpint(self.e)
        hm.add_mpint(self.f)
        hm.add_mpint(K)
        H = SHA.new(str(hm)).digest()
        self.transport._set_K_H(K, H)
        # sign it
        sig = self.transport.get_server_key().sign_ssh_data(self.transport.rng, H)
        # send reply
        m = Message()
        m.add_byte(chr(_MSG_KEXDH_REPLY))
        m.add_string(key)
        m.add_mpint(self.f)
        m.add_string(str(sig))
        self.transport._send_message(m)
        self.transport._activate_outbound()
| lgpl-2.1 |
pietroquaglio/elephant | elephant/unitary_event_analysis.py | 2 | 27650 | # -*- coding: utf-8 -*-
"""
Unitary Event (UE) analysis is a statistical method that
enables to analyze in a time resolved manner excess spike correlation
between simultaneously recorded neurons by comparing the empirical
spike coincidences (precision of a few ms) to the expected number
based on the firing rates of the neurons.
References:
- Gruen, Diesmann, Grammont, Riehle, Aertsen (1999) J Neurosci Methods,
94(1): 67-79.
- Gruen, Diesmann, Aertsen (2002a,b) Neural Comput, 14(1): 43-80; 81-19.
- Gruen S, Riehle A, and Diesmann M (2003) Effect of cross-trial
nonstationarity on joint-spike events Biological Cybernetics 88(5):335-351.
- Gruen S (2009) Data-driven significance estimation of precise spike
correlation. J Neurophysiology 101:1126-1140 (invited review)
:copyright: Copyright 2015-2016 by the Elephant team, see AUTHORS.txt.
:license: Modified BSD, see LICENSE.txt for details.
"""
import warnings

import numpy as np
import quantities as pq
import neo
import scipy
import scipy.special

import elephant.conversion as conv
def hash_from_pattern(m, N, base=2):
    """Map binary spike patterns to unique integer hash values.

    Each column of `m` is one 0/1 pattern over `N` neurons (rows); the
    hash is the value of that column read as an N-digit number in the
    given `base`, with row 0 as the most significant digit.

    Parameters
    ----------
    m : 2-dim array-like of 0/1, shape (N, number of patterns)
    N : int, number of neurons (must equal the number of rows of `m`)
    base : int, numeral base used for the encoding (default 2)

    Returns
    -------
    ndarray of int, one hash per column of `m`

    Raises
    ------
    ValueError : wrong orientation of `m`, or entries other than 0/1

    Examples
    --------
    >>> m = np.array([[0, 1, 0, 0, 1, 1, 0, 1],
    ...               [0, 0, 1, 0, 1, 0, 1, 1],
    ...               [0, 0, 0, 1, 0, 1, 1, 1]])
    >>> hash_from_pattern(m, N=3)
    array([0, 4, 2, 1, 6, 5, 3, 7])
    """
    m = np.asarray(m)
    if np.shape(m)[0] != N:
        raise ValueError('patterns in the matrix should be column entries')
    if not np.all((m == 0) | (m == 1)):
        raise ValueError('patterns should be zero or one')
    # digit weights base**(N-1) ... base**0: the first row is the most
    # significant digit (Python ints avoid overflow for large N)
    weights = np.array([base ** exp for exp in range(N - 1, -1, -1)])
    return np.dot(weights, m)
def inverse_hash_from_pattern(h, N, base=2):
"""
Calculate the 0-1 spike patterns (matrix) from hash values
Parameters:
-----------
h: list of integers
list or array of hash values, length: number of patterns
N: integer
number of neurons
base: integer
base for calculation of the number from binary
sequences (= pattern).
Default is 2
Raises:
-------
ValueError: if the hash is not compatible with the number
of neurons hash value should not be larger than the biggest
possible hash number with given number of neurons
(e.g. for N = 2, max(hash) = 2^1 + 2^0 = 3
, or for N = 4, max(hash) = 2^3 + 2^2 + 2^1 + 2^0 = 15)
Returns:
--------
numpy.array:
A matrix of shape: (N, number of patterns)
Examples
---------
>>> import numpy as np
>>> h = np.array([3,7])
>>> N = 4
>>> inverse_hash_from_pattern(h,N)
array([[1, 1],
[1, 1],
[0, 1],
[0, 0]])
"""
# check if the hash values are not greater than the greatest possible
# value for N neurons with the given base
if np.any(h > np.sum([base**x for x in range(N)])):
raise ValueError(
"hash value is not compatible with the number of neurons N")
# check if the hash values are integer
if not np.all(np.int64(h) == h):
raise ValueError("hash values are not integers")
m = np.zeros((N, len(h)), dtype=int)
for j, hh in enumerate(h):
i = N - 1
while i >= 0 and hh != 0:
m[i, j] = hh % base
hh /= base
i -= 1
return m
def n_emp_mat(mat, N, pattern_hash, base=2):
    """Count how often each requested coincidence pattern occurs.

    `mat` is a binary (0/1) matrix with neurons on the rows and time
    bins on the columns.  For every hash in `pattern_hash` the number
    of bins whose column hashes to that value is returned, together
    with the indices of those bins.

    Parameters
    ----------
    mat : 2-dim array-like of 0/1, shape (N, number of bins)
    N : int, number of neurons
    pattern_hash : sequence of int, hashes of the patterns of interest
    base : int, base used when the hashes were generated (default 2)

    Returns
    -------
    N_emp : ndarray of float, one occurrence count per pattern hash
    indices : list of ndarray, the matching bin indices per pattern

    Raises
    ------
    ValueError : when `mat` contains entries other than 0/1
    """
    if not np.all((np.array(mat) == 0) + (np.array(mat) == 1)):
        raise ValueError("entries of mat should be either one or zero")
    hashed_bins = hash_from_pattern(mat, N, base=base)
    indices = [np.where(hashed_bins == ph)[0] for ph in pattern_hash]
    N_emp = np.array([float(len(found)) for found in indices])
    return N_emp, indices
def n_emp_mat_sum_trial(mat, N, pattern_hash):
    """Count occurrences of coincidence patterns, summed across trials.

    Parameters
    ----------
    mat : 3-dim array-like of 0/1; axes are (trial, neuron, time bin)
    N : int, number of neurons (size of axis 1)
    pattern_hash : sequence of int, hashes of the patterns of interest

    Returns
    -------
    N_emp : ndarray of float, per-pattern occurrence counts summed over
        all trials
    idx_trials : list (one entry per trial) of lists of ndarray with the
        bin indices where each pattern was found in that trial

    Raises
    ------
    ValueError : wrong matrix orientation, or entries other than 0/1
    """
    if np.shape(mat)[1] != N:
        raise ValueError('the entries of mat should be a list of a'
                         'list where 0-axis is trials and 1-axis is neurons')
    if not np.all((np.array(mat) == 0) + (np.array(mat) == 1)):
        raise ValueError("entries of mat should be either one or zero")

    N_emp = np.zeros(len(pattern_hash))
    idx_trials = []
    for single_trial in mat:
        counts, found = n_emp_mat(single_trial, N, pattern_hash, base=2)
        idx_trials.append(found)
        N_emp += counts
    return N_emp, idx_trials
def _n_exp_mat_analytic(mat, N, pattern_hash):
    """Analytic expectation of each pattern count under independence.

    The joint probability of a pattern is the product over neurons of
    the neuron's firing probability (pattern entry 1) or its complement
    (entry 0); multiplying by the number of bins gives the expected
    count.
    """
    # per-neuron firing probability as a column vector (N, 1)
    p_fire = np.reshape(np.mean(mat, 1, dtype=float),
                        (np.shape(mat)[0], 1))
    patterns = inverse_hash_from_pattern(pattern_hash, N)
    # probability that each neuron matches its pattern entry
    pmat = np.where(patterns == 1, p_fire, 1 - p_fire)
    return np.prod(pmat, axis=0) * float(np.shape(mat)[1])
def _n_exp_mat_surrogate(mat, N, pattern_hash, n_surr=1):
    """Distribution of the expected pattern count from surrogates.

    Each surrogate shuffles every neuron's spike times independently
    (row-wise shuffle) and counts the single requested pattern; one
    count is returned per surrogate.
    """
    if len(pattern_hash) > 1:
        raise ValueError('surrogate method works only for one pattern!')
    counts = np.zeros(n_surr)
    for surr_idx in range(n_surr):
        shuffled = np.array(mat)
        for row in shuffled:
            np.random.shuffle(row)
        counts[surr_idx] = n_emp_mat(shuffled, N, pattern_hash)[0][0]
    return counts
def n_exp_mat(mat, N, pattern_hash, method='analytic', n_surr=1):
    """Expected number of occurrences of each spike pattern.

    Parameters
    ----------
    mat : 2-dim array-like of 0/1; axes are (neuron, time bin)
    N : int, number of neurons
    pattern_hash : sequence of int, hashes of the patterns of interest
    method : str
        'analytic' -- closed-form expectation under independence
        'surr'     -- distribution from spike-time-randomized surrogates
    n_surr : int, number of surrogates (only used with method='surr')

    Returns
    -------
    'analytic': ndarray of float, one expectation per pattern
    'surr': ndarray, shape (n_surr,), one realization per surrogate

    Raises
    ------
    ValueError : non-binary matrix, or an unknown `method`
    """
    if np.any(mat > 1) or np.any(mat < 0):
        raise ValueError("entries of mat should be either one or zero")

    if method == 'analytic':
        return _n_exp_mat_analytic(mat, N, pattern_hash)
    if method == 'surr':
        return _n_exp_mat_surrogate(mat, N, pattern_hash, n_surr)
    # BUG FIX: the original silently returned None for an unrecognized
    # method string; fail loudly instead
    raise ValueError("unknown method '%s'; use 'analytic' or 'surr'" % method)
def n_exp_mat_sum_trial(
        mat, N, pattern_hash, method='analytic_TrialByTrial', **kwargs):
    """Expected pattern counts aggregated over trials.

    Parameters
    ----------
    mat : 3-dim array-like of 0/1; axes are (trial, neuron, time bin)
    N : int, number of neurons (size of axis 1)
    pattern_hash : sequence of int, hashes of the patterns of interest
    method : str
        'analytic_TrialByTrial'  -- analytic expectation per trial,
            summed over trials
        'analytic_TrialAverage'  -- analytic expectation of the
            trial-averaged matrix, scaled by the number of trials
            (cf. Gruen et al. 2003)
        'surrogate_TrialByTrial' -- surrogate distribution per trial,
            summed over trials
    n_surr : int (keyword), number of surrogates; default 1

    Returns
    -------
    ndarray of float: per-pattern expectations ('analytic_*'), or one
    summed count per surrogate ('surrogate_TrialByTrial')

    Raises
    ------
    ValueError : wrong matrix orientation, or an unknown `method`
    """
    if np.shape(mat)[1] != N:
        # (message kept with a space added between "a" and "list")
        raise ValueError('the entries of mat should be a list of a '
                         'list where 0-axis is trials and 1-axis is neurons')

    if method == 'analytic_TrialByTrial':
        n_exp = np.zeros(len(pattern_hash))
        for mat_tr in mat:
            n_exp += n_exp_mat(mat_tr, N, pattern_hash, method='analytic')
    elif method == 'analytic_TrialAverage':
        n_exp = n_exp_mat(
            np.mean(mat, 0), N, pattern_hash,
            method='analytic') * np.shape(mat)[0]
    elif method == 'surrogate_TrialByTrial':
        # BUG FIX: coerce to int -- the original default was the float
        # 1., which np.zeros() rejects on modern NumPy
        n_surr = int(kwargs.get('n_surr', 1))
        n_exp = np.zeros(n_surr)
        for mat_tr in mat:
            n_exp += n_exp_mat(mat_tr, N, pattern_hash,
                               method='surr', n_surr=n_surr)
    else:
        # BUG FIX: the original raised a copy-pasted message about the
        # zero-one matrix here; report the actual problem
        raise ValueError("unknown method '%s'" % method)
    return n_exp
def gen_pval_anal(
        mat, N, pattern_hash, method='analytic_TrialByTrial', **kwargs):
    """Return expected coincidences and a p-value function.

    Computes the expected number of coincidences from `mat` and returns
    a function that, given the empirical count `n_emp`, yields the
    p-value: the upper-tail probability of observing at least `n_emp`
    coincidences under the expected (Poisson or surrogate) distribution.

    Parameters
    ----------
    mat : 3-dim array-like of 0/1; axes are (trial, neuron, time bin)
    N : int, number of neurons
    pattern_hash : sequence of int, hashes of the patterns of interest
    method : one of 'analytic_TrialByTrial', 'analytic_TrialAverage',
        'surrogate_TrialByTrial' (see n_exp_mat_sum_trial)
    n_surr : int (keyword), number of surrogates; default 1

    Returns
    -------
    pval : callable mapping empirical counts to p-values
    n_exp : expected coincidence counts (see n_exp_mat_sum_trial)

    Raises
    ------
    ValueError : unknown `method` (the original fell through to a
        NameError on the undefined closure)
    """
    if method == 'analytic_TrialByTrial' or method == 'analytic_TrialAverage':
        n_exp = n_exp_mat_sum_trial(mat, N, pattern_hash, method=method)

        def pval(n_emp):
            # upper tail of a Poisson(n_exp) distribution at n_emp;
            # requires the explicit `import scipy.special` at module level
            p = 1. - scipy.special.gammaincc(n_emp, n_exp)
            return p
    elif method == 'surrogate_TrialByTrial':
        # BUG FIX: coerce to int -- the original default was the float
        # 1., which np.zeros() rejects downstream on modern NumPy
        n_surr = int(kwargs.get('n_surr', 1))
        n_exp = n_exp_mat_sum_trial(
            mat, N, pattern_hash, method=method, n_surr=n_surr)

        def pval(n_emp):
            if len(n_emp) > 1:
                raise ValueError(
                    'in surrogate method the p_value can be calculated only for one pattern!')
            # empirical distribution of the surrogate counts
            hist = np.bincount(np.int64(n_exp))
            exp_dist = hist / float(np.sum(hist))
            # tail mass at and above the observed count
            return np.sum(exp_dist[int(n_emp[0]):])
    else:
        raise ValueError("unknown method '%s'" % method)

    return pval, n_exp
def jointJ(p_val):
    """Transform joint p-values into the logarithmic surprise measure.

    J = log10((1 - p) / p), so highly significant (very small) p-values
    map to large positive surprise values for better visualization.

    Parameters
    ----------
    p_val : array-like of float, p-values of the statistical tests

    Returns
    -------
    ndarray of float, the surprise values (inf/-inf at p = 0 or 1)

    Examples
    --------
    >>> jointJ(np.array([0.31271072, 0.01175031]))
    array([0.3419968 , 1.92481736])
    """
    p_arr = np.asarray(p_val)
    # BUG FIX: the original wrapped this in try/except RuntimeWarning,
    # but numpy emits warnings (it does not raise) for log10(0); had the
    # except branch ever run, the bare `return Js` would have raised a
    # NameError.  Silence the divide warnings explicitly instead.
    with np.errstate(divide='ignore', invalid='ignore'):
        return np.log10(1 - p_arr) - np.log10(p_arr)
def _rate_mat_avg_trial(mat):
"""
calculates the average firing rate of each neurons across trials
"""
num_tr, N, nbins = np.shape(mat)
psth = np.zeros(N)
for tr, mat_tr in enumerate(mat):
psth += np.sum(mat_tr, axis=1)
return psth / float(nbins) / float(num_tr)
def _bintime(t, binsize):
    """Convert a time quantity `t` into integer bin indices for the
    given `binsize` (both rescaled to ms; floor division)."""
    t_ms = t.rescale('ms').magnitude
    binsize_ms = binsize.rescale('ms').magnitude
    return np.floor(np.array(t_ms) / binsize_ms).astype(int)
def _winpos(t_start, t_stop, winsize, winstep, position='left-edge'):
    """Left edges of the sliding analysis windows, as a quantity array
    in ms, covering [t_start, t_stop] with windows of `winsize` moved
    by `winstep`."""
    start = t_start.rescale('ms').magnitude
    stop = t_stop.rescale('ms').magnitude
    width = winsize.rescale('ms').magnitude
    step = winstep.rescale('ms').magnitude
    if position != 'left-edge':
        raise ValueError(
            'the current version only returns left-edge of the window')
    # last window must still fit entirely before t_stop
    return np.arange(start, stop - width + step, step) * pq.ms
def _UE(mat, N, pattern_hash, method='analytic_TrialByTrial', **kwargs):
    """
    returns the default results of unitary events analysis
    (Surprise, empirical coincidences and index of where it happened
    in the given mat, n_exp and average rate of neurons)
    """
    # average firing probability of each neuron across trials and bins
    rate_avg = _rate_mat_avg_trial(mat)
    # empirical coincidence counts and the bin indices per trial
    n_emp, indices = n_emp_mat_sum_trial(mat, N, pattern_hash)
    if method == 'surrogate_TrialByTrial':
        if 'n_surr' in kwargs:
            n_surr = kwargs['n_surr']
        else:
            n_surr = 1
        dist_exp, n_exp = gen_pval_anal(
            mat, N, pattern_hash, method, n_surr=n_surr)
        # collapse the surrogate distribution to a single expected value
        n_exp = np.mean(n_exp)
    elif method == 'analytic_TrialByTrial' or method == 'analytic_TrialAverage':
        dist_exp, n_exp = gen_pval_anal(mat, N, pattern_hash, method)
    # p-value of the empirical count, transformed to the surprise measure
    pval = dist_exp(n_emp)
    Js = jointJ(pval)
    return Js, rate_avg, n_exp, n_emp, indices
def jointJ_window_analysis(
        data, binsize, winsize, winstep, pattern_hash,
        method='analytic_TrialByTrial', t_start=None,
        t_stop=None, binary=True, **kwargs):
    """
    Calculates the joint surprise in a sliding window fashion

    Parameters:
    ----------
    data: list of neo.SpikeTrain objects
        list of spike trains in different trials
        0-axis --> Trials
        1-axis --> Neurons
        2-axis --> Spike times
    binsize: Quantity scalar with dimension time
        size of bins for discretizing spike trains
    winsize: Quantity scalar with dimension time
        size of the window of analysis
    winstep: Quantity scalar with dimension time
        size of the window step
    pattern_hash: list of integers
        list of interesting patterns in hash values
        (see hash_from_pattern and inverse_hash_from_pattern functions)
    method: string
        method with which the unitary events would be computed
        'analytic_TrialByTrial' -- > calculate the expectancy
        (analytically) on each trial, then sum over all trials.
        'analytic_TrialAverage' -- > calculate the expectancy
        by averaging over trials.
        (cf. Gruen et al. 2003)
        'surrogate_TrialByTrial' -- > calculate the distribution
        of expected coincidences by spike time randomization in
        each trial and sum over trials.
        Default is 'analytic_trialByTrial'
    t_start: float or Quantity scalar, optional
        The start time to use for the time points.
        If not specified, retrieved from the `t_start`
        attribute of `spiketrain`.
    t_stop: float or Quantity scalar, optional
        The stop time to use for the time points.
        If not specified, retrieved from the `t_stop`
        attribute of `spiketrain`.
    kwargs:
    -------
    n_surr: integer
        number of surrogates to be used
        Default is 100

    Returns:
    -------
    result: dictionary
        Js: list of float
            JointSurprise of different given patterns within each window
            shape: different pattern hash --> 0-axis
            different window --> 1-axis
        indices: list of list of integers
            list of indices of pattern within each window
            shape: different pattern hash --> 0-axis
            different window --> 1-axis
        n_emp: list of integers
            empirical number of each observed pattern.
            shape: different pattern hash --> 0-axis
            different window --> 1-axis
        n_exp: list of floats
            expected number of each pattern.
            shape: different pattern hash --> 0-axis
            different window --> 1-axis
        rate_avg: list of floats
            average firing rate of each neuron
            shape: different pattern hash --> 0-axis
            different window --> 1-axis
    """
    if not isinstance(data[0][0], neo.SpikeTrain):
        raise ValueError(
            "structure of the data is not correct: 0-axis should be trials, 1-axis units and 2-axis neo spike trains")

    # default the analysis range to the extent of the first spike train
    if t_start is None:
        t_start = data[0][0].t_start.rescale('ms')
    if t_stop is None:
        t_stop = data[0][0].t_stop.rescale('ms')

    # position of all windows (left edges)
    t_winpos = _winpos(t_start, t_stop, winsize, winstep, position='left-edge')
    t_winpos_bintime = _bintime(t_winpos, binsize)

    winsize_bintime = _bintime(winsize, binsize)
    winstep_bintime = _bintime(winstep, binsize)

    # warn when window size/step are not integer multiples of the bin
    # size: the analysis then effectively uses the truncated values
    if winsize_bintime * binsize != winsize:
        warnings.warn(
            "ratio between winsize and binsize is not integer -- "
            "the actual number for window size is " + str(winsize_bintime * binsize))

    if winstep_bintime * binsize != winstep:
        warnings.warn(
            "ratio between winsize and binsize is not integer -- "
            "the actual number for window size is" + str(winstep_bintime * binsize))

    num_tr, N = np.shape(data)[:2]

    n_bins = int((t_stop - t_start) / binsize)

    # discretize all trials into one binary (trial, neuron, bin) array
    mat_tr_unit_spt = np.zeros((len(data), N, n_bins))
    for tr, sts in enumerate(data):
        bs = conv.BinnedSpikeTrain(
            sts, t_start=t_start, t_stop=t_stop, binsize=binsize)
        if binary is True:
            mat = bs.to_bool_array()
        else:
            raise ValueError(
                "The method only works on the zero_one matrix at the moment")
        mat_tr_unit_spt[tr] = mat

    num_win = len(t_winpos)
    Js_win, n_exp_win, n_emp_win = (np.zeros(num_win) for _ in range(3))
    rate_avg = np.zeros((num_win, N))
    indices_win = {}
    for i in range(num_tr):
        indices_win['trial' + str(i)] = []

    for i, win_pos in enumerate(t_winpos_bintime):
        # slice the current analysis window out of every trial
        mat_win = mat_tr_unit_spt[:, :, win_pos:win_pos + winsize_bintime]
        if method == 'surrogate_TrialByTrial':
            if 'n_surr' in kwargs:
                n_surr = kwargs['n_surr']
            else:
                n_surr = 100
            Js_win[i], rate_avg[i], n_exp_win[i], n_emp_win[i], indices_lst = _UE(
                mat_win, N, pattern_hash, method, n_surr=n_surr)
        else:
            Js_win[i], rate_avg[i], n_exp_win[
                i], n_emp_win[i], indices_lst = _UE(
                mat_win, N, pattern_hash, method)
        # NOTE(review): only indices_lst[j][0] -- the first requested
        # pattern's indices -- are collected here; confirm this is
        # intended for calls with more than one pattern hash.
        for j in range(num_tr):
            if len(indices_lst[j][0]) > 0:
                # shift window-relative bin indices to absolute bin positions
                indices_win[
                    'trial' + str(j)] = np.append(indices_win['trial' + str(j)], indices_lst[j][0] + win_pos)
    return {'Js': Js_win, 'indices': indices_win, 'n_emp': n_emp_win, 'n_exp': n_exp_win, 'rate_avg': rate_avg / binsize}
| bsd-3-clause |
Tejal011089/osmosis_erpnext | erpnext/selling/doctype/sales_order/test_sales_order.py | 11 | 8741 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt, add_days
import frappe.permissions
import unittest
from erpnext.selling.doctype.sales_order.sales_order \
import make_material_request, make_delivery_note, make_sales_invoice
class TestSalesOrder(unittest.TestCase):
    """Integration tests for the Sales Order doctype: creation of downstream
    documents (Material Request, Delivery Note, Sales Invoice), reserved-qty
    bookkeeping in the Bin, and user-permission enforcement.

    These tests run against the frappe test site fixtures (_Test Company,
    _Test Item, etc.).
    """

    def tearDown(self):
        # Some tests switch the session user; always restore Administrator.
        frappe.set_user("Administrator")

    def test_make_material_request(self):
        """A Material Request can only be made from a *submitted* SO."""
        so = make_sales_order(do_not_submit=True)
        self.assertRaises(frappe.ValidationError, make_material_request, so.name)
        so.submit()
        mr = make_material_request(so.name)
        self.assertEquals(mr.material_request_type, "Purchase")
        self.assertEquals(len(mr.get("items")), len(so.get("items")))

    def test_make_delivery_note(self):
        """A Delivery Note can only be made from a *submitted* SO."""
        so = make_sales_order(do_not_submit=True)
        self.assertRaises(frappe.ValidationError, make_delivery_note, so.name)
        so.submit()
        dn = make_delivery_note(so.name)
        self.assertEquals(dn.doctype, "Delivery Note")
        self.assertEquals(len(dn.get("items")), len(so.get("items")))

    def test_make_sales_invoice(self):
        """Invoicing a fully billed SO yields an empty item table."""
        so = make_sales_order(do_not_submit=True)
        self.assertRaises(frappe.ValidationError, make_sales_invoice, so.name)
        so.submit()
        si = make_sales_invoice(so.name)
        self.assertEquals(len(si.get("items")), len(so.get("items")))
        self.assertEquals(len(si.get("items")), 1)
        si.insert()
        si.submit()
        # A second invoice pulls nothing because everything is billed.
        si1 = make_sales_invoice(so.name)
        self.assertEquals(len(si1.get("items")), 0)

    def test_update_qty(self):
        """delivered_qty on the SO follows DNs and stock-updating SIs."""
        so = make_sales_order()
        create_dn_against_so(so.name, 6)
        so.load_from_db()
        self.assertEquals(so.get("items")[0].delivered_qty, 6)

        # Check delivered_qty after make_sales_invoice without update_stock checked
        si1 = make_sales_invoice(so.name)
        si1.get("items")[0].qty = 6
        si1.insert()
        si1.submit()
        so.load_from_db()
        self.assertEquals(so.get("items")[0].delivered_qty, 6)

        # Check delivered_qty after make_sales_invoice with update_stock checked
        si2 = make_sales_invoice(so.name)
        si2.set("update_stock", 1)
        si2.get("items")[0].qty = 3
        si2.insert()
        si2.submit()
        so.load_from_db()
        self.assertEquals(so.get("items")[0].delivered_qty, 9)

    def test_reserved_qty_for_partial_delivery(self):
        """Reserved qty shrinks on delivery / stop and is restored on unstop."""
        existing_reserved_qty = get_reserved_qty()

        so = make_sales_order()
        self.assertEqual(get_reserved_qty(), existing_reserved_qty + 10)

        dn = create_dn_against_so(so.name)
        self.assertEqual(get_reserved_qty(), existing_reserved_qty + 5)

        # stop so
        so.load_from_db()
        so.stop_sales_order()
        self.assertEqual(get_reserved_qty(), existing_reserved_qty)

        # unstop so
        so.load_from_db()
        so.unstop_sales_order()
        self.assertEqual(get_reserved_qty(), existing_reserved_qty + 5)

        dn.cancel()
        self.assertEqual(get_reserved_qty(), existing_reserved_qty + 10)

        # cancel
        so.load_from_db()
        so.cancel()
        self.assertEqual(get_reserved_qty(), existing_reserved_qty)

    def test_reserved_qty_for_over_delivery(self):
        """Over-delivery within tolerance releases the full reservation."""
        # set over-delivery tolerance
        frappe.db.set_value('Item', "_Test Item", 'tolerance', 50)

        existing_reserved_qty = get_reserved_qty()

        so = make_sales_order()
        self.assertEqual(get_reserved_qty(), existing_reserved_qty + 10)

        dn = create_dn_against_so(so.name, 15)
        self.assertEqual(get_reserved_qty(), existing_reserved_qty)

        dn.cancel()
        self.assertEqual(get_reserved_qty(), existing_reserved_qty + 10)

    def test_reserved_qty_for_partial_delivery_with_packing_list(self):
        """Reservation of a product-bundle SO is tracked per component item."""
        existing_reserved_qty_item1 = get_reserved_qty("_Test Item")
        existing_reserved_qty_item2 = get_reserved_qty("_Test Item Home Desktop 100")

        so = make_sales_order(item_code="_Test Sales BOM Item")
        self.assertEqual(get_reserved_qty("_Test Item"), existing_reserved_qty_item1 + 50)
        self.assertEqual(get_reserved_qty("_Test Item Home Desktop 100"),
            existing_reserved_qty_item2 + 20)

        dn = create_dn_against_so(so.name)
        self.assertEqual(get_reserved_qty("_Test Item"), existing_reserved_qty_item1 + 25)
        self.assertEqual(get_reserved_qty("_Test Item Home Desktop 100"),
            existing_reserved_qty_item2 + 10)

        # stop so
        so.load_from_db()
        so.stop_sales_order()
        self.assertEqual(get_reserved_qty("_Test Item"), existing_reserved_qty_item1)
        self.assertEqual(get_reserved_qty("_Test Item Home Desktop 100"), existing_reserved_qty_item2)

        # unstop so
        so.load_from_db()
        so.unstop_sales_order()
        self.assertEqual(get_reserved_qty("_Test Item"), existing_reserved_qty_item1 + 25)
        self.assertEqual(get_reserved_qty("_Test Item Home Desktop 100"),
            existing_reserved_qty_item2 + 10)

        dn.cancel()
        self.assertEqual(get_reserved_qty("_Test Item"), existing_reserved_qty_item1 + 50)
        self.assertEqual(get_reserved_qty("_Test Item Home Desktop 100"),
            existing_reserved_qty_item2 + 20)

        so.load_from_db()
        so.cancel()
        self.assertEqual(get_reserved_qty("_Test Item"), existing_reserved_qty_item1)
        self.assertEqual(get_reserved_qty("_Test Item Home Desktop 100"), existing_reserved_qty_item2)

    def test_reserved_qty_for_over_delivery_with_packing_list(self):
        """Over-delivery of a bundle (within tolerance) frees both components."""
        # set over-delivery tolerance
        frappe.db.set_value('Item', "_Test Sales BOM Item", 'tolerance', 50)

        existing_reserved_qty_item1 = get_reserved_qty("_Test Item")
        existing_reserved_qty_item2 = get_reserved_qty("_Test Item Home Desktop 100")

        so = make_sales_order(item_code="_Test Sales BOM Item")
        self.assertEqual(get_reserved_qty("_Test Item"), existing_reserved_qty_item1 + 50)
        self.assertEqual(get_reserved_qty("_Test Item Home Desktop 100"),
            existing_reserved_qty_item2 + 20)

        dn = create_dn_against_so(so.name, 15)
        self.assertEqual(get_reserved_qty("_Test Item"), existing_reserved_qty_item1)
        self.assertEqual(get_reserved_qty("_Test Item Home Desktop 100"),
            existing_reserved_qty_item2)

        dn.cancel()
        self.assertEqual(get_reserved_qty("_Test Item"), existing_reserved_qty_item1 + 50)
        self.assertEqual(get_reserved_qty("_Test Item Home Desktop 100"),
            existing_reserved_qty_item2 + 20)

    def test_warehouse_user(self):
        """User permissions on Warehouse/Company gate who may save a SO."""
        frappe.permissions.add_user_permission("Warehouse", "_Test Warehouse 1 - _TC", "test@example.com")
        frappe.permissions.add_user_permission("Warehouse", "_Test Warehouse 2 - _TC1", "test2@example.com")
        frappe.permissions.add_user_permission("Company", "_Test Company 1", "test2@example.com")

        test_user = frappe.get_doc("User", "test@example.com")
        test_user.add_roles("Sales User", "Material User")
        test_user.remove_roles("Sales Manager")

        test_user_2 = frappe.get_doc("User", "test2@example.com")
        test_user_2.add_roles("Sales User", "Material User")
        test_user_2.remove_roles("Sales Manager")

        frappe.set_user("test@example.com")

        so = make_sales_order(company="_Test Company 1",
            warehouse="_Test Warehouse 2 - _TC1", do_not_save=True)
        so.conversion_rate = 0.02
        so.plc_conversion_rate = 0.02
        # test@example.com has no permission for this warehouse/company.
        self.assertRaises(frappe.PermissionError, so.insert)

        frappe.set_user("test2@example.com")
        so.insert()

        frappe.permissions.remove_user_permission("Warehouse", "_Test Warehouse 1 - _TC", "test@example.com")
        frappe.permissions.remove_user_permission("Warehouse", "_Test Warehouse 2 - _TC1", "test2@example.com")
        frappe.permissions.remove_user_permission("Company", "_Test Company 1", "test2@example.com")

    def test_block_delivery_note_against_cancelled_sales_order(self):
        """A DN drafted against a SO cannot be submitted once the SO is cancelled."""
        so = make_sales_order()

        dn = make_delivery_note(so.name)
        dn.insert()

        so.cancel()
        self.assertRaises(frappe.CancelledLinkError, dn.submit)
def make_sales_order(**args):
    """Create a test Sales Order with sensible defaults.

    Recognised keyword args: transaction_date, company, customer, currency,
    item / item_code, warehouse, qty, rate, do_not_save, do_not_submit.
    Returns the (possibly saved and submitted) Sales Order document.
    """
    args = frappe._dict(args)
    sales_order = frappe.new_doc("Sales Order")
    if args.transaction_date:
        sales_order.transaction_date = args.transaction_date
    sales_order.company = args.company or "_Test Company"
    sales_order.customer = args.customer or "_Test Customer"
    sales_order.delivery_date = add_days(sales_order.transaction_date, 10)
    sales_order.currency = args.currency or "INR"

    # Single item row; `item` wins over `item_code` when both are given.
    item_row = {
        "item_code": args.item or args.item_code or "_Test Item",
        "warehouse": args.warehouse or "_Test Warehouse - _TC",
        "qty": args.qty or 10,
        "rate": args.rate or 100,
        "conversion_factor": 1.0,
    }
    sales_order.append("items", item_row)

    if not args.do_not_save:
        sales_order.insert()
        if not args.do_not_submit:
            sales_order.submit()
    return sales_order
def create_dn_against_so(so, delivered_qty=0):
    """Create and submit a Delivery Note against Sales Order `so`,
    delivering `delivered_qty` units (5 when not specified)."""
    # Let the delivery go through even with no stock on hand.
    frappe.db.set_value("Stock Settings", None, "allow_negative_stock", 1)

    delivery_note = make_delivery_note(so)
    delivery_note.get("items")[0].qty = delivered_qty or 5
    delivery_note.insert()
    delivery_note.submit()
    return delivery_note
def get_reserved_qty(item_code="_Test Item", warehouse="_Test Warehouse - _TC"):
    """Return the Bin's reserved_qty for the item/warehouse pair as a float."""
    bin_filters = {"item_code": item_code, "warehouse": warehouse}
    reserved = frappe.db.get_value("Bin", bin_filters, "reserved_qty")
    return flt(reserved)
test_dependencies = ["Currency Exchange"] | agpl-3.0 |
yokose-ks/edx-platform | common/lib/sandbox-packages/verifiers/draganddrop.py | 70 | 15072 | """ Grader of drag and drop input.
Client side behavior: user can drag and drop images from list on base image.
Then json returned from client is:
{
"draggable": [
{ "image1": "t1" },
{ "ant": "t2" },
{ "molecule": "t3" },
]
}
values are target names.
or:
{
"draggable": [
{ "image1": "[10, 20]" },
{ "ant": "[30, 40]" },
{ "molecule": "[100, 200]" },
]
}
values are (x,y) coordinates of centers of dragged images.
"""
import json
def flat_user_answer(user_answer):
    """
    Convert nested `user_answer` entries to flat format.

    Each entry is a one-item dict mapping a draggable name to its position.
    A position may itself be a chain of nested one-item dicts, e.g.::

        {'up': {'first': {'p': 'p_l'}}}

    which flattens to::

        {'up': 'p_l[p][first]'}

    Entries whose position is not a dict are returned unchanged.
    """
    def parse_user_answer(answer):
        # `answer` is a one-item mapping: {draggable_name: position}.
        # next(iter(...)) works on both Python 2 and 3, unlike the
        # Python-2-only `dict.keys()[0]`.
        key = next(iter(answer))
        value = answer[key]
        if not isinstance(value, dict):
            return answer

        # Walk down the nested one-item dicts, recording keys on the way:
        # {'first': {'p': 'p_l'}} -> keys ['first', 'p'], leaf 'p_l'.
        nested_keys = []
        leaf = value
        while isinstance(leaf, dict):
            inner_key = next(iter(leaf))
            leaf = leaf[inner_key]
            nested_keys.append(inner_key)

        # Build 'leaf[k_n]...[k_1]' from the innermost key outwards.
        complex_value = '{0}'.format(leaf)
        for nested_key in reversed(nested_keys):
            complex_value = '{0}[{1}]'.format(complex_value, nested_key)
        return {key: complex_value}

    return [parse_user_answer(answer) for answer in user_answer]
class PositionsCompare(list):
    """ Class for comparing positions.

    Args:
        list or string::

            "abc" - target id
            [10, 20] - list of integers (x, y coordinates)
            [[10, 20], 200] - list of [x, y] plus a radius of forgiveness
    """
    def __eq__(self, other):
        """ Compares two arguments.

        Default list behavior converts the string "abc" to the list
        ["a", "b", "c"]; we rely on that to compare target ids.

        If self or other is empty - returns False.

        Args:
            self, other: str, unicode, list, int, float

        Returns: bool
        """
        # Empty positions never match anything (empty lists are falsy).
        if not self or not other:
            return False

        if (isinstance(self[0], (list, int, float)) and
                isinstance(other[0], (list, int, float))):
            return self.coordinate_positions_compare(other)
        # type(u'') is `unicode` on Python 2 and `str` on Python 3, so this
        # check works on both interpreters (the bare name `unicode` raises
        # NameError on Python 3).
        elif (isinstance(self[0], (type(u''), str)) and
                isinstance(other[0], (type(u''), str))):
            return ''.join(self) == ''.join(other)
        else:
            # Improper argument types: neither two coordinate positions nor
            # two string/unicode target ids.
            return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def coordinate_positions_compare(self, other, r=10):
        """ Checks if self is equal to other inside radius of forgiveness
        (default 10 px).

        Args:
            self, other: [x, y] or [[x, y], r], where r is radius of
                         forgiveness;
            x, y, r: int

        Returns: bool.
        """
        # get max radius of forgiveness
        if isinstance(self[0], list):  # [(x, y), r] case
            r = max(self[1], r)
            x1, y1 = self[0]
        else:
            x1, y1 = self

        if isinstance(other[0], list):  # [(x, y), r] case
            r = max(other[1], r)
            x2, y2 = other[0]
        else:
            x2, y2 = other

        # Compare squared distance against r^2 to avoid a sqrt.
        if (x2 - x1) ** 2 + (y2 - y1) ** 2 > r * r:
            return False

        return True
class DragAndDrop(object):
    """ Grader class for drag and drop inputtype.
    """

    def grade(self):
        ''' Grade user answer.

        Checks if every draggable is placed on proper target or on proper
        coordinates within radius of forgiveness (default is 10).

        Returns: bool.
        '''
        # Any draggable the user placed that never appears in the correct
        # answer fails the attempt outright.
        for draggable in self.excess_draggables:
            if self.excess_draggables[draggable]:
                return False  # user answer has more draggables than correct answer

        # Number of draggables in user_groups may differ from
        # correct_groups; that is incorrect, except the special case with
        # 'number' (reusable draggables may legitimately repeat).
        for index, draggable_ids in enumerate(self.correct_groups):
            # 'number' rule special case
            # for reusable draggables we may get in self.user_groups
            # {'1': [u'2', u'2', u'2'], '0': [u'1', u'1'], '2': [u'3']}
            # if '+number' is in rule - do not remove duplicates and strip
            # '+number' from rule
            # NOTE(review): dict.keys()[0] is Python-2-only syntax.
            current_rule = self.correct_positions[index].keys()[0]
            if 'number' in current_rule:
                rule_values = self.correct_positions[index][current_rule]
                # Re-key the rule without the '+number' suffix; keep
                # duplicate draggables in the user group.
                self.correct_positions[index].pop(current_rule, None)
                parsed_rule = current_rule.replace('+', '').replace('number', '')
                self.correct_positions[index][parsed_rule] = rule_values
            else:  # remove duplicates
                self.user_groups[index] = list(set(self.user_groups[index]))

            if sorted(draggable_ids) != sorted(self.user_groups[index]):
                return False

        # Check that in every group, for rule of that group, user positions of
        # every element are equal with correct positions
        for index, _ in enumerate(self.correct_groups):
            rules_executed = 0
            for rule in ('exact', 'anyof', 'unordered_equal'):
                # every group has only one rule
                if self.correct_positions[index].get(rule, None):
                    rules_executed += 1
                    if not self.compare_positions(
                            self.correct_positions[index][rule],
                            self.user_positions[index]['user'], flag=rule):
                        return False
            if not rules_executed:  # no correct rules for current group
                # probably xml content mistake - wrong rules names
                return False

        return True

    def compare_positions(self, correct, user, flag):
        """ Compares two lists of positions with flag rules. Order of
        correct/user arguments matters only for the 'anyof' flag.

        Rules description:

            'exact' means 1-1 ordered relationship::

                [el1, el2, el3] is 'exact' equal to [el5, el6, el7] when
                el1 == el5, el2 == el6, el3 == el7.
                Equality function is custom, see below.

            'anyof' means subset relationship::

                user = [el1, el2] is 'anyof' equal to correct = [el1, el2, el3]
                when set(user) <= set(correct).

                'anyof' is ordered relationship. It always checks if user
                is subset of correct. Equality function is custom, see below.

                Examples:
                    - many draggables per position:
                    user ['1','2','2','2'] is 'anyof' equal to ['1', '2', '3']

                    - draggables can be placed in any order:
                    user ['1','2','3','4'] is 'anyof' equal to ['4', '2', '1', '3']

            'unordered_equal' is same as 'exact' but disregards order.

        Equality functions:

        Equality function depends on the type of element. They are declared
        in the PositionsCompare class. For position-like target ids
        ("t1", "t2", etc.) it is string equality. For coordinate positions
        ([1,2] or [[1,2], 15]) it is coordinate_positions_compare
        (see docstrings in PositionsCompare class).

        Args:
            correct, user: lists of positions

        Returns: True if within rule lists are equal, otherwise False.
        """
        if flag == 'exact':
            if len(correct) != len(user):
                return False
            for el1, el2 in zip(correct, user):
                if PositionsCompare(el1) != PositionsCompare(el2):
                    return False

        if flag == 'anyof':
            for u_el in user:
                for c_el in correct:
                    if PositionsCompare(u_el) == PositionsCompare(c_el):
                        break
                else:
                    # General: the else is executed after the for,
                    # only if the for terminates normally (not by a break).
                    # In this case, 'for' is terminated normally if every element
                    # from 'correct' list isn't equal to the concrete element from
                    # 'user' list. So as we found one element from 'user' list
                    # that is not in 'correct' list - we return False.
                    return False

        if flag == 'unordered_equal':
            if len(correct) != len(user):
                return False
            # Work on a copy so matched elements can be consumed exactly once.
            temp = correct[:]
            for u_el in user:
                for c_el in temp:
                    if PositionsCompare(u_el) == PositionsCompare(c_el):
                        temp.remove(c_el)
                        break
                else:
                    # same as above - if we found an element from the 'user'
                    # list that is not in the 'correct' list - we return False.
                    return False

        return True

    def __init__(self, correct_answer, user_answer):
        """ Populates DragAndDrop variables from user_answer and correct_answer.
        If correct_answer is dict, converts it to list.

        Correct answer in dict form is a simple structure for fast and simple
        grading. Example of correct answer dict::

            correct_answer = {'name4': 't1',
                              'name_with_icon': 't1',
                              '5': 't2',
                              '7': 't2'}

        It is draggable_name: draggable_position mapping.

        Advanced form converted from simple form uses 'exact' rule
        for matching.

        Correct answer in list form is designed for advanced cases::

            correct_answers = [
                {
                    'draggables': ['1', '2', '3', '4', '5', '6'],
                    'targets': [
                        's_left', 's_right', 's_sigma', 's_sigma_star', 'p_pi_1', 'p_pi_2'],
                    'rule': 'anyof'},
                {
                    'draggables': ['7', '8', '9', '10'],
                    'targets': ['p_left_1', 'p_left_2', 'p_right_1', 'p_right_2'],
                    'rule': 'anyof'
                }
            ]

        Advanced answer in list form is list of dicts, and every dict must have
        3 keys: 'draggables', 'targets' and 'rule'. 'Draggables' value is
        list of draggables ids, 'targets' values are list of targets ids, 'rule'
        value one of 'exact', 'anyof', 'unordered_equal', 'anyof+number',
        'unordered_equal+number'.

        Advanced form uses "all dicts must match with their rule" logic.

        Same draggable cannot appear in more than one dict.

        Behavior is more widely explained in the sphinx documentation.

        Args:
            user_answer: json
            correct_answer: dict or list
        """
        self.correct_groups = []  # Correct groups from xml.
        self.correct_positions = []  # Correct positions for comparing.
        self.user_groups = []  # Will be populated from user answer.
        self.user_positions = []  # Will be populated from user answer.

        # Convert from dict answer format to list format.
        if isinstance(correct_answer, dict):
            tmp = []
            for key, value in correct_answer.items():
                tmp.append({
                    'draggables': [key],
                    'targets': [value],
                    'rule': 'exact'})
            correct_answer = tmp

        # Convert string `user_answer` to object.
        user_answer = json.loads(user_answer)

        # This dictionary will hold a key for each draggable the user placed on
        # the image. The value is True if that draggable is not mentioned in any
        # correct_answer entries. If the draggable is mentioned in at least one
        # correct_answer entry, the value is False.
        # default to consider every user answer excess until proven otherwise.
        # NOTE(review): `user_answer` is iterated here as a list of one-item
        # dicts, while the module docstring shows a {'draggables': [...]}
        # wrapper -- presumably the caller unwraps it; confirm against capa.
        self.excess_draggables = dict((users_draggable.keys()[0], True)
            for users_draggable in user_answer)

        # Convert nested `user_answer` to flat format.
        user_answer = flat_user_answer(user_answer)

        # Create identical data structures from user answer and correct answer.
        for answer in correct_answer:
            user_groups_data = []
            user_positions_data = []
            for draggable_dict in user_answer:
                # Draggable_dict is 1-to-1 {draggable_name: position}.
                draggable_name = draggable_dict.keys()[0]
                if draggable_name in answer['draggables']:
                    user_groups_data.append(draggable_name)
                    user_positions_data.append(
                        draggable_dict[draggable_name])
                    # proved that this is not excess
                    self.excess_draggables[draggable_name] = False
            self.correct_groups.append(answer['draggables'])
            self.correct_positions.append({answer['rule']: answer['targets']})
            self.user_groups.append(user_groups_data)
            self.user_positions.append({'user': user_positions_data})
def grade(user_input, correct_answer):
    """Grade a drag-and-drop submission.

    Builds a DragAndDrop grader from the raw client JSON (`user_input`)
    and the expected answer (`correct_answer`, in either the simple dict
    form or the advanced list form -- both formats are documented on
    DragAndDrop.__init__) and returns its verdict.

    Args:
        user_input: json string produced by the client.
        correct_answer: dict or list describing the correct placement.

    Returns: bool
    """
    grader = DragAndDrop(correct_answer=correct_answer,
                         user_answer=user_input)
    return grader.grade()
| agpl-3.0 |
closureplease/npm-closure-tools | closure-bin/labs/code/generate_jsdoc_test.py | 212 | 3494 | #!/usr/bin/env python
#
# Copyright 2013 The Closure Library Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required `by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit test for generate_jsdoc."""
__author__ = 'nnaze@google.com (Nathan Naze)'
import re
import unittest
import generate_jsdoc
class InsertJsDocTestCase(unittest.TestCase):
    """Unit test for source. Tests the parser on a known source input."""

    def testMatchFirstFunction(self):
        # The matcher should find the first function and capture its argument
        # list verbatim.
        match = generate_jsdoc._MatchFirstFunction(_TEST_SOURCE)
        self.assertNotEqual(None, match)
        self.assertEqual('aaa, bbb, ccc', match.group('arguments'))

        match = generate_jsdoc._MatchFirstFunction(_INDENTED_SOURCE)
        self.assertNotEqual(None, match)
        self.assertEqual('', match.group('arguments'))

        # Identifiers broken across newlines must still be captured whole.
        match = generate_jsdoc._MatchFirstFunction(_ODD_NEWLINES_SOURCE)
        self.assertEquals('goog.\nfoo.\nbar\n.baz.\nqux',
                          match.group('identifier'))

    def testParseArgString(self):
        # The comma-separated argument string is split into bare names.
        self.assertEquals(
            ['foo', 'bar', 'baz'],
            list(generate_jsdoc._ParseArgString('foo, bar, baz')))

    def testExtractFunctionBody(self):
        self.assertEquals(
            '\n // Function comments.\n return;\n',
            generate_jsdoc._ExtractFunctionBody(_TEST_SOURCE))

        # Second argument is the indentation level of the function body.
        self.assertEquals(
            '\n var bar = 3;\n return true;\n',
            generate_jsdoc._ExtractFunctionBody(_INDENTED_SOURCE, 2))

    def testContainsValueReturn(self):
        # Only a `return <expr>` counts; a bare `return;` does not.
        self.assertTrue(generate_jsdoc._ContainsReturnValue(_INDENTED_SOURCE))
        self.assertFalse(generate_jsdoc._ContainsReturnValue(_TEST_SOURCE))

    def testInsertString(self):
        self.assertEquals(
            'abc123def',
            generate_jsdoc._InsertString('abcdef', '123', 3))

    def testInsertJsDoc(self):
        # End-to-end: a JsDoc block is generated and inserted above the
        # first function, with @param/@return lines as appropriate.
        self.assertEquals(
            _EXPECTED_INDENTED_SOURCE,
            generate_jsdoc.InsertJsDoc(_INDENTED_SOURCE))

        self.assertEquals(
            _EXPECTED_TEST_SOURCE,
            generate_jsdoc.InsertJsDoc(_TEST_SOURCE))

        self.assertEquals(
            _EXPECTED_ODD_NEWLINES_SOURCE,
            generate_jsdoc.InsertJsDoc(_ODD_NEWLINES_SOURCE))
# Fixture sources fed to generate_jsdoc in the tests above, paired with the
# expected output after JsDoc insertion.

# A function with a value return and no arguments.
_INDENTED_SOURCE = """\
boo.foo.woo = function() {
 var bar = 3;
 return true;
};
"""

_EXPECTED_INDENTED_SOURCE = """\
/**
* @return
*/
boo.foo.woo = function() {
 var bar = 3;
 return true;
};
"""

# A function with three arguments and a bare `return;`.
_TEST_SOURCE = """\
// Random comment.
goog.foo.bar = function (aaa, bbb, ccc) {
// Function comments.
return;
};
"""

_EXPECTED_TEST_SOURCE = """\
// Random comment.
/**
* @param {} aaa
* @param {} bbb
* @param {} ccc
*/
goog.foo.bar = function (aaa, bbb, ccc) {
// Function comments.
return;
};
"""

# The same shape with the identifier and argument list broken across
# newlines -- exercises the matcher's multiline handling.
_ODD_NEWLINES_SOURCE = """\
goog.
foo.
bar
.baz.
qux
=
function
(aaa,
bbb, ccc) {
// Function comments.
return;
};
"""

_EXPECTED_ODD_NEWLINES_SOURCE = """\
/**
* @param {} aaa
* @param {} bbb
* @param {} ccc
*/
goog.
foo.
bar
.baz.
qux
=
function
(aaa,
bbb, ccc) {
// Function comments.
return;
};
"""


if __name__ == '__main__':
    unittest.main()
| mit |
baoson2211/ardupilot | Tools/LogAnalyzer/tests/TestIMUMatch.py | 61 | 3781 | from LogAnalyzer import Test,TestResult
import DataflashLog
from math import sqrt
class TestIMUMatch(Test):
    '''test for consistency between the primary and secondary IMU accelerometers'''

    def __init__(self):
        Test.__init__(self)
        self.name = "IMU Mismatch"

    def run(self, logdata, verbose):
        # Tuning parameters: mismatch thresholds (presumably in the units of
        # the AccX/Y/Z log fields, i.e. m/s^2 -- confirm against the log
        # format) and the time constant of the low-pass filter applied to
        # the per-axis difference signal.
        warn_threshold = .75
        fail_threshold = 1.5
        filter_tc = 5.0

        self.result = TestResult()
        self.result.status = TestResult.StatusType.GOOD

        # Single-IMU boards cannot mismatch -- test not applicable.
        if ("IMU" in logdata.channels) and (not "IMU2" in logdata.channels):
            self.result.status = TestResult.StatusType.NA
            self.result.statusMessage = "No IMU2"
            return

        if (not "IMU" in logdata.channels) or (not "IMU2" in logdata.channels):
            self.result.status = TestResult.StatusType.UNKNOWN
            self.result.statusMessage = "No IMU log data"
            return

        imu1 = logdata.channels["IMU"]
        imu2 = logdata.channels["IMU2"]

        imu1_timems = imu1["TimeMS"].listData
        imu1_accx = imu1["AccX"].listData
        imu1_accy = imu1["AccY"].listData
        imu1_accz = imu1["AccZ"].listData

        imu2_timems = imu2["TimeMS"].listData
        imu2_accx = imu2["AccX"].listData
        imu2_accy = imu2["AccY"].listData
        imu2_accz = imu2["AccZ"].listData

        # Re-pack each IMU stream as a time-sorted list of
        # {'t', 'x', 'y', 'z'} samples, converting ms -> s.
        imu1 = []
        imu2 = []
        for i in range(len(imu1_timems)):
            imu1.append({'t': imu1_timems[i][1]*1.0E-3, 'x': imu1_accx[i][1], 'y': imu1_accy[i][1], 'z': imu1_accz[i][1]})
        for i in range(len(imu2_timems)):
            imu2.append({'t': imu2_timems[i][1]*1.0E-3, 'x': imu2_accx[i][1], 'y': imu2_accy[i][1], 'z': imu2_accz[i][1]})

        imu1.sort(key=lambda x: x['t'])
        imu2.sort(key=lambda x: x['t'])

        imu2_index = 0
        last_t = None
        xdiff_filtered = 0
        ydiff_filtered = 0
        zdiff_filtered = 0
        max_diff_filtered = 0

        for i in range(len(imu1)):
            # find the imu2 sample closest in time to this imu1 sample
            t = imu1[i]['t']
            dt = 0 if last_t is None else t - last_t
            dt = min(dt, .1)

            next_imu2 = None
            # BUG FIX: the search loop previously reused `i`, shadowing the
            # outer sample index, so `imu1[i]` below was indexed with an
            # imu2 position (wrong sample, possible IndexError). Use a
            # separate variable for the search.
            for j in range(imu2_index, len(imu2)):
                next_imu2 = imu2[j]
                imu2_index = j
                if next_imu2['t'] >= t:
                    break
            prev_imu2 = imu2[imu2_index-1]
            closest_imu2 = next_imu2 if abs(next_imu2['t']-t) < abs(prev_imu2['t']-t) else prev_imu2

            xdiff = imu1[i]['x']-closest_imu2['x']
            ydiff = imu1[i]['y']-closest_imu2['y']
            zdiff = imu1[i]['z']-closest_imu2['z']

            # First-order low-pass filter on each axis difference, then track
            # the worst filtered vector magnitude seen over the whole log.
            xdiff_filtered += (xdiff-xdiff_filtered)*dt/filter_tc
            ydiff_filtered += (ydiff-ydiff_filtered)*dt/filter_tc
            zdiff_filtered += (zdiff-zdiff_filtered)*dt/filter_tc

            diff_filtered = sqrt(xdiff_filtered**2+ydiff_filtered**2+zdiff_filtered**2)
            max_diff_filtered = max(max_diff_filtered, diff_filtered)
            last_t = t

        if max_diff_filtered > fail_threshold:
            self.result.statusMessage = "Check vibration or accelerometer calibration. (Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold,fail_threshold)
            self.result.status = TestResult.StatusType.FAIL
        elif max_diff_filtered > warn_threshold:
            self.result.statusMessage = "Check vibration or accelerometer calibration. (Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold,fail_threshold)
            self.result.status = TestResult.StatusType.WARN
        else:
            self.result.statusMessage = "(Mismatch: %.2f, WARN: %.2f, FAIL: %.2f)" % (max_diff_filtered,warn_threshold, fail_threshold)
boundlessgeo/qgis-connect-plugin | boundlessconnect/connect.py | 1 | 16927 | # -*- coding: utf-8 -*-
from builtins import object
import os
import re
import json
import base64
import urllib2
import tempfile
from copy import copy
import webbrowser
from qgis.PyQt.QtGui import QIcon, QCursor
from qgis.PyQt.QtCore import Qt, QUrl, QFile, QEventLoop
from qgis.PyQt.QtWidgets import QMessageBox, QApplication
from qgis.PyQt.QtNetwork import QNetworkReply, QNetworkRequest
from qgis.gui import QgsMessageBar, QgsFileDownloader
from qgis.core import QgsNetworkAccessManager, QgsRasterLayer, QgsMapLayerRegistry
from qgis.utils import iface
from qgis import utils as qgsutils
import pyplugin_installer
from pyplugin_installer.installer_data import plugins
from qgiscommons2.network.networkaccessmanager import NetworkAccessManager
from qgiscommons2.gui.settings import pluginSetting
from qgiscommons2.files import tempFilenameInTempFolder
from qgiscommons2.network.oauth2 import (oauth2_supported,
get_oauth_authcfg
)
from boundlessconnect.gui.executor import execute
from boundlessconnect import utils
from boundlessconnect import basemaputils
pluginPath = os.path.dirname(__file__)
# Role names that grant access to everyone (see ConnectContent.canOpen).
OPEN_ROLE = "open"
PUBLIC_ROLE = "public"

# Page users are sent to when their subscription does not cover an item.
SUBSCRIBE_URL = "https://connect.boundlessgeo.com/Upgrade-Subscription"

# Id of the QGIS "Lessons" plugin that ConnectLesson installs content into.
LESSONS_PLUGIN_NAME = "lessons"

# Presumably the page size of Connect search results -- usage is outside
# this portion of the file; confirm before relying on it.
RESULTS_PER_PAGE = 20
class ConnectContent(object):
    """Base class for a single Boundless Connect content item.

    Subclasses provide typeName(), an icon via iconPath() and the concrete
    _open() action.
    """

    def __init__(self, url, name, description, roles=None):
        """
        :param url: location of the content item
        :param name: display title
        :param description: short description shown in the results list
        :param roles: roles allowed to open the item; defaults to ["open"]
        """
        self.url = url
        self.name = name
        self.description = description
        # BUG FIX: the default used to be the mutable literal ["open"],
        # which is created once and shared (and mutable) across every
        # instance constructed without an explicit roles argument.
        self.roles = ["open"] if roles is None else roles

    def iconPath(self):
        # Subclasses return the path to their icon; the base has none.
        pass

    def canOpen(self, roles):
        """Return True if any of the user's `roles` may open this item,
        or if the item is open/public for everybody."""
        matches = [role for role in roles if role in self.roles]
        return bool(matches) or (OPEN_ROLE in self.roles) or (PUBLIC_ROLE in self.roles)

    def open(self, roles):
        """Open the item if allowed; otherwise send the user to upgrade."""
        if self.canOpen(roles):
            self._open()
        else:
            webbrowser.open_new(SUBSCRIBE_URL)

    def asHtmlEntry(self, roles):
        """Return the HTML snippet representing this item in the results
        list; the button colour reflects whether the user may open it."""
        canInstall = "Green" if self.canOpen(roles) else "Orange"
        # Pad short descriptions to 100 chars so entries align visually
        # (ljust returns the string unchanged when it is already longer).
        desc = self.description.ljust(100)
        s = """<div class="icon"><div class="icon-container">
<img src="{image}"></div></div>
<div class="description"><h2>{title}</h2><p>{description}</p>
<a class="btn{available}" href="{url}">OPEN</a>
</div>
""".format(image=QUrl.fromLocalFile(self.iconPath()).toString(),
           title=self.name,
           description=desc,
           available=canInstall,
           url=self.url)
        return s
class ConnectWebAdress(ConnectContent):
    """Content item whose open action is simply launching its URL in the
    system web browser."""

    def _open(self):
        webbrowser.open_new(self.url)
class ConnectVideo(ConnectWebAdress):
    """A video resource.

    NOTE(review): no iconPath() override -- the inherited base method
    returns None, so asHtmlEntry() would pass None to QUrl.fromLocalFile();
    confirm videos never reach the HTML listing.
    """

    def typeName(self):
        return "Video"
class ConnectLearning(ConnectWebAdress):
    """A learning-center resource."""

    def typeName(self):
        return "Learning"

    def iconPath(self):
        return os.path.join(pluginPath, "icons", "learning.svg")
class ConnectQA(ConnectWebAdress):
    """A question-and-answer resource."""

    def typeName(self):
        return "Q & A"

    def iconPath(self):
        return os.path.join(pluginPath, "icons", "qa.svg")
class ConnectBlog(ConnectWebAdress):
    """A blog post."""

    def typeName(self):
        return "Blog"

    def iconPath(self):
        return os.path.join(pluginPath, "icons", "blog.svg")
class ConnectDocumentation(ConnectWebAdress):
    """A documentation page."""

    def typeName(self):
        return "Documentation"

    def iconPath(self):
        return os.path.join(pluginPath, "icons", "doc.svg")
class ConnectDiscussion(ConnectWebAdress):
    """A discussion thread.

    NOTE(review): no iconPath() override -- the inherited base method
    returns None; verify asHtmlEntry() is never called for this type.
    """

    def typeName(self):
        return "Discussion"
class ConnectOther(ConnectWebAdress):
    """Catch-all for content types without a dedicated class.

    NOTE(review): no iconPath() override -- the inherited base method
    returns None; verify asHtmlEntry() is never called for this type.
    """

    def typeName(self):
        return "Other"
class ConnectLesson(ConnectContent):
    """A downloadable lessons bundle, installed through the Lessons plugin."""

    def typeName(self):
        return "Lesson"

    def iconPath(self):
        return os.path.join(pluginPath, "icons", "howto.svg")

    def _open(self):
        # Installing a lessons bundle requires the Lessons plugin to be
        # both installed and active in this QGIS session.
        if LESSONS_PLUGIN_NAME not in qgsutils.available_plugins:
            iface.messageBar().pushMessage(
                "Cannot install lessons",
                "Lessons plugin is not installed",
                QgsMessageBar.WARNING)
        elif LESSONS_PLUGIN_NAME not in qgsutils.active_plugins:
            iface.messageBar().pushMessage(
                "Cannot install lessons",
                "Lessons plugin is not active",
                QgsMessageBar.WARNING)
        else:
            self.downloadAndInstall()

    def asHtmlEntry(self, roles):
        # Same layout as the base-class entry but with an INSTALL button.
        canInstall = "Green" if self.canOpen(roles) else "Orange"
        desc = self.description
        if len(self.description) < 100:
            desc = self.description + " " * (100-len(self.description))
        s = """<div class="icon"><div class="icon-container">
<img src="{image}"></div></div>
<div class="description"><h2>{title}</h2><p>{description}</p>
<a class="btn{available}" href="{url}">INSTALL</a>
</div>
""".format(image=QUrl.fromLocalFile(self.iconPath()).toString(),
           title=self.name,
           description=desc,
           available=canInstall,
           url=self.url)
        return s

    def downloadAndInstall(self):
        # Start an asynchronous download of the lessons ZIP; the reply's
        # finished signal drives requestFinished() below. The request and
        # reply are kept on self so they outlive this call.
        QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
        url = QUrl(self.url)
        self.request = QNetworkRequest(url)
        self.reply = QgsNetworkAccessManager.instance().get(self.request)
        self.reply.finished.connect(self.requestFinished)

    def requestFinished(self):
        """Slot run when the lessons download completes (or fails)."""
        if self.reply.error() != QNetworkReply.NoError:
            QApplication.restoreOverrideCursor()
            iface.messageBar().pushMessage(
                "Lessons could not be installed:\n",
                self.reply.errorString(),
                QgsMessageBar.WARNING)
            self.reply.deleteLater()
            return
        # Write the payload to a temp file and hand it to the Lessons
        # plugin for installation.
        f = QFile(tempFilenameInTempFolder(os.path.basename(self.url).split(".")[0]))
        f.open(QFile.WriteOnly)
        f.write(self.reply.readAll())
        f.close()
        self.reply.deleteLater()
        # Imported lazily: the Lessons plugin may not be loadable at module
        # import time (_open() has already verified it is active).
        from lessons import installLessonsFromZipFile
        installLessonsFromZipFile(f.fileName())
        QApplication.restoreOverrideCursor()
        iface.messageBar().pushMessage(
            "Completed",
            "Lessons were correctly installed",
            QgsMessageBar.INFO)
class ConnectPlugin(ConnectContent):
    """A QGIS plugin entry, installed through the plugin installer."""

    def __init__(self, plugin, roles):
        # `plugin` is the metadata dict from pyplugin_installer; its
        # "status" and "id" fields drive the install flow in _open().
        self.plugin = plugin
        self.name = plugin["name"]
        # Strip the boilerplate "available with access" paragraph that
        # Connect appends to plugin descriptions.
        self.description = re.sub("<p>This plugin is available.*?access</a></p>", "", plugin["description"])
        self.url = plugin["download_url"]
        self.roles = roles

    def typeName(self):
        return "Plugin"

    def iconPath(self):
        return os.path.join(pluginPath, "icons", "plugin.svg")

    def asHtmlEntry(self, roles):
        # Same layout as the base-class entry but with an INSTALL button
        # and no description padding.
        canInstall = "Green" if self.canOpen(roles) else "Orange"
        s = """<div class="icon"><div class="icon-container">
<img src="{image}"></div></div>
<div class="description"><h2>{title}</h2><p>{description}</p>
<a class="btn{available}" href="{url}">INSTALL</a>
</div>
""".format(image=QUrl.fromLocalFile(self.iconPath()).toString(),
           title=self.name,
           description=self.description,
           available=canInstall,
           url=self.url
           )
        return s

    def _open(self):
        # Confirm with the user before upgrading or reinstalling; fresh
        # installs ("not installed" / "new") proceed without prompting.
        if self.plugin["status"] == "upgradeable":
            reply = QMessageBox.question(
                iface.mainWindow(),
                "Plugin",
                "An older version of the plugin is already installed. Do you want to upgrade it?",
                QMessageBox.Yes | QMessageBox.No)
            if reply != QMessageBox.Yes:
                return
        elif self.plugin["status"] in ["not installed", "new"]:
            pass
        else:
            reply = QMessageBox.question(
                iface.mainWindow(),
                "Plugin",
                "The plugin is already installed. Do you want to reinstall it?",
                QMessageBox.Yes | QMessageBox.No)
            if reply != QMessageBox.Yes:
                return

        def _install():
            installer = pyplugin_installer.instance()
            installer.installPlugin(self.plugin["id"])
            self.plugin["status"] = "installed"

        # Run through the plugin's executor so the UI stays responsive.
        execute(_install)
class ConnectBasemap(ConnectContent):
    """A Connect search result describing a basemap layer.

    The basemap can be added to the current canvas or stored in the user's
    default project.  Both operations need OAuth support and a role present
    in the basemap's access list; otherwise the user is redirected to the
    subscription page.
    """

    def __init__(self, url, name, description, json, roles=None):
        # ``roles`` previously defaulted to the mutable ``["open"]`` — one
        # list shared by every instance created with the default.  A None
        # sentinel keeps the same effective default without the aliasing
        # hazard.
        self.url = url
        self.name = name
        self.description = description
        self.roles = ["open"] if roles is None else roles
        self.json = json

    def typeName(self):
        return "Basemap"

    def iconPath(self):
        return os.path.join(pluginPath, "icons", "map.svg")

    def asHtmlEntry(self, roles):
        """Return the HTML snippet rendered for this basemap in the results list."""
        canInstall = "Green" if self.canOpen(roles) else "Orange"
        s = """<div class="icon"><div class="icon-container">
              <img src="{image}"></div></div>
        <div class="description"><h2>{title}</h2><p>{description}</p>
        <a class="btn{available}" href="canvas{url}">ADD TO MAP</a>
        <a class="btn{available}" href="project{url}">ADD TO DEFAULT PROJECT</a>
        </div>
        """.format(image=QUrl.fromLocalFile(self.iconPath()).toString(),
                   title=self.name,
                   description=self.description,
                   available=canInstall,
                   url=self.url
                   )
        return s

    def _authId(self, errorTitle):
        """Return the OAuth auth configuration id, or None after reporting why.

        Shared precondition checks for addToCanvas()/addToDefaultProject():
        OAuth support must be available and a valid auth configuration must
        exist.  Failures are pushed to the message bar under *errorTitle*.
        """
        if not oauth2_supported:
            iface.messageBar().pushMessage(
                errorTitle,
                "OAuth support is not available",
                QgsMessageBar.WARNING)
            return None
        authcfg = get_oauth_authcfg()
        if authcfg is None:
            iface.messageBar().pushMessage(
                errorTitle,
                "Cannot find a valid authentication configuration",
                QgsMessageBar.WARNING)
            return None
        return authcfg.id()

    def addToCanvas(self, roles):
        """Add the basemap to the current canvas as an XYZ raster layer."""
        if not self.canOpen(roles):
            webbrowser.open_new(SUBSCRIBE_URL)
            return
        authId = self._authId("Cannot load basemap")
        if authId is None:
            return
        layer = QgsRasterLayer('authcfg={authcfg}&type=xyz&url={url}'.format(url=urllib2.quote("{}?version={}".format(self.url, pluginSetting("apiVersion"))),
                               authcfg=authId), self.name, "wms")
        if layer.isValid():
            QgsMapLayerRegistry.instance().addMapLayer(layer)
        else:
            iface.messageBar().pushMessage(
                "Cannot load basemap",
                "Cannot create basemap layer",
                QgsMessageBar.WARNING)

    def addToDefaultProject(self, roles):
        """Store the basemap in the user's default project."""
        if not self.canOpen(roles):
            webbrowser.open_new(SUBSCRIBE_URL)
            return
        authId = self._authId("Cannot add basemap")
        if authId is None:
            return
        if not basemaputils.createOrAddDefaultBasemap([self.json], [self.name], authId):
            iface.messageBar().pushMessage(
                "Cannot add basemap",
                "Cannot update or create default project",
                QgsMessageBar.WARNING)
        else:
            iface.messageBar().pushMessage(
                "Base map added",
                "Base map correctly added to default project.",
                QgsMessageBar.INFO)
# Maps a Connect category code to (result class, human-readable label).
# "PLUG" results are not listed here: search() resolves them against the
# _plugins cache instead.
categories = {"LC": (ConnectLearning, "Learning"),
              "DOC": (ConnectDocumentation, "Documentation"),
              "BLOG": (ConnectBlog, "Blog"),
              "QA": (ConnectQA, "Q & A"),
              "LESSON": (ConnectLesson, "Lesson")
              }

# Cache of installable Boundless plugins keyed by plugin name.
# Populated by loadPlugins(), consumed by search().
_plugins = {}
def loadPlugins():
    """Refresh the _plugins cache of installable Boundless plugins.

    Fetches fresh plugin metadata from the installer first; the Connect
    plugin itself ("boundlessconnect") is excluded from the cache.
    """
    global _plugins
    _plugins = {}
    installer = pyplugin_installer.instance()
    installer.fetchAvailablePlugins(True)
    available = plugins.all()
    for name, plugin in available.items():
        if utils.isBoundlessPlugin(plugin) and name not in ["boundlessconnect"]:
            _plugins[plugin["name"]] = copy(plugin)
def search(text, category='', page=0, token=None):
    """Query the Connect search endpoint and return one page of results.

    An empty *text* asks the server to match everything.  "PLUG" hits are
    resolved against the local _plugins cache; other categories are mapped
    to their ConnectContent subclass through the ``categories`` table.
    """
    endpoint = pluginSetting("connectEndpoint")
    version = pluginSetting("apiVersion")
    if text != '':
        text = '&q=' + text
        searchUrl = "{}/search/?version={}".format(endpoint, version)
    else:
        searchUrl = "{}/search/matchAll?version={}".format(endpoint, version)
    headers = {"Authorization": "Bearer {}".format(token)}
    nam = NetworkAccessManager()
    if category == '':
        url = "{}{}&si={}&c={}".format(searchUrl, text, int(page), RESULTS_PER_PAGE)
    else:
        url = "{}{}&cat={}&si={}&c={}".format(searchUrl, text, category, int(page), RESULTS_PER_PAGE)
    res, content = nam.request(url, headers=headers)
    # Drop non-ASCII bytes before decoding the JSON payload.
    payload = json.loads(re.sub(r'[^\x00-\x7f]', r'', content))
    results = []
    for feature in payload["features"]:
        props = feature["properties"]
        roles = props["role"].split(",")
        cat = props["category"]
        if cat == "PLUG":
            plugin = _plugins.get(props["title"], None)
            if plugin:
                results.append(ConnectPlugin(plugin, roles))
        else:
            # Fall back to the first sentence of the description as a title.
            title = props["title"] or props["description"].split(".")[0]
            if cat in categories:
                results.append(categories[cat][0](props["url"].replace("\n", ""),
                                                  title,
                                                  props["description"],
                                                  roles))
    return results
def findAll(text, category, token):
    """Return all results for *text* in *category*, following pagination.

    Keeps requesting pages until the server returns a page that is not
    exactly RESULTS_PER_PAGE long.
    """
    results = []
    page = 0
    while True:
        batch = search(text, category, page, token)
        results.extend(batch)
        if len(batch) != RESULTS_PER_PAGE:
            break
        page += 1
    return results
def searchBasemaps(text, token):
    """Query the Connect basemaps endpoint and return ConnectBasemap results.

    An empty *text* returns every supported basemap; otherwise the name
    and description are matched case-insensitively.

    Raises an Exception when the server reply is not valid JSON.
    """
    searchUrl = "{}/basemaps?version={}".format(pluginSetting("connectEndpoint"),
                                                pluginSetting("apiVersion"))
    headers = {"Authorization": "Bearer {}".format(token)}
    nam = NetworkAccessManager()
    res, content = nam.request(searchUrl, headers=headers)
    try:
        j = json.loads(content)
    except ValueError:
        # json.loads signals malformed input with ValueError; the previous
        # bare ``except:`` also swallowed KeyboardInterrupt/SystemExit.
        raise Exception("Unable to parse server reply.")
    maps = [l for l in j if basemaputils.isSupported(l)]
    needle = text.lower()
    results = []
    for item in maps:
        # An empty search string matches every basemap.
        if (needle == '' or needle in item["name"].lower()
                or needle in item["description"].lower()):
            results.append(
                ConnectBasemap(item["endpoint"],
                               item["name"],
                               item["description"],
                               item,
                               item["accessList"]))
    return results
# Module-level cache of the Connect API token: set by getToken(),
# cleared by resetToken().
token = None
def getToken(login, password):
    """Return a cached or freshly requested Connect API token.

    The token is cached in the module-level ``token`` until resetToken()
    is called.  Returns None (best effort) when the request fails or the
    reply cannot be parsed.
    """
    global token
    if token:
        return token
    token = None
    payload = {"username": login,
               "password": password}
    headers = {"Content-Type": "application/json"}
    url = "{}/token?version={}".format(pluginSetting("connectEndpoint"),
                                       pluginSetting("apiVersion"))
    nam = NetworkAccessManager()
    try:
        res, data = nam.request(url, method="POST",
                                body=json.dumps(payload), headers=headers)
    except Exception:
        # Best effort: any request failure simply yields no token.
        return token
    try:
        # Fixed typo ("responce") and narrowed the former bare ``except:``,
        # which also swallowed KeyboardInterrupt/SystemExit.
        response = json.loads(str(data))
        token = response["token"]
    except (ValueError, KeyError, TypeError):
        # Malformed or unexpected reply: leave the token unset.
        pass
    return token
def resetToken():
    """Forget the cached API token so the next getToken() re-authenticates."""
    global token
    token = None
| gpl-2.0 |
WillianPaiva/1flow | oneflow/core/migrations/0045_auto__chg_field_subscription_name.py | 2 | 37393 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Subscription.name'
db.alter_column(u'core_subscription', 'name', self.gf('django.db.models.fields.CharField')(max_length=255, null=True))
def backwards(self, orm):
# User chose to not deal with backwards NULL issues for 'Subscription.name'
raise RuntimeError("Cannot reverse this migration. 'Subscription.name' and its values cannot be restored.")
# The following code is provided here to aid in writing a correct migration
# Changing field 'Subscription.name'
db.alter_column(u'core_subscription', 'name', self.gf('django.db.models.fields.CharField')(max_length=255))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'base.user': {
'Meta': {'object_name': 'User'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'}),
'email_announcements': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
'hash_codes': ('jsonfield.fields.JSONField', [], {'default': "{'unsubscribe': '29cb5501a4d84e55b4958240104cbe40'}", 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'register_data': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'sent_emails': ('jsonfield.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '254', 'db_index': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'core.article': {
'Meta': {'object_name': 'Article', '_ormbases': ['core.BaseItem']},
u'baseitem_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseItem']", 'unique': 'True', 'primary_key': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'date_published': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'excerpt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'is_orphaned': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publishers': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'publications'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.User']"}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
'url_absolute': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'url_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'word_count': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
'core.author': {
'Meta': {'unique_together': "(('name', 'website'),)", 'object_name': 'Author'},
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Author']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_unsure': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'origin_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'}),
'website': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'})
},
'core.basefeed': {
'Meta': {'object_name': 'BaseFeed'},
'closed_reason': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_closed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_last_fetch': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseFeed']", 'null': 'True', 'blank': 'True'}),
'errors': ('json_field.fields.JSONField', [], {'default': '[]', 'blank': 'True'}),
'fetch_interval': ('django.db.models.fields.IntegerField', [], {'default': '43200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_good': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_internal': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.BaseItem']"}),
'languages': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Language']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'options': ('json_field.fields.JSONField', [], {'default': '{}', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.basefeed_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'short_description_en': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_fr': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_nt': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.SimpleTag']"}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'thumbnail_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'})
},
'core.baseitem': {
'Meta': {'object_name': 'BaseItem'},
'authors': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'authored_items'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Author']"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'default_rating': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.BaseItem']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'origin': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.baseitem_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'sources': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'sources_rel_+'", 'null': 'True', 'to': "orm['core.BaseItem']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'items'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.SimpleTag']"}),
'text_direction': ('django.db.models.fields.CharField', [], {'default': "u'ltr'", 'max_length': '3'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']", 'null': 'True', 'blank': 'True'})
},
'core.combinedfeed': {
'Meta': {'object_name': 'CombinedFeed'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_restricted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.combinedfeedrule': {
'Meta': {'ordering': "('position',)", 'object_name': 'CombinedFeedRule'},
'check_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'clone_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeedRule']", 'null': 'True', 'blank': 'True'}),
'combinedfeed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.CombinedFeed']"}),
'feeds': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['core.BaseFeed']", 'symmetrical': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'core.corepermissions': {
'Meta': {'object_name': 'CorePermissions'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'core.folder': {
'Meta': {'unique_together': "(('name', 'user', 'parent'),)", 'object_name': 'Folder'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.Folder']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'folders'", 'to': u"orm['base.User']"})
},
'core.helpcontent': {
'Meta': {'ordering': "['ordering', 'id']", 'object_name': 'HelpContent'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'content_en': ('django.db.models.fields.TextField', [], {}),
'content_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'content_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'name_fr': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'name_nt': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'ordering': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'core.historyentry': {
'Meta': {'object_name': 'HistoryEntry'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'polymorphic_ctype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'polymorphic_core.historyentry_set'", 'null': 'True', 'to': u"orm['contenttypes.ContentType']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"})
},
'core.language': {
'Meta': {'object_name': 'Language'},
'dj_code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '16'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Language']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.Language']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'core.mailaccount': {
'Meta': {'unique_together': "(('user', 'hostname', 'username'),)", 'object_name': 'MailAccount'},
'conn_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_last_conn': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2007, 1, 1, 0, 0)'}),
'hostname': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_usable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'port': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'use_ssl': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['base.User']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'})
},
'core.mailfeed': {
'Meta': {'object_name': 'MailFeed', '_ormbases': ['core.BaseFeed']},
'account': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'mail_feeds'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.MailAccount']"}),
u'basefeed_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseFeed']", 'unique': 'True', 'primary_key': 'True'}),
'finish_action': ('django.db.models.fields.CharField', [], {'default': "u'markread'", 'max_length': '10'}),
'match_action': ('django.db.models.fields.CharField', [], {'default': "u'scrape'", 'max_length': '10'}),
'rules_operation': ('django.db.models.fields.CharField', [], {'default': "u'any'", 'max_length': '10'}),
'scrape_blacklist': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'scrape_whitelist': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'})
},
'core.mailfeedrule': {
'Meta': {'ordering': "('group', 'position')", 'object_name': 'MailFeedRule'},
'check_error': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '255', 'blank': 'True'}),
'clone_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeedRule']", 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'group_operation': ('django.db.models.fields.CharField', [], {'default': "u'any'", 'max_length': '10', 'null': 'True', 'blank': 'True'}),
'header_field': ('django.db.models.fields.CharField', [], {'default': "u'any'", 'max_length': '10'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mailfeed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.MailFeed']"}),
'match_case': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'match_type': ('django.db.models.fields.CharField', [], {'default': "u'contains'", 'max_length': '10'}),
'match_value': ('django.db.models.fields.CharField', [], {'default': "u''", 'max_length': '1024'}),
'other_header': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'})
},
'core.originaldata': {
'Meta': {'object_name': 'OriginalData'},
'feedparser': ('django.db.models.fields.TextField', [], {}),
'feedparser_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'google_reader': ('django.db.models.fields.TextField', [], {}),
'google_reader_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'item': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'original_data'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['core.BaseItem']"}),
'raw_email': ('django.db.models.fields.TextField', [], {}),
'raw_email_processed': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'core.read': {
'Meta': {'unique_together': "(('user', 'item'),)", 'object_name': 'Read'},
'bookmark_type': ('django.db.models.fields.CharField', [], {'default': "u'U'", 'max_length': '2'}),
'check_set_subscriptions_131004_done': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'date_analysis': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_archived': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_auto_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_bookmarked': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'date_fact': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_fun': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_knowhow': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_knowledge': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_number': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_prospective': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_quote': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_read': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_rules': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_starred': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_analysis': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_archived': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_auto_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_bookmarked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_fact': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_fun': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_good': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_knowhow': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_knowledge': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_number': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_prospective': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_quote': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_read': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_rules': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_starred': ('django.db.models.fields.NullBooleanField', [], {'null': 'True', 'blank': 'True'}),
'item': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reads'", 'to': "orm['core.BaseItem']"}),
'knowledge_type': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'senders': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'reads_sent'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['base.User']"}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'reads'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.SimpleTag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'all_reads'", 'to': u"orm['base.User']"})
},
'core.rssatomfeed': {
'Meta': {'object_name': 'RssAtomFeed', '_ormbases': ['core.BaseFeed']},
u'basefeed_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.BaseFeed']", 'unique': 'True', 'primary_key': 'True'}),
'last_etag': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'last_modified': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200'}),
'website': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'})
},
'core.simpletag': {
'Meta': {'unique_together': "(('name', 'language'),)", 'object_name': 'SimpleTag'},
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.SimpleTag']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.Language']", 'null': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'origin_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'origin_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.SimpleTag']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'core.subscription': {
'Meta': {'unique_together': "(('feed', 'user'),)", 'object_name': 'Subscription'},
'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subscriptions'", 'blank': 'True', 'to': "orm['core.BaseFeed']"}),
'folders': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Folder']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'items': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Read']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'subscriptions'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.SimpleTag']"}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'thumbnail_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'all_subscriptions'", 'blank': 'True', 'to': u"orm['base.User']"})
},
'core.userfeeds': {
'Meta': {'object_name': 'UserFeeds'},
'blogs': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['core.BaseFeed']", 'null': 'True', 'blank': 'True'}),
'imported_items': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'imported_items_feeds'", 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'received_items': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'received_items_feeds'", 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'sent_items': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sent_items_feeds'", 'null': 'True', 'to': "orm['core.BaseFeed']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'feeds'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['base.User']"}),
'written_items': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'written_items_feeds'", 'null': 'True', 'to': "orm['core.BaseFeed']"})
},
'core.userimport': {
'Meta': {'object_name': 'UserImport', '_ormbases': ['core.HistoryEntry']},
'date_finished': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'historyentry_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['core.HistoryEntry']", 'unique': 'True', 'primary_key': 'True'}),
'lines': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'results': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'urls': ('django.db.models.fields.TextField', [], {})
},
'core.usersubscriptions': {
'Meta': {'object_name': 'UserSubscriptions'},
'blogs': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'blogs'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['core.Subscription']"}),
'imported_items': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'imported_items_subscriptions'", 'null': 'True', 'to': "orm['core.Subscription']"}),
'received_items': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'received_items_subscriptions'", 'null': 'True', 'to': "orm['core.Subscription']"}),
'sent_items': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'sent_items_subscriptions'", 'null': 'True', 'to': "orm['core.Subscription']"}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'subscriptions'", 'unique': 'True', 'primary_key': 'True', 'to': u"orm['base.User']"}),
'written_items': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'written_items_subscriptions'", 'null': 'True', 'to': "orm['core.Subscription']"})
},
'core.website': {
'Meta': {'object_name': 'WebSite'},
'description_en': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_fr': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'description_nt': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'duplicate_of': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['core.WebSite']", 'null': 'True', 'blank': 'True'}),
'fetch_limit_nr': ('django.db.models.fields.IntegerField', [], {'default': '16', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'image_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'mail_warned': ('jsonfield.fields.JSONField', [], {'default': "u'[]'", 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['core.WebSite']"}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'short_description_en': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_fr': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'short_description_nt': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['core'] | agpl-3.0 |
minorua/QGIS | python/plugins/processing/gui/PostgisTableSelector.py | 13 | 3823 | # -*- coding: utf-8 -*-
"""
***************************************************************************
PostgisTableSelector.py
---------------------
Date : November 2015
Copyright : (C) 2015 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'November 2015'
__copyright__ = '(C) 2015, Victor Olaya'
import os
import warnings
from qgis.PyQt.QtGui import QIcon
from qgis.PyQt.QtWidgets import QTreeWidgetItem, QMessageBox
from qgis.PyQt import uic
from qgis.core import QgsSettings
from processing.tools.postgis import GeoDB
pluginPath = os.path.split(os.path.dirname(__file__))[0]
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'DlgPostgisTableSelector.ui'))
class PostgisTableSelector(BASE, WIDGET):
    """Dialog letting the user pick a PostGIS connection/schema and type a
    table name.

    After the dialog is accepted the selection is exposed through the
    ``connection``, ``schema`` and ``table`` attributes; all three stay
    ``None`` when the dialog is cancelled.
    """

    def __init__(self, parent, tablename):
        super(PostgisTableSelector, self).__init__(parent)
        self.connection = None  # name of the chosen PostgreSQL connection
        self.table = None       # table name typed by the user
        self.schema = None      # schema selected in the tree
        self.setupUi(self)
        # Populate the tree with every PostgreSQL connection stored in the
        # QGIS settings; schemas are loaded lazily on node expansion.
        settings = QgsSettings()
        settings.beginGroup('/PostgreSQL/connections/')
        names = settings.childGroups()
        settings.endGroup()
        for n in names:
            item = ConnectionItem(n)
            self.treeConnections.addTopLevelItem(item)

        def itemExpanded(item):
            # Connecting to the database may fail (bad credentials, server
            # down, ...); the node is then simply left without children.
            # Fixed: catch Exception instead of a bare ``except:`` so that
            # KeyboardInterrupt/SystemExit are no longer swallowed.
            try:
                item.populateSchemas()
            except Exception:
                pass

        self.treeConnections.itemExpanded.connect(itemExpanded)
        self.textTableName.setText(tablename)
        self.buttonBox.accepted.connect(self.okPressed)
        self.buttonBox.rejected.connect(self.cancelPressed)

    def cancelPressed(self):
        """Close the dialog without recording any selection."""
        self.close()

    def okPressed(self):
        """Validate the input; on success store the selection and close."""
        if self.textTableName.text().strip() == "":
            # Highlight the empty table-name field instead of closing.
            self.textTableName.setStyleSheet("QLineEdit{background: yellow}")
            return
        item = self.treeConnections.currentItem()
        # A valid choice is a schema node, i.e. a child of a ConnectionItem.
        # Fixed: ``currentItem()`` returns None when nothing is selected,
        # which previously raised AttributeError below.
        if item is None or isinstance(item, ConnectionItem):
            QMessageBox.warning(self, "Wrong selection", "Select a schema item in the tree")
            return
        self.schema = item.text(0)
        self.table = self.textTableName.text().strip()
        self.connection = item.parent().text(0)
        self.close()
class ConnectionItem(QTreeWidgetItem):
    """Top-level tree item representing one stored PostgreSQL connection.

    Child schema items are created on demand by :meth:`populateSchemas`.
    """

    def __init__(self, connection):
        # Icons are created before the base-class constructor on purpose,
        # preserving the original attribute-initialisation order.
        self.connIcon = QIcon(os.path.dirname(__file__) + '/../images/postgis.png')
        self.schemaIcon = QIcon(os.path.dirname(__file__) + '/../images/namespace.png')
        QTreeWidgetItem.__init__(self)
        self.setChildIndicatorPolicy(QTreeWidgetItem.ShowIndicator)
        self.connection = connection
        self.setText(0, connection)
        self.setIcon(0, self.connIcon)

    def populateSchemas(self):
        """Fetch this connection's schemas and add them as child items.

        Does nothing when the children have already been loaded.
        """
        if self.childCount():
            return
        database = GeoDB.from_name(self.connection)
        for _oid, schema_name, _owner, _perms in database.list_schemas():
            child = QTreeWidgetItem()
            child.setText(0, schema_name)
            child.setIcon(0, self.schemaIcon)
            self.addChild(child)
| gpl-2.0 |
dataxu/ansible | lib/ansible/modules/network/aci/aci_tenant_span_dst_group.py | 14 | 6644 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: aci_tenant_span_dst_group
short_description: Manage SPAN destination groups (span:DestGrp)
description:
- Manage SPAN destination groups on Cisco ACI fabrics.
notes:
- The C(tenant) used must exist before using this module in your playbook.
The M(aci_tenant) module can be used for this.
- More information about the internal APIC class B(span:DestGrp) from
L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Dag Wieers (@dagwieers)
version_added: '2.4'
options:
dst_group:
description:
- The name of the SPAN destination group.
required: yes
aliases: [ name ]
description:
description:
- The description of the SPAN destination group.
aliases: [ descr ]
tenant:
description:
- The name of the tenant.
required: yes
aliases: [ tenant_name ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
# FIXME: Add more, better examples
EXAMPLES = r'''
- aci_tenant_span_dst_group:
host: apic
username: admin
password: SomeSecretPassword
dst_group: '{{ dst_group }}'
description: '{{ descr }}'
tenant: '{{ tenant }}'
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
def main():
    """Ansible entry point: manage a SPAN destination group under a tenant."""
    argument_spec = aci_argument_spec()
    argument_spec.update(
        dst_group=dict(type='str', required=False, aliases=['name']),  # Not required for querying all objects
        tenant=dict(type='str', required=False, aliases=['tenant_name']),  # Not required for querying all objects
        description=dict(type='str', aliases=['descr']),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
        method=dict(type='str', choices=['delete', 'get', 'post'], aliases=['action'], removed_in_version='2.6'),  # Deprecated starting from v2.6
        protocol=dict(type='str', removed_in_version='2.6'),  # Deprecated in v2.6
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        # dst_group and tenant may only be omitted for state=query
        # (querying all objects); present/absent need a concrete target.
        required_if=[
            ['state', 'absent', ['dst_group', 'tenant']],
            ['state', 'present', ['dst_group', 'tenant']],
        ],
    )
    dst_group = module.params['dst_group']
    description = module.params['description']
    state = module.params['state']
    tenant = module.params['tenant']
    aci = ACIModule(module)
    # Build the REST URL: the tenant (fvTenant) is the root object and the
    # SPAN destination group (spanDestGrp) is nested below it.
    aci.construct_url(
        root_class=dict(
            aci_class='fvTenant',
            aci_rn='tn-{0}'.format(tenant),
            filter_target='eq(fvTenant.name, "{0}")'.format(tenant),
            module_object=tenant,
        ),
        subclass_1=dict(
            aci_class='spanDestGrp',
            aci_rn='destgrp-{0}'.format(dst_group),
            filter_target='eq(spanDestGrp.name, "{0}")'.format(dst_group),
            module_object=dst_group,
        ),
    )
    # Fetch the current configuration so a diff can be computed below.
    aci.get_existing()
    if state == 'present':
        # Propose the desired configuration and push only the difference.
        aci.payload(
            aci_class='spanDestGrp',
            class_config=dict(
                name=dst_group,
                descr=description,
            ),
        )
        aci.get_diff(aci_class='spanDestGrp')
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()
    # state == 'query' falls through: get_existing() already collected
    # everything that exit_json() reports.
    aci.exit_json()
if __name__ == "__main__":
main()
| gpl-3.0 |
wwj718/murp-edx | cms/djangoapps/contentstore/features/problem-editor.py | 7 | 12170 | # disable missing docstring
# pylint: disable=C0111
import json
from lettuce import world, step
from nose.tools import assert_equal, assert_true # pylint: disable=E0611
from common import type_in_codemirror, open_new_course
from advanced_settings import change_value, ADVANCED_MODULES_KEY
from course_import import import_file
DISPLAY_NAME = "Display Name"
MAXIMUM_ATTEMPTS = "Maximum Attempts"
PROBLEM_WEIGHT = "Problem Weight"
RANDOMIZATION = 'Randomization'
SHOW_ANSWER = "Show Answer"
TIMER_BETWEEN_ATTEMPTS = "Timer Between Attempts"
MATLAB_API_KEY = "Matlab API key"
@step('I have created a Blank Common Problem$')
def i_created_blank_common_problem(step):
world.create_course_with_unit()
step.given("I have created another Blank Common Problem")
@step('I have created a unit with advanced module "(.*)"$')
def i_created_unit_with_advanced_module(step, advanced_module):
world.create_course_with_unit()
url = world.browser.url
step.given("I select the Advanced Settings")
change_value(step, ADVANCED_MODULES_KEY, '["{}"]'.format(advanced_module))
world.visit(url)
world.wait_for_xmodule()
@step('I have created an advanced component "(.*)" of type "(.*)"')
def i_create_new_advanced_component(step, component_type, advanced_component):
world.create_component_instance(
step=step,
category='advanced',
component_type=component_type,
advanced_component=advanced_component
)
@step('I have created another Blank Common Problem$')
def i_create_new_common_problem(step):
world.create_component_instance(
step=step,
category='problem',
component_type='Blank Common Problem'
)
@step('when I mouseover on "(.*)"')
def i_mouseover_on_html_component(step, element_class):
action_css = '.{}'.format(element_class)
world.trigger_event(action_css, event='mouseover')
@step(u'I can see Reply to Annotation link$')
def i_see_reply_to_annotation_link(_step):
css_selector = 'a.annotatable-reply'
world.wait_for_visible(css_selector)
@step(u'I see that page has scrolled "(.*)" when I click on "(.*)" link$')
def i_see_annotation_problem_page_scrolls(_step, scroll_direction, link_css):
    """
    Click the link with CSS class ``link_css`` and assert that the window
    scrolled in ``scroll_direction`` ("up" or "down").
    """
    scroll_js = "$(window).scrollTop();"
    scroll_height_before = world.browser.evaluate_script(scroll_js)
    world.css_click("a.{}".format(link_css))
    scroll_height_after = world.browser.evaluate_script(scroll_js)
    if scroll_direction == "up":
        assert scroll_height_after < scroll_height_before
    elif scroll_direction == "down":
        assert scroll_height_after > scroll_height_before
    else:
        # Previously any other direction silently passed the step; fail
        # loudly so a typo in the feature file cannot hide a regression.
        raise ValueError(
            "scroll_direction must be 'up' or 'down', got {!r}".format(scroll_direction))
@step('I have created an advanced problem of type "(.*)"$')
def i_create_new_advanced_problem(step, component_type):
world.create_component_instance(
step=step,
category='problem',
component_type=component_type,
is_advanced=True
)
@step('I edit and select Settings$')
def i_edit_and_select_settings(_step):
world.edit_component_and_select_settings()
@step('I see the advanced settings and their expected values$')
def i_see_advanced_settings_with_values(step):
world.verify_all_setting_entries(
[
[DISPLAY_NAME, "Blank Common Problem", True],
[MATLAB_API_KEY, "", False],
[MAXIMUM_ATTEMPTS, "", False],
[PROBLEM_WEIGHT, "", False],
[RANDOMIZATION, "Never", False],
[SHOW_ANSWER, "Finished", False],
[TIMER_BETWEEN_ATTEMPTS, "0", False],
])
@step('I can modify the display name')
def i_can_modify_the_display_name(_step):
# Verifying that the display name can be a string containing a floating point value
# (to confirm that we don't throw an error because it is of the wrong type).
index = world.get_setting_entry_index(DISPLAY_NAME)
world.set_field_value(index, '3.4')
verify_modified_display_name()
@step('my display name change is persisted on save')
def my_display_name_change_is_persisted_on_save(step):
world.save_component_and_reopen(step)
verify_modified_display_name()
@step('the problem display name is "(.*)"$')
def verify_problem_display_name(step, name):
assert_equal(name.upper(), world.browser.find_by_css('.problem-header').text)
@step('I can specify special characters in the display name')
def i_can_modify_the_display_name_with_special_chars(_step):
index = world.get_setting_entry_index(DISPLAY_NAME)
world.set_field_value(index, "updated ' \" &")
verify_modified_display_name_with_special_chars()
@step('my special characters and persisted on save')
def special_chars_persisted_on_save(step):
world.save_component_and_reopen(step)
verify_modified_display_name_with_special_chars()
@step('I can revert the display name to unset')
def can_revert_display_name_to_unset(_step):
world.revert_setting_entry(DISPLAY_NAME)
verify_unset_display_name()
@step('my display name is unset on save')
def my_display_name_is_persisted_on_save(step):
world.save_component_and_reopen(step)
verify_unset_display_name()
@step('I can select Per Student for Randomization')
def i_can_select_per_student_for_randomization(_step):
world.browser.select(RANDOMIZATION, "Per Student")
verify_modified_randomization()
@step('my change to randomization is persisted')
def my_change_to_randomization_is_persisted(step):
world.save_component_and_reopen(step)
verify_modified_randomization()
@step('I can revert to the default value for randomization')
def i_can_revert_to_default_for_randomization(step):
world.revert_setting_entry(RANDOMIZATION)
world.save_component_and_reopen(step)
world.verify_setting_entry(world.get_setting_entry(RANDOMIZATION), RANDOMIZATION, "Never", False)
@step('I can set the weight to "(.*)"?')
def i_can_set_weight(_step, weight):
set_weight(weight)
verify_modified_weight()
@step('my change to weight is persisted')
def my_change_to_weight_is_persisted(step):
world.save_component_and_reopen(step)
verify_modified_weight()
@step('I can revert to the default value of unset for weight')
def i_can_revert_to_default_for_unset_weight(step):
world.revert_setting_entry(PROBLEM_WEIGHT)
world.save_component_and_reopen(step)
world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "", False)
@step('if I set the weight to "(.*)", it remains unset')
def set_the_weight_to_abc(step, bad_weight):
set_weight(bad_weight)
# We show the clear button immediately on type, hence the "True" here.
world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "", True)
world.save_component_and_reopen(step)
# But no change was actually ever sent to the model, so on reopen, explicitly_set is False
world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "", False)
@step('if I set the max attempts to "(.*)", it will persist as a valid integer$')
def set_the_max_attempts(step, max_attempts_set):
    """Type an arbitrary value into Maximum Attempts, save/reopen, and check
    that the persisted value is a non-negative integer."""
    # on firefox with selenium, the behavior is different.
    # eg 2.34 displays as 2.34 and is persisted as 2
    index = world.get_setting_entry_index(MAXIMUM_ATTEMPTS)
    world.set_field_value(index, max_attempts_set)
    world.save_component_and_reopen(step)
    # Re-read the field after the round-trip; int() also guards against
    # non-numeric garbage having been persisted.
    value = world.css_value('input.setting-input', index=index)
    assert value != "", "max attempts is blank"
    assert int(value) >= 0
@step('Edit High Level Source is not visible')
def edit_high_level_source_not_visible(step):
verify_high_level_source_links(step, False)
@step('Edit High Level Source is visible')
def edit_high_level_source_links_visible(step):
verify_high_level_source_links(step, True)
@step('If I press Cancel my changes are not persisted')
def cancel_does_not_save_changes(step):
world.cancel_component(step)
step.given("I edit and select Settings")
step.given("I see the advanced settings and their expected values")
@step('I have enabled latex compiler')
def enable_latex_compiler(step):
url = world.browser.url
step.given("I select the Advanced Settings")
change_value(step, 'Enable LaTeX Compiler', 'true')
world.visit(url)
world.wait_for_xmodule()
@step('I have created a LaTeX Problem')
def create_latex_problem(step):
world.create_course_with_unit()
step.given('I have enabled latex compiler')
world.create_component_instance(
step=step,
category='problem',
component_type='Problem Written in LaTeX',
is_advanced=True
)
@step('I edit and compile the High Level Source')
def edit_latex_source(_step):
open_high_level_source()
type_in_codemirror(1, "hi")
world.css_click('.hls-compile')
@step('my change to the High Level Source is persisted')
def high_level_source_persisted(_step):
def verify_text(driver):
css_sel = '.problem div>span'
return world.css_text(css_sel) == 'hi'
world.wait_for(verify_text, timeout=10)
@step('I view the High Level Source I see my changes')
def high_level_source_in_editor(_step):
open_high_level_source()
assert_equal('hi', world.css_value('.source-edit-box'))
@step(u'I have an empty course')
def i_have_empty_course(step):
open_new_course()
@step(u'I import the file "([^"]*)"$')
def i_import_the_file(_step, filename):
import_file(filename)
@step(u'I go to the vertical "([^"]*)"$')
def i_go_to_vertical(_step, vertical):
world.css_click("span:contains('{0}')".format(vertical))
@step(u'I go to the unit "([^"]*)"$')
def i_go_to_unit(_step, unit):
loc = "window.location = $(\"span:contains('{0}')\").closest('a').attr('href')".format(unit)
world.browser.execute_script(loc)
@step(u'I see a message that says "([^"]*)"$')
def i_can_see_message(_step, msg):
msg = json.dumps(msg) # escape quotes
world.css_has_text("h2.title", msg)
@step(u'I can edit the problem$')
def i_can_edit_problem(_step):
world.edit_component()
@step(u'I edit first blank advanced problem for annotation response$')
def i_edit_blank_problem_for_annotation_response(_step):
edit_css = """$('.component-header:contains("Blank Advanced Problem")').parent().find('a.edit-button').click()"""
text = """
<problem>
<annotationresponse>
<annotationinput><text>Text of annotation</text></annotationinput>
</annotationresponse>
</problem>"""
world.browser.execute_script(edit_css)
world.wait_for_ajax_complete()
type_in_codemirror(0, text)
world.save_component()
@step(u'I can see cheatsheet$')
def verify_cheat_sheet_displaying(_step):
world.css_click("a.cheatsheet-toggle")
css_selector = 'article.simple-editor-cheatsheet'
world.wait_for_visible(css_selector)
def verify_high_level_source_links(step, visible):
if visible:
assert_true(world.is_css_present('.launch-latex-compiler'),
msg="Expected to find the latex button but it is not present.")
else:
assert_true(world.is_css_not_present('.launch-latex-compiler'),
msg="Expected not to find the latex button but it is present.")
world.cancel_component(step)
def verify_modified_weight():
world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "3.5", True)
def verify_modified_randomization():
world.verify_setting_entry(world.get_setting_entry(RANDOMIZATION), RANDOMIZATION, "Per Student", True)
def verify_modified_display_name():
world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, '3.4', True)
def verify_modified_display_name_with_special_chars():
world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, "updated ' \" &", True)
def verify_unset_display_name():
world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, 'Blank Advanced Problem', False)
def set_weight(weight):
index = world.get_setting_entry_index(PROBLEM_WEIGHT)
world.set_field_value(index, weight)
def open_high_level_source():
world.edit_component()
world.css_click('.launch-latex-compiler > a')
| agpl-3.0 |
tomwire/AutobahnPython | examples/asyncio/websocket/testee/testee_server.py | 2 | 3666 | ###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Tavendo GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import autobahn
from autobahn.asyncio.websocket import WebSocketServerProtocol, \
WebSocketServerFactory
from autobahn.websocket.compress import *
class TesteeServerProtocol(WebSocketServerProtocol):
    """Buffered echo protocol for the WebSocket testsuite: each complete
    message is sent back to the peer with its original binary/text mode."""

    def onMessage(self, payload, isBinary):
        # Echo the message verbatim, preserving text/binary framing.
        self.sendMessage(payload, isBinary)
class StreamingTesteeServerProtocol(WebSocketServerProtocol):
    """Streaming echo protocol: frames are relayed back as they arrive
    instead of buffering the whole message, mirroring each on* callback
    with the corresponding send/begin/end call."""

    def onMessageBegin(self, isBinary):
        # Chain to the base class, then open the outgoing echo message.
        WebSocketServerProtocol.onMessageBegin(self, isBinary)
        self.beginMessage(isBinary)

    def onMessageFrameBegin(self, length):
        # Mirror the incoming frame header on the outgoing side.
        WebSocketServerProtocol.onMessageFrameBegin(self, length)
        self.beginMessageFrame(length)

    def onMessageFrameData(self, payload):
        # Forward each chunk immediately (zero buffering).
        self.sendMessageFrameData(payload)

    def onMessageFrameEnd(self):
        # Nothing to do: frame length was announced up-front in
        # onMessageFrameBegin, so the frame closes itself.
        pass

    def onMessageEnd(self):
        # Close the outgoing echo message.
        self.endMessage()
class TesteeServerFactory(WebSocketServerFactory):
    """Factory for the testee server; selects the streaming echo protocol
    and configures protocol options plus permessage-deflate compression."""

    # protocol = TesteeServerProtocol
    protocol = StreamingTesteeServerProtocol

    def __init__(self, url, debug=False, ident=None):
        # ``ident`` overrides the server identification string sent to peers;
        # otherwise an Autobahn version string is used.
        if ident is not None:
            server = ident
        else:
            server = "AutobahnPython-Asyncio/%s" % autobahn.version
        WebSocketServerFactory.__init__(self, url, debug=debug, debugCodePaths=debug, server=server)
        # NOTE(review): the second call below overrides the first, so the
        # effective setting is failByDrop=True (the streaming-mode choice
        # wins); the first call is dead code kept for documentation value.
        self.setProtocolOptions(failByDrop=False)  # spec conformance
        self.setProtocolOptions(failByDrop=True)  # needed for streaming mode
        # self.setProtocolOptions(utf8validateIncoming = False)
        # enable permessage-deflate
        ##
        def accept(offers):
            # Accept the first permessage-deflate offer from the client;
            # implicitly returns None (no compression) when none is offered.
            for offer in offers:
                if isinstance(offer, PerMessageDeflateOffer):
                    return PerMessageDeflateOfferAccept(offer)
        self.setProtocolOptions(perMessageCompressionAccept=accept)
if __name__ == '__main__':
    try:
        import asyncio
    except ImportError:
        # Trollius >= 0.3 was renamed
        import trollius as asyncio
    # The factory URL and the listen port below must agree.
    factory = TesteeServerFactory("ws://127.0.0.1:9002", debug=False)
    loop = asyncio.get_event_loop()
    coro = loop.create_server(factory, port=9002)
    server = loop.run_until_complete(coro)
    try:
        # Serve until interrupted.
        loop.run_forever()
    except KeyboardInterrupt:
        # Ctrl-C: fall through to cleanup below.
        pass
    finally:
        server.close()
        loop.close()
astagi/django-cms | cms/admin/permissionadmin.py | 4 | 7322 | # -*- coding: utf-8 -*-
from copy import deepcopy
from django.contrib import admin
from django.contrib.admin import site
from django.contrib.auth.admin import UserAdmin
from django.utils.translation import ugettext as _
from cms.admin.forms import GlobalPagePermissionAdminForm, PagePermissionInlineAdminForm, ViewRestrictionInlineAdminForm
from cms.exceptions import NoPermissionsException
from cms.models import Page, PagePermission, GlobalPagePermission, PageUser
from cms.utils.compat.dj import get_user_model
from cms.utils.conf import get_cms_setting
from cms.utils.helpers import classproperty
from cms.utils.permissions import get_user_permission_level
PERMISSION_ADMIN_INLINES = []
user_model = get_user_model()
admin_class = UserAdmin
for model, admin_instance in site._registry.items():
if model == user_model:
admin_class = admin_instance.__class__
class TabularInline(admin.TabularInline):
    # Thin local subclass of Django's TabularInline, presumably kept as a
    # single extension point for the permission inlines below -- TODO confirm.
    pass
class PagePermissionInlineAdmin(TabularInline):
model = PagePermission
# use special form, so we can override of user and group field
form = PagePermissionInlineAdminForm
classes = ['collapse', 'collapsed']
exclude = ['can_view']
extra = 0 # edit page load time boost
@classproperty
def raw_id_fields(cls):
# Dynamically set raw_id_fields based on settings
threshold = get_cms_setting('RAW_ID_USERS')
if threshold and get_user_model().objects.count() > threshold:
return ['user']
return []
def get_queryset(self, request):
"""
Queryset change, so user with global change permissions can see
all permissions. Otherwise can user see only permissions for
peoples which are under him (he can't see his permissions, because
this will lead to violation, when he can add more power to itself)
"""
# can see only permissions for users which are under him in tree
# here an exception can be thrown
try:
qs = PagePermission.objects.subordinate_to_user(request.user)
return qs.filter(can_view=False)
except NoPermissionsException:
return self.objects.get_empty_query_set()
def get_formset(self, request, obj=None, **kwargs):
"""
Some fields may be excluded here. User can change only
permissions which are available for him. E.g. if user does not haves
can_publish flag, he can't change assign can_publish permissions.
"""
exclude = self.exclude or []
if obj:
if not obj.has_add_permission(request):
exclude.append('can_add')
if not obj.has_delete_permission(request):
exclude.append('can_delete')
if not obj.has_publish_permission(request):
exclude.append('can_publish')
if not obj.has_advanced_settings_permission(request):
exclude.append('can_change_advanced_settings')
if not obj.has_move_page_permission(request):
exclude.append('can_move_page')
formset_cls = super(PagePermissionInlineAdmin, self
).get_formset(request, obj=None, exclude=exclude, **kwargs)
qs = self.get_queryset(request)
if obj is not None:
qs = qs.filter(page=obj)
formset_cls._queryset = qs
return formset_cls
class ViewRestrictionInlineAdmin(PagePermissionInlineAdmin):
extra = 0 # edit page load time boost
form = ViewRestrictionInlineAdminForm
verbose_name = _("View restriction")
verbose_name_plural = _("View restrictions")
exclude = [
'can_add', 'can_change', 'can_delete', 'can_view',
'can_publish', 'can_change_advanced_settings', 'can_move_page',
'can_change_permissions'
]
def get_formset(self, request, obj=None, **kwargs):
"""
Some fields may be excluded here. User can change only permissions
which are available for him. E.g. if user does not haves can_publish
flag, he can't change assign can_publish permissions.
"""
formset_cls = super(PagePermissionInlineAdmin, self).get_formset(request, obj, **kwargs)
qs = self.get_queryset(request)
if obj is not None:
qs = qs.filter(page=obj)
formset_cls._queryset = qs
return formset_cls
def get_queryset(self, request):
"""
Returns a QuerySet of all model instances that can be edited by the
admin site. This is used by changelist_view.
"""
qs = PagePermission.objects.subordinate_to_user(request.user)
return qs.filter(can_view=True)
class GlobalPagePermissionAdmin(admin.ModelAdmin):
list_display = ['user', 'group', 'can_change', 'can_delete', 'can_publish', 'can_change_permissions']
list_filter = ['user', 'group', 'can_change', 'can_delete', 'can_publish', 'can_change_permissions']
form = GlobalPagePermissionAdminForm
search_fields = []
for field in admin_class.search_fields:
search_fields.append("user__%s" % field)
search_fields.append('group__name')
exclude = []
list_display.append('can_change_advanced_settings')
list_filter.append('can_change_advanced_settings')
class GenericCmsPermissionAdmin(object):
"""
Custom mixin for permission-enabled admin interfaces.
"""
def update_permission_fieldsets(self, request, obj=None):
"""
Nobody can grant more than he haves, so check for user permissions
to Page and User model and render fieldset depending on them.
"""
fieldsets = deepcopy(self.fieldsets)
perm_models = (
(Page, _('Page permissions')),
(PageUser, _('User & Group permissions')),
(PagePermission, _('Page permissions management')),
)
for i, perm_model in enumerate(perm_models):
model, title = perm_model
opts, fields = model._meta, []
name = model.__name__.lower()
for t in ('add', 'change', 'delete'):
fn = getattr(opts, 'get_%s_permission' % t)
if request.user.has_perm(opts.app_label + '.' + fn()):
fields.append('can_%s_%s' % (t, name))
if fields:
fieldsets.insert(2 + i, (title, {'fields': (fields,)}))
return fieldsets
def _has_change_permissions_permission(self, request):
"""
User is able to add/change objects only if he haves can change
permission on some page.
"""
try:
get_user_permission_level(request.user)
except NoPermissionsException:
return False
return True
def has_add_permission(self, request):
return self._has_change_permissions_permission(request) and \
super(self.__class__, self).has_add_permission(request)
def has_change_permission(self, request, obj=None):
return self._has_change_permissions_permission(request) and \
super(self.__class__, self).has_change_permission(request, obj)
if get_cms_setting('PERMISSION'):
admin.site.register(GlobalPagePermission, GlobalPagePermissionAdmin)
PERMISSION_ADMIN_INLINES.extend([
ViewRestrictionInlineAdmin,
PagePermissionInlineAdmin,
])
| bsd-3-clause |
11craft/pelican | pelican/tools/pelican_import.py | 5 | 31970 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
import argparse
try:
from html import unescape # py3.4+
except ImportError:
from six.moves.html_parser import HTMLParser
unescape = HTMLParser().unescape
import os
import re
import subprocess
import sys
import time
import logging
from codecs import open
from six.moves.urllib.error import URLError
from six.moves.urllib.parse import urlparse
from six.moves.urllib.request import urlretrieve
# pelican.log has to be the first pelican module to be loaded
# because logging.setLoggerClass has to be called before logging.getLogger
from pelican.log import init
from pelican.utils import slugify, SafeDatetime
logger = logging.getLogger(__name__)
def decode_wp_content(content, br=True):
pre_tags = {}
if content.strip() == "":
return ""
content += "\n"
if "<pre" in content:
pre_parts = content.split("</pre>")
last_pre = pre_parts.pop()
content = ""
pre_index = 0
for pre_part in pre_parts:
start = pre_part.find("<pre")
if start == -1:
content = content + pre_part
continue
name = "<pre wp-pre-tag-{0}></pre>".format(pre_index)
pre_tags[name] = pre_part[start:] + "</pre>"
content = content + pre_part[0:start] + name
pre_index += 1
content = content + last_pre
content = re.sub(r'<br />\s*<br />', "\n\n", content)
allblocks = ('(?:table|thead|tfoot|caption|col|colgroup|tbody|tr|'
'td|th|div|dl|dd|dt|ul|ol|li|pre|select|option|form|'
'map|area|blockquote|address|math|style|p|h[1-6]|hr|'
'fieldset|noscript|samp|legend|section|article|aside|'
'hgroup|header|footer|nav|figure|figcaption|details|'
'menu|summary)')
content = re.sub(r'(<' + allblocks + r'[^>]*>)', "\n\\1", content)
content = re.sub(r'(</' + allblocks + r'>)', "\\1\n\n", content)
# content = content.replace("\r\n", "\n")
if "<object" in content:
# no <p> inside object/embed
content = re.sub(r'\s*<param([^>]*)>\s*', "<param\\1>", content)
content = re.sub(r'\s*</embed>\s*', '</embed>', content)
# content = re.sub(r'/\n\n+/', '\n\n', content)
pgraphs = filter(lambda s: s != "", re.split(r'\n\s*\n', content))
content = ""
for p in pgraphs:
content = content + "<p>" + p.strip() + "</p>\n"
# under certain strange conditions it could create a P of entirely whitespace
content = re.sub(r'<p>\s*</p>', '', content)
content = re.sub(r'<p>([^<]+)</(div|address|form)>', "<p>\\1</p></\\2>", content)
# don't wrap tags
content = re.sub(r'<p>\s*(</?' + allblocks + r'[^>]*>)\s*</p>', "\\1", content)
#problem with nested lists
content = re.sub(r'<p>(<li.*)</p>', "\\1", content)
content = re.sub(r'<p><blockquote([^>]*)>', "<blockquote\\1><p>", content)
content = content.replace('</blockquote></p>', '</p></blockquote>')
content = re.sub(r'<p>\s*(</?' + allblocks + '[^>]*>)', "\\1", content)
content = re.sub(r'(</?' + allblocks + '[^>]*>)\s*</p>', "\\1", content)
if br:
def _preserve_newline(match):
return match.group(0).replace("\n", "<WPPreserveNewline />")
content = re.sub(r'/<(script|style).*?<\/\\1>/s', _preserve_newline, content)
# optionally make line breaks
content = re.sub(r'(?<!<br />)\s*\n', "<br />\n", content)
content = content.replace("<WPPreserveNewline />", "\n")
content = re.sub(r'(</?' + allblocks + r'[^>]*>)\s*<br />', "\\1", content)
content = re.sub(r'<br />(\s*</?(?:p|li|div|dl|dd|dt|th|pre|td|ul|ol)[^>]*>)', '\\1', content)
content = re.sub(r'\n</p>', "</p>", content)
if pre_tags:
def _multi_replace(dic, string):
pattern = r'|'.join(map(re.escape, dic.keys()))
return re.sub(pattern, lambda m: dic[m.group()], string)
content = _multi_replace(pre_tags, content)
return content
def get_items(xml):
"""Opens a wordpress xml file and returns a list of items"""
try:
from bs4 import BeautifulSoup
except ImportError:
error = ('Missing dependency '
'"BeautifulSoup4" and "lxml" required to import Wordpress XML files.')
sys.exit(error)
with open(xml, encoding='utf-8') as infile:
xmlfile = infile.read()
soup = BeautifulSoup(xmlfile, "xml")
items = soup.rss.channel.findAll('item')
return items
def get_filename(filename, post_id):
if filename is not None:
return filename
else:
return post_id
def wp2fields(xml, wp_custpost=False):
"""Opens a wordpress XML file, and yield Pelican fields"""
items = get_items(xml)
for item in items:
if item.find('status').string in ["publish", "draft"]:
try:
# Use HTMLParser due to issues with BeautifulSoup 3
title = unescape(item.title.contents[0])
except IndexError:
title = 'No title [%s]' % item.find('post_name').string
logger.warning('Post "%s" is lacking a proper title', title)
filename = item.find('post_name').string
post_id = item.find('post_id').string
filename = get_filename(filename, post_id)
content = item.find('encoded').string
raw_date = item.find('post_date').string
date_object = time.strptime(raw_date, "%Y-%m-%d %H:%M:%S")
date = time.strftime("%Y-%m-%d %H:%M", date_object)
author = item.find('creator').string
categories = [cat.string for cat in item.findAll('category', {'domain' : 'category'})]
# caturl = [cat['nicename'] for cat in item.find(domain='category')]
tags = [tag.string for tag in item.findAll('category', {'domain' : 'post_tag'})]
# To publish a post the status should be 'published'
status = 'published' if item.find('status').string == "publish" else item.find('status').string
kind = 'article'
post_type = item.find('post_type').string
if post_type == 'page':
kind = 'page'
elif wp_custpost:
if post_type == 'post':
pass
# Old behaviour was to name everything not a page as an article.
# Theoretically all attachments have status == inherit so
# no attachments should be here. But this statement is to
# maintain existing behaviour in case that doesn't hold true.
elif post_type == 'attachment':
pass
else:
kind = post_type
yield (title, content, filename, date, author, categories, tags, status,
kind, "wp-html")
def dc2fields(file):
"""Opens a Dotclear export file, and yield pelican fields"""
try:
from bs4 import BeautifulSoup
except ImportError:
error = ('Missing dependency '
'"BeautifulSoup4" and "lxml" required to import Dotclear files.')
sys.exit(error)
in_cat = False
in_post = False
category_list = {}
posts = []
with open(file, 'r', encoding='utf-8') as f:
for line in f:
# remove final \n
line = line[:-1]
if line.startswith('[category'):
in_cat = True
elif line.startswith('[post'):
in_post = True
elif in_cat:
fields = line.split('","')
if not line:
in_cat = False
else:
# remove 1st and last ""
fields[0] = fields[0][1:]
# fields[-1] = fields[-1][:-1]
category_list[fields[0]]=fields[2]
elif in_post:
if not line:
in_post = False
break
else:
posts.append(line)
print("%i posts read." % len(posts))
for post in posts:
fields = post.split('","')
# post_id = fields[0][1:]
# blog_id = fields[1]
# user_id = fields[2]
cat_id = fields[3]
# post_dt = fields[4]
# post_tz = fields[5]
post_creadt = fields[6]
# post_upddt = fields[7]
# post_password = fields[8]
# post_type = fields[9]
post_format = fields[10]
# post_url = fields[11]
# post_lang = fields[12]
post_title = fields[13]
post_excerpt = fields[14]
post_excerpt_xhtml = fields[15]
post_content = fields[16]
post_content_xhtml = fields[17]
# post_notes = fields[18]
# post_words = fields[19]
# post_status = fields[20]
# post_selected = fields[21]
# post_position = fields[22]
# post_open_comment = fields[23]
# post_open_tb = fields[24]
# nb_comment = fields[25]
# nb_trackback = fields[26]
post_meta = fields[27]
# redirect_url = fields[28][:-1]
# remove seconds
post_creadt = ':'.join(post_creadt.split(':')[0:2])
author = ""
categories = []
tags = []
if cat_id:
categories = [category_list[id].strip() for id in cat_id.split(',')]
# Get tags related to a post
tag = post_meta.replace('{', '').replace('}', '').replace('a:1:s:3:\\"tag\\";a:', '').replace('a:0:', '')
if len(tag) > 1:
if int(tag[:1]) == 1:
newtag = tag.split('"')[1]
tags.append(
BeautifulSoup(
newtag
, "xml"
)
# bs4 always outputs UTF-8
.decode('utf-8')
)
else:
i=1
j=1
while(i <= int(tag[:1])):
newtag = tag.split('"')[j].replace('\\','')
tags.append(
BeautifulSoup(
newtag
, "xml"
)
# bs4 always outputs UTF-8
.decode('utf-8')
)
i=i+1
if j < int(tag[:1])*2:
j=j+2
"""
dotclear2 does not use markdown by default unless you use the markdown plugin
Ref: http://plugins.dotaddict.org/dc2/details/formatting-markdown
"""
if post_format == "markdown":
content = post_excerpt + post_content
else:
content = post_excerpt_xhtml + post_content_xhtml
content = content.replace('\\n', '')
post_format = "html"
kind = 'article' # TODO: Recognise pages
status = 'published' # TODO: Find a way for draft posts
yield (post_title, content, slugify(post_title), post_creadt, author,
categories, tags, status, kind, post_format)
def posterous2fields(api_token, email, password):
"""Imports posterous posts"""
import base64
from datetime import timedelta
try:
# py3k import
import json
except ImportError:
# py2 import
import simplejson as json
try:
# py3k import
import urllib.request as urllib_request
except ImportError:
# py2 import
import urllib2 as urllib_request
def get_posterous_posts(api_token, email, password, page = 1):
base64string = base64.encodestring(("%s:%s" % (email, password)).encode('utf-8')).replace(b'\n', b'')
url = "http://posterous.com/api/v2/users/me/sites/primary/posts?api_token=%s&page=%d" % (api_token, page)
request = urllib_request.Request(url)
request.add_header("Authorization", "Basic %s" % base64string.decode())
handle = urllib_request.urlopen(request)
posts = json.loads(handle.read().decode('utf-8'))
return posts
page = 1
posts = get_posterous_posts(api_token, email, password, page)
while len(posts) > 0:
posts = get_posterous_posts(api_token, email, password, page)
page += 1
for post in posts:
slug = post.get('slug')
if not slug:
slug = slugify(post.get('title'))
tags = [tag.get('name') for tag in post.get('tags')]
raw_date = post.get('display_date')
date_object = SafeDatetime.strptime(raw_date[:-6], "%Y/%m/%d %H:%M:%S")
offset = int(raw_date[-5:])
delta = timedelta(hours = offset / 100)
date_object -= delta
date = date_object.strftime("%Y-%m-%d %H:%M")
kind = 'article' # TODO: Recognise pages
status = 'published' # TODO: Find a way for draft posts
yield (post.get('title'), post.get('body_cleaned'), slug, date,
post.get('user').get('display_name'), [], tags, status, kind, "html")
def tumblr2fields(api_key, blogname):
""" Imports Tumblr posts (API v2)"""
from time import strftime, localtime
try:
# py3k import
import json
except ImportError:
# py2 import
import simplejson as json
try:
# py3k import
import urllib.request as urllib_request
except ImportError:
# py2 import
import urllib2 as urllib_request
def get_tumblr_posts(api_key, blogname, offset=0):
url = "http://api.tumblr.com/v2/blog/%s.tumblr.com/posts?api_key=%s&offset=%d&filter=raw" % (blogname, api_key, offset)
request = urllib_request.Request(url)
handle = urllib_request.urlopen(request)
posts = json.loads(handle.read().decode('utf-8'))
return posts.get('response').get('posts')
offset = 0
posts = get_tumblr_posts(api_key, blogname, offset)
while len(posts) > 0:
for post in posts:
title = post.get('title') or post.get('source_title') or post.get('type').capitalize()
slug = post.get('slug') or slugify(title)
tags = post.get('tags')
timestamp = post.get('timestamp')
date = strftime("%Y-%m-%d %H:%M:%S", localtime(int(timestamp)))
slug = strftime("%Y-%m-%d-", localtime(int(timestamp))) + slug
format = post.get('format')
content = post.get('body')
type = post.get('type')
if type == 'photo':
if format == 'markdown':
fmtstr = ''
else:
fmtstr = '<img alt="%s" src="%s" />'
content = '\n'.join(fmtstr % (photo.get('caption'), photo.get('original_size').get('url')) for photo in post.get('photos'))
content += '\n\n' + post.get('caption')
elif type == 'quote':
if format == 'markdown':
fmtstr = '\n\n— %s'
else:
fmtstr = '<p>— %s</p>'
content = post.get('text') + fmtstr % post.get('source')
elif type == 'link':
if format == 'markdown':
fmtstr = '[via](%s)\n\n'
else:
fmtstr = '<p><a href="%s">via</a></p>\n'
content = fmtstr % post.get('url') + post.get('description')
elif type == 'audio':
if format == 'markdown':
fmtstr = '[via](%s)\n\n'
else:
fmtstr = '<p><a href="%s">via</a></p>\n'
content = fmtstr % post.get('source_url') + post.get('caption') + post.get('player')
elif type == 'video':
if format == 'markdown':
fmtstr = '[via](%s)\n\n'
else:
fmtstr = '<p><a href="%s">via</a></p>\n'
content = fmtstr % post.get('source_url') + post.get('caption') + '\n'.join(player.get('embed_code') for player in post.get('player'))
elif type == 'answer':
title = post.get('question')
content = '<p><a href="%s" rel="external nofollow">%s</a>: %s</p>\n%s' % (post.get('asking_name'), post.get('asking_url'), post.get('question'), post.get('answer'))
content = content.rstrip() + '\n'
kind = 'article'
status = 'published' # TODO: Find a way for draft posts
yield (title, content, slug, date, post.get('blog_name'), [type],
tags, status, kind, format)
offset += len(posts)
posts = get_tumblr_posts(api_key, blogname, offset)
def feed2fields(file):
"""Read a feed and yield pelican fields"""
import feedparser
d = feedparser.parse(file)
for entry in d.entries:
date = (time.strftime("%Y-%m-%d %H:%M", entry.updated_parsed)
if hasattr(entry, "updated_parsed") else None)
author = entry.author if hasattr(entry, "author") else None
tags = [e['term'] for e in entry.tags] if hasattr(entry, "tags") else None
slug = slugify(entry.title)
kind = 'article'
yield (entry.title, entry.description, slug, date, author, [], tags, None,
kind, "html")
def build_header(title, date, author, categories, tags, slug, status=None, attachments=None):
from docutils.utils import column_width
"""Build a header from a list of fields"""
header = '%s\n%s\n' % (title, '#' * column_width(title))
if date:
header += ':date: %s\n' % date
if author:
header += ':author: %s\n' % author
if categories:
header += ':category: %s\n' % ', '.join(categories)
if tags:
header += ':tags: %s\n' % ', '.join(tags)
if slug:
header += ':slug: %s\n' % slug
if status:
header += ':status: %s\n' % status
if attachments:
header += ':attachments: %s\n' % ', '.join(attachments)
header += '\n'
return header
def build_markdown_header(title, date, author, categories, tags, slug, status=None,
attachments=None):
"""Build a header from a list of fields"""
header = 'Title: %s\n' % title
if date:
header += 'Date: %s\n' % date
if author:
header += 'Author: %s\n' % author
if categories:
header += 'Category: %s\n' % ', '.join(categories)
if tags:
header += 'Tags: %s\n' % ', '.join(tags)
if slug:
header += 'Slug: %s\n' % slug
if status:
header += 'Status: %s\n' % status
if attachments:
header += 'Attachments: %s\n' % ', '.join(attachments)
header += '\n'
return header
def get_ext(out_markup, in_markup='html'):
if in_markup == 'markdown' or out_markup == 'markdown':
ext = '.md'
else:
ext = '.rst'
return ext
def get_out_filename(output_path, filename, ext, kind,
dirpage, dircat, categories, wp_custpost):
filename = os.path.basename(filename)
# Enforce filename restrictions for various filesystems at once; see
# http://en.wikipedia.org/wiki/Filename#Reserved_characters_and_words
# we do not need to filter words because an extension will be appended
filename = re.sub(r'[<>:"/\\|?*^% ]', '-', filename) # invalid chars
filename = filename.lstrip('.') # should not start with a dot
if not filename:
filename = '_'
filename = filename[:249] # allow for 5 extra characters
out_filename = os.path.join(output_path, filename+ext)
# option to put page posts in pages/ subdirectory
if dirpage and kind == 'page':
pages_dir = os.path.join(output_path, 'pages')
if not os.path.isdir(pages_dir):
os.mkdir(pages_dir)
out_filename = os.path.join(pages_dir, filename+ext)
elif not dirpage and kind == 'page':
pass
# option to put wp custom post types in directories with post type
# names. Custom post types can also have categories so option to
# create subdirectories with category names
elif kind != 'article':
if wp_custpost:
typename = slugify(kind)
else:
typename = ''
kind = 'article'
if dircat and (len(categories) > 0):
catname = slugify(categories[0])
else:
catname = ''
out_filename = os.path.join(output_path, typename,
catname, filename+ext)
if not os.path.isdir(os.path.join(output_path, typename, catname)):
os.makedirs(os.path.join(output_path, typename, catname))
# option to put files in directories with categories names
elif dircat and (len(categories) > 0):
catname = slugify(categories[0])
out_filename = os.path.join(output_path, catname, filename+ext)
if not os.path.isdir(os.path.join(output_path, catname)):
os.mkdir(os.path.join(output_path, catname))
return out_filename
def get_attachments(xml):
"""returns a dictionary of posts that have attachments with a list
of the attachment_urls
"""
items = get_items(xml)
names = {}
attachments = []
for item in items:
kind = item.find('post_type').string
filename = item.find('post_name').string
post_id = item.find('post_id').string
if kind == 'attachment':
attachments.append((item.find('post_parent').string,
item.find('attachment_url').string))
else:
filename = get_filename(filename, post_id)
names[post_id] = filename
attachedposts = {}
for parent, url in attachments:
try:
parent_name = names[parent]
except KeyError:
#attachment's parent is not a valid post
parent_name = None
try:
attachedposts[parent_name].append(url)
except KeyError:
attachedposts[parent_name] = []
attachedposts[parent_name].append(url)
return attachedposts
def download_attachments(output_path, urls):
"""Downloads wordpress attachments and returns a list of paths to
attachments that can be associated with a post (relative path to output
directory). Files that fail to download, will not be added to posts"""
locations = []
for url in urls:
path = urlparse(url).path
#teardown path and rebuild to negate any errors with
#os.path.join and leading /'s
path = path.split('/')
filename = path.pop(-1)
localpath = ''
for item in path:
if sys.platform != 'win32' or ':' not in item:
localpath = os.path.join(localpath, item)
full_path = os.path.join(output_path, localpath)
if not os.path.exists(full_path):
os.makedirs(full_path)
print('downloading {}'.format(filename))
try:
urlretrieve(url, os.path.join(full_path, filename))
locations.append(os.path.join(localpath, filename))
except (URLError, IOError) as e:
#Python 2.7 throws an IOError rather Than URLError
logger.warning("No file could be downloaded from %s\n%s", url, e)
return locations
def fields2pelican(fields, out_markup, output_path,
dircat=False, strip_raw=False, disable_slugs=False,
dirpage=False, filename_template=None, filter_author=None,
wp_custpost=False, wp_attach=False, attachments=None):
for (title, content, filename, date, author, categories, tags, status,
kind, in_markup) in fields:
if filter_author and filter_author != author:
continue
slug = not disable_slugs and filename or None
if wp_attach and attachments:
try:
urls = attachments[filename]
attached_files = download_attachments(output_path, urls)
except KeyError:
attached_files = None
else:
attached_files = None
ext = get_ext(out_markup, in_markup)
if ext == '.md':
header = build_markdown_header(title, date, author, categories,
tags, slug, status, attached_files)
else:
out_markup = "rst"
header = build_header(title, date, author, categories,
tags, slug, status, attached_files)
out_filename = get_out_filename(output_path, filename, ext,
kind, dirpage, dircat, categories, wp_custpost)
print(out_filename)
if in_markup in ("html", "wp-html"):
html_filename = os.path.join(output_path, filename+'.html')
with open(html_filename, 'w', encoding='utf-8') as fp:
# Replace newlines with paragraphs wrapped with <p> so
# HTML is valid before conversion
if in_markup == "wp-html":
new_content = decode_wp_content(content)
else:
paragraphs = content.splitlines()
paragraphs = ['<p>{0}</p>'.format(p) for p in paragraphs]
new_content = ''.join(paragraphs)
fp.write(new_content)
parse_raw = '--parse-raw' if not strip_raw else ''
cmd = ('pandoc --normalize {0} --from=html'
' --to={1} -o "{2}" "{3}"').format(
parse_raw, out_markup, out_filename, html_filename)
try:
rc = subprocess.call(cmd, shell=True)
if rc < 0:
error = "Child was terminated by signal %d" % -rc
exit(error)
elif rc > 0:
error = "Please, check your Pandoc installation."
exit(error)
except OSError as e:
error = "Pandoc execution failed: %s" % e
exit(error)
os.remove(html_filename)
with open(out_filename, 'r', encoding='utf-8') as fs:
content = fs.read()
if out_markup == "markdown":
# In markdown, to insert a <br />, end a line with two or more spaces & then a end-of-line
content = content.replace("\\\n ", " \n")
content = content.replace("\\\n", " \n")
with open(out_filename, 'w', encoding='utf-8') as fs:
fs.write(header + content)
if wp_attach and attachments and None in attachments:
print("downloading attachments that don't have a parent post")
urls = attachments[None]
orphan_galleries = download_attachments(output_path, urls)
def main():
parser = argparse.ArgumentParser(
description="Transform feed, WordPress, Tumblr, Dotclear, or Posterous "
"files into reST (rst) or Markdown (md) files. Be sure to "
"have pandoc installed.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(dest='input', help='The input file to read')
parser.add_argument('--wpfile', action='store_true', dest='wpfile',
help='Wordpress XML export')
parser.add_argument('--dotclear', action='store_true', dest='dotclear',
help='Dotclear export')
parser.add_argument('--posterous', action='store_true', dest='posterous',
help='Posterous export')
parser.add_argument('--tumblr', action='store_true', dest='tumblr',
help='Tumblr export')
parser.add_argument('--feed', action='store_true', dest='feed',
help='Feed to parse')
parser.add_argument('-o', '--output', dest='output', default='output',
help='Output path')
parser.add_argument('-m', '--markup', dest='markup', default='rst',
help='Output markup format (supports rst & markdown)')
parser.add_argument('--dir-cat', action='store_true', dest='dircat',
help='Put files in directories with categories name')
parser.add_argument('--dir-page', action='store_true', dest='dirpage',
help=('Put files recognised as pages in "pages/" sub-directory'
' (wordpress import only)'))
parser.add_argument('--filter-author', dest='author',
help='Import only post from the specified author')
parser.add_argument('--strip-raw', action='store_true', dest='strip_raw',
help="Strip raw HTML code that can't be converted to "
"markup such as flash embeds or iframes (wordpress import only)")
parser.add_argument('--wp-custpost', action='store_true',
dest='wp_custpost',
help='Put wordpress custom post types in directories. If used with '
'--dir-cat option directories will be created as '
'/post_type/category/ (wordpress import only)')
parser.add_argument('--wp-attach', action='store_true', dest='wp_attach',
help='(wordpress import only) Download files uploaded to wordpress as '
'attachments. Files will be added to posts as a list in the post '
'header. All files will be downloaded, even if '
"they aren't associated with a post. Files with be downloaded "
'with their original path inside the output directory. '
'e.g. output/wp-uploads/date/postname/file.jpg '
'-- Requires an internet connection --')
parser.add_argument('--disable-slugs', action='store_true',
dest='disable_slugs',
help='Disable storing slugs from imported posts within output. '
'With this disabled, your Pelican URLs may not be consistent '
'with your original posts.')
parser.add_argument('-e', '--email', dest='email',
help="Email address (posterous import only)")
parser.add_argument('-p', '--password', dest='password',
help="Password (posterous import only)")
parser.add_argument('-b', '--blogname', dest='blogname',
help="Blog name (Tumblr import only)")
args = parser.parse_args()
input_type = None
if args.wpfile:
input_type = 'wordpress'
elif args.dotclear:
input_type = 'dotclear'
elif args.posterous:
input_type = 'posterous'
elif args.tumblr:
input_type = 'tumblr'
elif args.feed:
input_type = 'feed'
else:
error = "You must provide either --wpfile, --dotclear, --posterous, --tumblr or --feed options"
exit(error)
if not os.path.exists(args.output):
try:
os.mkdir(args.output)
except OSError:
error = "Unable to create the output folder: " + args.output
exit(error)
if args.wp_attach and input_type != 'wordpress':
error = "You must be importing a wordpress xml to use the --wp-attach option"
exit(error)
if input_type == 'wordpress':
fields = wp2fields(args.input, args.wp_custpost or False)
elif input_type == 'dotclear':
fields = dc2fields(args.input)
elif input_type == 'posterous':
fields = posterous2fields(args.input, args.email, args.password)
elif input_type == 'tumblr':
fields = tumblr2fields(args.input, args.blogname)
elif input_type == 'feed':
fields = feed2fields(args.input)
if args.wp_attach:
attachments = get_attachments(args.input)
else:
attachments = None
init() # init logging
fields2pelican(fields, args.markup, args.output,
dircat=args.dircat or False,
dirpage=args.dirpage or False,
strip_raw=args.strip_raw or False,
disable_slugs=args.disable_slugs or False,
filter_author=args.author,
wp_custpost = args.wp_custpost or False,
wp_attach = args.wp_attach or False,
attachments = attachments or None)
| agpl-3.0 |
bikong2/django | tests/null_fk_ordering/models.py | 210 | 1605 | """
Regression tests for proper working of ForeignKey(null=True). Tests these bugs:
* #7512: including a nullable foreign key reference in Meta ordering has un
xpected results
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
# The first two models represent a very simple null FK ordering case.
class Author(models.Model):
name = models.CharField(max_length=150)
@python_2_unicode_compatible
class Article(models.Model):
title = models.CharField(max_length=150)
author = models.ForeignKey(Author, models.SET_NULL, null=True)
def __str__(self):
return 'Article titled: %s' % (self.title, )
class Meta:
ordering = ['author__name', ]
# These following 4 models represent a far more complex ordering case.
class SystemInfo(models.Model):
system_name = models.CharField(max_length=32)
class Forum(models.Model):
system_info = models.ForeignKey(SystemInfo, models.CASCADE)
forum_name = models.CharField(max_length=32)
@python_2_unicode_compatible
class Post(models.Model):
forum = models.ForeignKey(Forum, models.SET_NULL, null=True)
title = models.CharField(max_length=32)
def __str__(self):
return self.title
@python_2_unicode_compatible
class Comment(models.Model):
    post = models.ForeignKey(Post, models.SET_NULL, null=True)
    comment_text = models.CharField(max_length=250)

    class Meta:
        # Far more complex case: ordering across three nullable/cascading hops.
        ordering = ['post__forum__system_info__system_name', 'comment_text']

    def __str__(self):
        return self.comment_text
| bsd-3-clause |
xxn59/weat | app/views.py | 1 | 14710 | from flask import render_template, flash, redirect, session, url_for, request, g, make_response
from flask.ext.login import login_user, logout_user, current_user, login_required
from . import db, lm
from . import app
from .forms import LoginForm, SignupForm, FoodForm, ChangePasswordForm, AddFoodForm, EditForm
from datetime import datetime, date, time, timedelta
from .models import User, Food, Salad, Order
# food_list = []
@lm.user_loader
def load_user(id):
    """Flask-Login hook: resolve the unicode user id from the session cookie."""
    user_id = int(id)
    return User.query.get(user_id)
@app.before_request
def before_request():
    # Expose the logged-in user on flask.g for every request/template.
    g.user = current_user
@app.errorhandler(404)
def not_found_error(error):
    """Render the custom 404 page."""
    body = render_template('404.html')
    return body, 404
@app.errorhandler(500)
def internal_error(error):
    """Roll back any half-finished DB transaction, then render the 500 page."""
    db.session.rollback()
    body = render_template('500.html')
    return body, 500
@app.route('/', methods=['GET', 'POST'])
@app.route('/index', methods=['GET', 'POST'])
@app.route('/index/<int:page>', methods=['GET', 'POST'])
@login_required
def index(page=1):
    """Home page: list all orders, newest first.

    BUG FIX: the '/index/<int:page>' route previously mapped onto a
    zero-argument view, so any request to that URL raised a TypeError;
    ``page`` now has a default so all three routes resolve.

    A POST with a 'copy' field re-orders a previous salad verbatim
    (status 9 marks a copied order awaiting review).
    """
    user = g.user
    orders = Order.query.filter().order_by(Order.timestamp.desc())
    if request.method == 'POST':
        copy_id = request.form.get('copy', None)
        if copy_id is not None:
            copy_salad = Salad.query.get(copy_id)
            new_salad = Salad(foods=copy_salad.foods, price=copy_salad.price)
            db.session.add(new_salad)
            new_order = Order(cos_id=user.id, status=9, price=new_salad.price,
                              timestamp=datetime.utcnow())
            new_order.salads.append(new_salad)
            db.session.add(new_order)
            db.session.commit()
            return redirect(url_for('order_review', source='copy'))
    return render_template('index.html',
                           title='We eat together!',
                           user=user,
                           orders=orders)
@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Create a new account, or complain when the nickname is taken.

    BUG FIX: the duplicate-nickname branch called
    ``flash('User exists.' % form.nickname.data)`` -- applying ``%`` to a
    string with no conversion specifier raises TypeError at runtime.  The
    message now actually interpolates the nickname.
    """
    form = SignupForm()
    if form.validate_on_submit():
        user = User.query.filter_by(nickname=form.nickname.data).first()
        if user is None:
            # New nickname: persist the account and continue to the site.
            user = User(nickname=form.nickname.data,
                        floor=form.floor.data,
                        group=form.group.data,
                        password=form.password.data)
            db.session.add(user)
            db.session.commit()
            return redirect(request.args.get('next') or url_for('index'))
        else:
            flash('User %s exists.' % form.nickname.data)
    return render_template('signup.html',
                           title='Sign Up for Weat!',
                           form=form)
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Sign a user in by nickname; already-authenticated users go home."""
    if g.user is not None and g.user.is_authenticated:
        # print 'user valid:', g.user
        return redirect(url_for('index'))
    form = LoginForm()
    if form.validate_on_submit():
        # Remember the checkbox choice across the redirect.
        session['remember_me'] = form.remember_me.data
        # print 'on submit'
        # print 'form.nickname:',form.nickname
        user = User.query.filter_by(nickname=form.nickname.data).first()
        # print 'filtering nickname'
        if user is None:
            # Unknown nickname: re-render the form with a message.
            # print 'nickname none'
            flash('The nickname is not registered.')
            # return redirect(url_for('signup'))
            # user = User(nickname=form.nickname.data, floor=form.floor.data)
            # db.session.add(user)
            # db.session.commit()
            # return redirect(url_for('signup'))
        else:
            if user.is_admin():
                # NOTE(review): admin password check is disabled -- admins
                # currently log in like everyone else.
                pass
                # flash('please enter the PASSWORD')
                # return redirect(url_for('login_admin'))
            # print 'nickname exist:', user.nickname
            login_user(user, remember=session['remember_me'])
            return redirect(request.args.get('next') or url_for('index'))
    # remember_me = False
    # return oid.try_login(form.openid.data, ask_for=['nickname', 'email'])
    return render_template('login.html',
                           title='Sign In',
                           form=form)
@app.route('/login_admin', methods=['GET', 'POST'])
def login_admin():
    """Render the admin sign-in page (authentication itself happens in /login)."""
    login_form = LoginForm()
    return render_template('login_admin.html',
                           title='Sign In',
                           form=login_form)
@app.route('/logout')
def logout():
    # End the Flask-Login session and return to the home page.
    logout_user()
    return redirect(url_for('index'))
@app.route('/user/<nickname>')
@app.route('/user/<nickname>/<int:page>')
@login_required
def user(nickname, page=1):
    """Profile page for *nickname*; bounces home when no such user exists."""
    profile = User.query.filter_by(nickname=nickname).first()
    if profile is None:
        flash('User %s not found.' % nickname)
        return redirect(url_for('index'))
    return render_template('user.html',
                           user=profile)
from models import food_category
@app.route('/food_add', methods=['GET', 'POST'])
@login_required
def food_add():
    """Admin-only page (level >= 3) for adding a new Food item."""
    current = g.user
    if current.level < 3:
        # Non-admins are bounced back to the home page.
        return redirect(url_for('index'))
    form = FoodForm()
    form.cat.choices = [(f, f) for f in food_category]
    foods = Food.query.all()
    if form.validate_on_submit():
        existing = Food.query.filter_by(name=form.name.data).first()
        if existing is None:
            new_food = Food(name=form.name.data,
                            price=form.price.data,
                            cat=form.cat.data)
            db.session.add(new_food)
            db.session.commit()
            flash('add food %s succeed!' % new_food.name)
            return redirect(url_for('food_add'))
        flash('this food is already included.')
    return render_template('food_add.html',
                           title='Add new food',
                           form=form,
                           foods=foods)
@app.route('/order_add', methods=['GET', 'POST'])
@login_required
def order_add():
    """Build up the current user's in-progress order (status 1).

    Two POST actions share this endpoint:
    * 'add=<food id>'  -- append one food to the open salad (creates the
      order/salad rows on first use) and answer 204 for the AJAX caller.
    * 'over=7963'      -- finalise: price the salad (+4 base, minimum 25),
      roll salad prices up into the order, move both to status 2 and send
      the user to the review page.
    """
    user = g.user
    form = AddFoodForm()
    # Regular menu and "new arrival" specials are rendered separately.
    foods = Food.query.filter(Food.cat != 'new_arrival').order_by(Food.price)
    foods_new = Food.query.filter(Food.cat == 'new_arrival').order_by(Food.price)
    if request.method == 'POST':
        # print request.form.values
        done = request.form.get('over', None)
        # print done
        if done == "7963":
            # Magic value sent by the "finish order" button.
            # print 'yes,done=7963'
            # meal = request.form.get('meal', None)
            # if meal is None:
            # flash('please choose which meal you want to order')
            # return redirect(url_for('order_add'))
            submit_order = Order.query.filter_by(cos_id=user.id, status=1).first()
            if submit_order is None:
                flash('no unconfirmed order to submit ')
                return redirect(url_for('order_add'))
            submit_salad = Salad.query.filter_by(order_id=submit_order.id, status=1).first()
            if submit_salad is None:
                flash('no incomplete salad to submit')
                return redirect(url_for('order_add'))
            # Salad price = sum of foods + 4 (base charge); minimum 25.
            for f in submit_salad.foods:
                submit_salad.price = submit_salad.price + f.price
            submit_salad.price += 4
            if submit_salad.price < 25:
                flash('price < 25, please add something more~')
                return redirect(url_for('order_add'))
            for s in submit_order.salads:
                submit_order.price = submit_order.price + s.price
            submit_order.status = 2
            submit_salad.status = 2
            submit_order.timestamp = datetime.utcnow()
            # print 'db commit'
            db.session.commit()
            # user.add_order(new_order)
            return redirect(url_for('order_review', source='new'))
        click_id = request.form.get('add', None)
        if click_id is None:
            # print 'no click'
            pass
        else:
            # Lazily create the open order and its open salad on first add.
            # print 'click_id:', click_id
            new_order = Order.query.filter_by(cos_id=user.id, status=1).first()
            if new_order is None:
                new_order = Order(cos_id=user.id, status=1)
                db.session.add(new_order)
                # print 'added new order'
            new_salad = Salad.query.filter_by(order_id=new_order.id, status=1).first()
            if new_salad is None:
                new_salad = Salad(order_id=new_order.id, status=1)
                db.session.add(new_salad)
                # print 'added new salad'
            food = Food.query.get(click_id)
            new_salad.foods.append(food)
            db.session.commit()
            # 204 No Content: the page's JS adds items without a reload.
            resp = make_response('', 204)
            return resp
            # db.session.commit()
            # print 'food_list:', food_list
            # new_salad.add_food(food)
            # db.session.commit()
    if form.validate_on_submit():
        print 'here'
        # if form.remove_id.data is not None and form.remove_id.data != 9999:
        # print 'remove id:', form.remove_id.data
        # food1 = foods.query.filter_by(id=form.remove_id.data)
        # if food1 is None:
        # print 'delete error:', form.remove_id.data
        # else:
        # db.delete(food1)
        # print 'food deleted:', food1.name
        # db.commit()
        #
    return render_template('order_add.html',
                           title='add new order',
                           form=form,
                           foods=foods,
                           foods_new=foods_new)
@app.route('/order_review/<source>', methods=['GET', 'POST'])
@login_required
def order_review(source):
    """Review a pending order before final confirmation.

    ``source`` selects which pending orders to show: 'copy' (status 9,
    created by re-ordering from the home page) or 'new' (status 2, just
    submitted from order_add).

    BUG FIX: any other ``source`` value previously left ``new_orders``
    unbound and the view crashed with UnboundLocalError; such requests now
    redirect to the index.  The always-true ``if new_orders is not None``
    wrapper (a query object is never None) was removed without changing
    behaviour.
    """
    user = g.user
    if source == 'copy':
        new_orders = Order.query.filter_by(cos_id=user.id, status=9)
    elif source == 'new':
        new_orders = Order.query.filter_by(cos_id=user.id, status=2)
    else:
        return redirect(url_for('index'))
    if request.method == 'POST':
        confirm = request.form.get('confirm', None)
        remove = request.form.get('remove_order', None)
        if confirm is not None:
            # Confirmation requires a meal choice (lunch/dinner).
            meal = request.form.get('meal', None)
            if meal is None:
                flash('please choose which meal you want to order')
                return redirect(url_for('order_review', source=source))
            new_order = Order.query.get(confirm)
            new_order.status = 3
            new_order.timestamp = datetime.utcnow()
            new_order.meal = meal
            db.session.commit()
            return redirect(url_for('orders'))
        if remove is not None:
            # Delete the order together with its salads.
            remove_order = Order.query.get(remove)
            db.session.delete(remove_order)
            for s in remove_order.salads:
                db.session.delete(s)
            db.session.commit()
            return redirect(url_for('order_review', source='new'))
    return render_template('order_review.html',
                           title='add new order',
                           orders=new_orders)
@app.route('/orders', methods=['GET', 'POST'])
@login_required
def orders():
    """Personal order list for regular users; admins go to the global view."""
    user = g.user
    if user.level < 3:
        orders = Order.query.filter_by(cos_id=user.id)
        # order_len is only a has-any flag (0/1) used by the template.
        o = orders.first()
        if o is None:
            order_len = 0
        else:
            order_len = 1
        print order_len
        if request.method == 'POST':
            btn = request.form.get('remove', None)
            if btn is not None:
                print btn
                del_order = Order.query.get(btn)
                print del_order.cos_id
                user.del_order(del_order)
                # db.session.remove(del_order)
                db.session.commit()
                return redirect(url_for('orders'))
            else:
                print 'btn is none'
        return render_template('orders.html',
                               title='My Orders',
                               user=user,
                               orders=orders,
                               len=order_len)
    # Admins (level >= 3) see every order instead of their own.
    return redirect(url_for('orders_all'))
@app.route('/orders_all', methods=['GET', 'POST'])
@login_required
def orders_all():
    """Admin (level >= 3) overview of confirmed orders (status 3) by meal.

    Dinner orders are limited to a window starting at yesterday's UTC
    midnight; lunch orders are not time-filtered (unchanged behaviour).
    Non-admins are redirected to their personal order list.

    FIX: removed an unused ``Order.query.all()`` that fetched the whole
    table on every request for no reason.
    """
    user = g.user
    if user.level < 3:
        return redirect(url_for('orders'))
    today = datetime.utcnow().date()
    midnight = time(0, 0)
    # Window start: yesterday's midnight (UTC).
    query_begin = datetime.combine(today, midnight) - timedelta(days=1)
    orders_lunch = Order.query.filter(Order.status == 3, Order.meal == 'lunch')
    orders_dinner = Order.query.filter(
        Order.timestamp.between(query_begin, datetime.utcnow()),
        Order.status == 3,
        Order.meal == 'dinner')
    return render_template('orders_all.html',
                           title='All Orders',
                           user=user,
                           orders_lunch=orders_lunch,
                           orders_dinner=orders_dinner
                           )
@app.route('/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
    """Render the change-password form (submission is not handled here)."""
    return render_template("change_password.html", form=ChangePasswordForm())
@app.route('/user_edit', methods=['GET', 'POST'])
@login_required
def user_edit():
    """Edit the current user's floor/group.

    BUG FIXES:
    * The form was pre-filled from the user *before* validation, which
      overwrote the submitted POST values -- edits could never take
      effect.  Pre-fill now only happens when the form was not submitted.
    * The new values were never committed to the database.
    * ``url_for('user')`` without the required ``nickname`` argument
      raises BuildError; the redirect now supplies it.
    """
    user = g.user
    form = EditForm()
    if form.validate_on_submit():
        user.floor = form.floor.data
        user.group = form.group.data
        db.session.add(user)
        db.session.commit()
        return redirect(url_for('user', nickname=user.nickname))
    # Initial GET (or failed validation): show the current values.
    form.floor.data = user.floor
    form.group.data = user.group
    return render_template("user_edit.html", user=user, form=form)
@app.route('/users', methods=['GET', 'POST'])
@login_required
def users():
    """Super-admin (level >= 5) listing of every registered user."""
    current = g.user
    if current.level < 5:
        return redirect(url_for('index'))
    everyone = User.query.all()
    return render_template("users.html", users=everyone)
@app.route('/pay', methods=['GET', 'POST'])
def pay():
    # Static payment test page; no processing happens server-side.
    return render_template("pay_test.html")
@app.route('/about_me', methods=['GET', 'POST'])
def about_me():
    # Static "about" page.
    return render_template("about_me.html")
| mit |
saucelabs/Diamond | src/collectors/onewire/onewire.py | 15 | 2414 | # coding=utf-8
"""
The OneWireCollector collects data from 1-Wire Filesystem
You can configure which sensors are read in two way:
- add section [scan] with attributes and aliases,
(collector will scan owfs to find attributes)
or
- add sections with format id:$SENSOR_ID
See also: http://owfs.org/
Author: Tomasz Prus
#### Dependencies
* owfs
"""
import os
import diamond.collector
class OneWireCollector(diamond.collector.Collector):
    """Collects metrics from sensors exposed through the 1-Wire filesystem (owfs)."""

    def get_default_config_help(self):
        config_help = super(OneWireCollector, self).get_default_config_help()
        config_help.update({
        })
        return config_help

    def get_default_config(self):
        """
        Returns the default collector settings
        """
        config = super(OneWireCollector, self).get_default_config()
        config.update({
            'path': 'owfs',
            'owfs': '/mnt/1wire',
            #'scan': {'temperature': 't'},
            #'id:24.BB000000': {'file_with_value': 'alias'},
        })
        return config

    def collect(self):
        """
        Overrides the Collector.collect method.

        Sensors are selected either by scanning the owfs mount point
        (a [scan] config section) or via explicit 'id:<sensor>' sections.
        """
        metrics = {}

        if 'scan' in self.config:
            # Every directory entry containing a '.' is a 1-Wire sensor id.
            for ld in os.listdir(self.config['owfs']):
                if '.' in ld:
                    self.read_values(ld, self.config['scan'], metrics)

        for oid, files in self.config.iteritems():
            if oid[:3] == 'id:':
                self.read_values(oid[3:], files, metrics)

        for fn, fv in metrics.iteritems():
            self.publish(fn, fv, 2)

    def read_values(self, oid, files, metrics):
        """
        Reads values from owfs/oid/{files} and updates
        metrics with format [oid.alias] = value.

        BUG FIX: the original bare ``except:`` clauses also intercepted
        KeyboardInterrupt/SystemExit; each handler now catches only the
        error it can report (I/O failure, or a non-numeric value).  The
        ``with`` statement also guarantees the file handle is closed even
        when the read fails (the old code leaked it).
        """
        oid_path = os.path.join(self.config['owfs'], oid)
        oid = oid.replace('.', '_')

        for fn, alias in files.iteritems():
            fv = os.path.join(oid_path, fn)
            if os.path.isfile(fv):
                try:
                    with open(fv) as f:
                        v = f.read()
                except IOError:
                    self.log.error("Unable to read %s", fv)
                    raise
                try:
                    v = float(v)
                except ValueError:
                    self.log.error("Unexpected value %s in %s", v, fv)
                    raise
                metrics["%s.%s" % (oid, alias)] = v
| mit |
wmealem/VerbTrainer | languages/spanish.py | 1 | 8117 | # Spanish verb conjugations
from collections import namedtuple, OrderedDict
# Spanish has two forms of the sps familiar - 'tú' and 'vos'.
# Field order: 1st/2nd/2nd-voseo/3rd person singular, then 1st/2nd/3rd plural.
SpanishCategory = namedtuple('SpanishCategory', 'fps sps spsv tps fpp spp tpp')
_PRONOUNS = SpanishCategory('yo', 'tú', 'vos', 'él/ella/usted',
                            'nosotros/nosotras', 'vosotros/vosotras',
                            'ellos/ellas/ustedes')
# '<pronoun> <inflected form>' -- used by the plain two-column view.
_STD_FORMAT = '{} {}'
# Anki cloze-deletion template: pronoun, form, infinitive, tense.
# Doubled braces render literal '{{c1:: ... }}' in the output.
_STD_CLOZE_FORMAT = '{0} {{{{c1::{1}::{2}, {3}}}}}'
_TENSES =\
[# tiempos simples
'presente',
'pretérito imperfecto',
'pretérito indefinido',
'futuro simple',
# tiempos compuestos
'pretérito perfecto',
'pretérito pluscuamperfecto',
'pretérito anterior',
'futuro compuesto',
# condicional
'condicional simple',
'condicional compuesto',
# imperativo
'imperativo positivo',
'imperativo negativo',
# subjuntivo - tiempos simples
'presente de subjuntivo',
'imperfecto de subjuntivo(-ra)',
'imperfecto de subjuntivo(-se)'
'futuro simple de subjuntivo',
# subjuntivo - timepos compuestos
'pretérito perfecto de subjuntivo',
'pluscuamperfecto de subjuntivo',
'futuro compuesto de subjuntivo'
]
# Endings for the simple tenses.
# Keyed first by conjugation class ('ar'/'er'/'ir'), then by tense name;
# each value lists the seven personal endings in SpanishCategory order.
_ENDINGS =\
    {'ar':
     {'presente': SpanishCategory('o', 'as', 'ás', 'a',
                                  'amos', 'áis', 'an'),
      'pretérito imperfecto': SpanishCategory('aba', 'abas', 'abas', 'aba',
                                              'ábamos', 'abais', 'aban'),
      'futuro simple': SpanishCategory('é', 'ás', 'ás', 'á',
                                       'emos', 'éis', 'án'),
      'pretérito indefinido': SpanishCategory('é', 'aste(s)', 'aste(s)', 'ó',
                                              'amos', 'asteis', 'aron')
     },
     'er':
     {'presente': SpanishCategory('o', 'es', 'és', 'e',
                                  'emos', 'éis', 'en'),
      'pretérito imperfecto': SpanishCategory('ía', 'ías', 'ías', 'ía',
                                              'íamos', 'íais', 'ían'),
      'futuro simple': SpanishCategory('é', 'ás', 'ás', 'á',
                                       'emos', 'éis', 'án'),
      'pretérito indefinido': SpanishCategory('í', 'iste(s)', 'iste(s)', 'ió',
                                              'imos', 'isteis', 'ieron')
     },
     'ir':
     {'presente': SpanishCategory('o', 'es', 'ís', 'e',
                                  'imos', 'ís', 'en'),
      'pretérito imperfecto': SpanishCategory('ía', 'ías', 'ías', 'ía',
                                              'íamos', 'íais', 'ían'),
      'futuro simple': SpanishCategory('é', 'ás', 'ás', 'á',
                                       'emos', 'éis', 'án'),
      'pretérito indefinido': SpanishCategory('í', 'iste(s)', 'iste(s)', 'ió',
                                              'imos', 'isteis', 'ieron')
     }
    }
# logic for adjusting the stem of the verb for the case
# Maps a simple tense to a function deriving the stem from the infinitive:
# most tenses drop the two-letter ending; 'futuro simple' keeps the whole
# infinitive (endings attach directly to it).
_STEM_RULES =\
    {'presente': (lambda x: x[:-2]),
     'pretérito imperfecto': (lambda x: x[:-2]),
     'futuro simple': (lambda x: x),
     'pretérito indefinido': (lambda x: x[:-2])
    }
def construct_stem_and_ending(infinitive, tense):
    """Build the seven inflected forms (stem + ending) for the given tense.

    The compound 'pretérito perfecto' is formed with auxiliary 'haber'
    (presente) plus the past participle; simple tenses adjust the stem via
    _STEM_RULES and append the personal endings from _ENDINGS.
    """
    if tense == 'pretérito perfecto':
        participle = _construct_past_participle(infinitive)
        forms = ['{} {}'.format(aux, participle)
                 for aux in AUX_VERB['haber']['presente']]
    else:
        verb_class = infinitive[-2:]
        base = _STEM_RULES[tense](infinitive)
        forms = [base + suffix for suffix in _ENDINGS[verb_class][tense]]
    return SpanishCategory._make(forms)
def _construct_past_participle(infinitive):
'''
Given an infinitive, returns the past participle for
the given verb
'''
ending = infinitive[-2:]
stem = infinitive[:-2]
if ending == 'ar':
return stem + 'ado'
elif ending == 'er':
return stem + 'ido'
elif ending == 'ir':
return stem + 'ido'
else:
raise ValueError('parameter not a verb infinitive')
def construct_inflection(infinitive, tense):
    '''
    Build the full conjugation for *infinitive* in *tense*: each field is
    a (pronoun, inflected form) pair.
    '''
    forms = construct_stem_and_ending(infinitive, tense)
    pairs = list(zip(_PRONOUNS, forms))
    return SpanishCategory._make(pairs)
def output_normal_view(infinitive, tense, conj):
    '''
    Pretty-printing for the traditional two-column (singular ‖ plural)
    layout of a verb conjugation.
    '''
    def cell(pair):
        # One "pronoun form" cell.
        return _STD_FORMAT.format(*pair)

    def row(left, right):
        return '{:<25}‖ {}'.format(cell(left), cell(right))

    header = '{}, {}:'.format(infinitive, tense)
    rule = '⎯' * 45
    return [header,
            rule,
            row(conj.fps, conj.fpp),
            row(conj.sps, conj.spp),
            # The voseo form has no plural counterpart.
            '{:<25}‖'.format(cell(conj.spsv)),
            row(conj.tps, conj.tpp)]
def output_cloze(infinitive, tense, conj):
    '''
    Combines the different parts of a verb conjugation with
    Anki's required formatting to produce a form suitable
    for a cloze-deletion card.
    '''
    # Resolves the old "make this pythonic" TODO: one cloze line per
    # (pronoun, inflected form) pair; the previous enumerate()-based loop
    # never used its index.
    result = [_STD_CLOZE_FORMAT.format(pronoun, form, infinitive, tense)
              for pronoun, form in conj]
    return SpanishCategory._make(result)
def output_cloze_import(infinitive, tense, translation, sound, conj):
    '''
    Combines the output of the output_cloze function with optional
    translation and sound fields to produce the pipe-separated record
    format required by Anki's import function.
    '''
    rows = output_cloze(infinitive, tense, conj)
    # Translation column (empty when no translations supplied).
    if translation:
        rows = [line + '|{}'.format(text)
                for line, text in zip(rows, translation)]
    else:
        rows = [line + '|' for line in rows]
    # Sound column (empty when no audio supplied).
    if sound:
        rows = [line + '|[sound:{}]'.format(clip)
                for line, clip in zip(rows, sound)]
    else:
        rows = [line + '|' for line in rows]
    # Final column tags every card with the infinitive.
    rows = [line + '|{}'.format(infinitive) for line in rows]
    return SpanishCategory._make(rows)
# Irregular forms of the auxiliary verb 'haber', used to build compound
# tenses.  BUG FIX: corrected two misplaced accents in the pretérito
# imperfecto row -- 'habíá' -> 'había' and 'habíaís' -> 'habíais'.
AUX_VERB = {'haber':
            {'presente':
             SpanishCategory._make(['he', 'has', 'has', 'ha',
                                    'hemos', 'habéis', 'han']),
             'pretérito imperfecto':
             SpanishCategory._make(['había', 'habías', 'habías', 'había',
                                    'habíamos', 'habíais', 'habían']),
             'pretérito indefinido':
             SpanishCategory._make(['hube', 'hubiste(s)', 'hubiste(s)', 'hubo',
                                    'hubimos', 'hubisteis', 'hubieron']),
             'futuro simple':
             SpanishCategory._make(['habré', 'habrás', 'habrás', 'habrá',
                                    'habremos', 'habréis', 'habrán']),
             'condicional simple':
             SpanishCategory._make(['habría', 'habrías', 'habrías', 'habría',
                                    'habríamos', 'habríais', 'habrían']),
             'presente de subjuntivo':
             SpanishCategory._make(['haya', 'hayas', 'hayas', 'haya',
                                    'hayamos', 'hayáis', 'hayan']),
             'imperfecto de subjuntivo(-ra)':
             SpanishCategory._make(['hubiera', 'hubieras', 'hubieras', 'hubiera',
                                    'hubiéramos', 'hubierais', 'hubieran']),
             'imperfecto de subjuntivo(-se)':
             SpanishCategory._make(['hubiese', 'hubieses', 'hubieses', 'hubiese',
                                    'hubiésemos', 'hubieseis', 'hubiesen'])}}
kevinastone/sentry | src/sentry/web/frontend/debug/mail.py | 8 | 5009 | from __future__ import absolute_import, print_function
import logging
import traceback
from django.core.urlresolvers import reverse
from django.utils.safestring import mark_safe
from sentry.models import (
Activity, Event, Group, Organization, Project, Rule, Team
)
from sentry.utils.samples import load_data
from sentry.utils.email import inline_css
from sentry.utils.http import absolute_uri
from sentry.web.decorators import login_required
from sentry.web.helpers import render_to_response, render_to_string
# TODO(dcramer): use https://github.com/disqus/django-mailviews
class MailPreview(object):
    """Renders a text/HTML email template pair against a fixed context,
    used by the debug mail views to preview notification emails."""

    def __init__(self, html_template, text_template, context):
        self.html_template = html_template
        self.text_template = text_template
        self.context = context

    def text_body(self):
        """Render the plain-text variant."""
        return render_to_string(self.text_template, self.context)

    def html_body(self):
        """Render the HTML variant with CSS inlined; print the traceback
        before re-raising so template errors are visible in the console."""
        try:
            return inline_css(render_to_string(self.html_template, self.context))
        except Exception:
            traceback.print_exc()
            raise
@login_required
def new_event(request):
    """Debug view: preview the 'new event' (error) notification email
    using a synthetic team/project/group/event fixture."""
    team = Team(
        id=1,
        slug='example',
        name='Example',
    )
    project = Project(
        id=1,
        slug='example',
        name='Example',
        team=team,
    )
    group = Group(
        id=1,
        project=project,
        message='This is an example event.',
        level=logging.ERROR,
    )
    event = Event(
        id=1,
        project=project,
        group=group,
        message=group.message,
        data=load_data('python'),
    )
    rule = Rule(label="An example rule")

    # Render each event interface to its HTML email fragment; interfaces
    # without an email representation are skipped.
    interface_list = []
    for interface in event.interfaces.itervalues():
        body = interface.to_email_html(event)
        if not body:
            continue
        interface_list.append((interface.get_title(), mark_safe(body)))

    # NOTE(review): text_template points at the .html template -- looks
    # like it should be 'sentry/emails/error.txt'; confirm before changing.
    preview = MailPreview(
        html_template='sentry/emails/error.html',
        text_template='sentry/emails/error.html',
        context={
            'rule': rule,
            'group': group,
            'event': event,
            'link': 'http://example.com/link',
            'interfaces': interface_list,
            'tags': event.get_tags(),
        },
    )

    return render_to_response('sentry/debug/mail/preview.html', {
        'preview': preview,
    })
@login_required
def new_note(request):
    """Debug view: preview the 'new note' notification email, authored by
    the currently logged-in user against a synthetic event."""
    team = Team(
        id=1,
        slug='example',
        name='Example',
    )
    project = Project(
        id=1,
        slug='example',
        name='Example',
        team=team,
    )
    group = Group(
        id=1,
        project=project,
        message='This is an example event.',
    )
    event = Event(
        id=1,
        project=project,
        group=group,
        message=group.message,
        data=load_data('python'),
    )
    note = Activity(
        group=event.group, event=event, project=event.project,
        type=Activity.NOTE, user=request.user,
        data={'text': 'This is an example note!'},
    )

    preview = MailPreview(
        html_template='sentry/emails/new_note.html',
        text_template='sentry/emails/new_note.txt',
        context={
            'text': note.data['text'],
            'author': note.user,
            'date': note.datetime,
            'group': group,
            'link': group.get_absolute_url(),
        },
    )

    return render_to_response('sentry/debug/mail/preview.html', {
        'preview': preview,
    })
@login_required
def request_access(request):
    """Debug view: preview the 'request team access' notification email."""
    org = Organization(
        id=1,
        slug='example',
        name='Example',
    )
    team = Team(
        id=1,
        slug='example',
        name='Example',
        organization=org,
    )

    # Link the recipient follows to review pending access requests.
    members_url = absolute_uri(reverse('sentry-organization-members', kwargs={
        'organization_slug': org.slug,
    }) + '?ref=access-requests')

    preview = MailPreview(
        html_template='sentry/emails/request-team-access.html',
        text_template='sentry/emails/request-team-access.txt',
        context={
            'email': 'foo@example.com',
            'name': 'George Bush',
            'organization': org,
            'team': team,
            'url': members_url,
        },
    )

    return render_to_response('sentry/debug/mail/preview.html', {
        'preview': preview,
    })
@login_required
def access_approved(request):
    """Debug view: preview the 'access approved' notification email."""
    org = Organization(
        id=1,
        slug='example',
        name='Example',
    )
    team = Team(
        id=1,
        slug='example',
        name='Example',
        organization=org,
    )

    context = {
        'email': 'foo@example.com',
        'name': 'George Bush',
        'organization': org,
        'team': team,
    }
    preview = MailPreview(
        html_template='sentry/emails/access-approved.html',
        text_template='sentry/emails/access-approved.txt',
        context=context,
    )

    return render_to_response('sentry/debug/mail/preview.html', {
        'preview': preview,
    })
| bsd-3-clause |
bbansalWolfPack/servo | tests/wpt/css-tests/tools/manifest/manifest.py | 89 | 12720 | import json
import os
from collections import defaultdict
from item import item_types, ManualTest, WebdriverSpecTest, Stub, RefTest, TestharnessTest
from log import get_logger
from sourcefile import SourceFile
from utils import from_os_path, to_os_path
CURRENT_VERSION = 2
class ManifestError(Exception):
    # Base error for manifest (de)serialisation problems.
    pass
class ManifestVersionMismatch(ManifestError):
    # Raised when a serialised manifest's "version" != CURRENT_VERSION.
    pass
class Manifest(object):
    """In-memory index of all tests in a web-platform-tests checkout.

    Tests are grouped by item type ("testharness", "reftest", ...) and by
    source path.  Uncommitted working-copy changes are tracked separately
    in a LocalChanges overlay, so iteration merges committed state with
    local additions and deletions.
    """

    def __init__(self, git_rev=None, url_base="/"):
        # Dict of item_type: {path: set(manifest_items)}
        self._data = dict((item_type, defaultdict(set))
                          for item_type in item_types)
        self.rev = git_rev
        self.url_base = url_base
        self.local_changes = LocalChanges(self)
        # reftest nodes arranged as {path: set(manifest_items)}
        self.reftest_nodes = defaultdict(set)
        self.reftest_nodes_by_url = {}

    def _included_items(self, include_types=None):
        # Yield (item_type, {path: items}) with local changes overlaid on
        # committed data and locally-deleted paths removed.
        if include_types is None:
            include_types = item_types
        for item_type in include_types:
            paths = self._data[item_type].copy()
            for local_types, local_paths in self.local_changes.itertypes(item_type):
                for path, items in local_paths.iteritems():
                    paths[path] = items
            for path in self.local_changes.iterdeleted():
                if path in paths:
                    del paths[path]
            yield item_type, paths

    def contains_path(self, path):
        return any(path in paths for _, paths in self._included_items())

    def add(self, item):
        """Add an item; reftest nodes are additionally indexed by URL, and
        reference files are kept out of the runnable-test data."""
        if item is None:
            return

        is_reference = False
        if isinstance(item, RefTest):
            self.reftest_nodes[item.path].add(item)
            self.reftest_nodes_by_url[item.url] = item
            is_reference = item.is_reference

        if not is_reference:
            self._add(item)

        item.manifest = self

    def _add(self, item):
        self._data[item.item_type][item.path].add(item)

    def extend(self, items):
        for item in items:
            self.add(item)

    def remove_path(self, path):
        # Drop all committed items recorded for this path, across types.
        for item_type in item_types:
            if path in self._data[item_type]:
                del self._data[item_type][path]

    def itertypes(self, *types):
        # Iterate (path, items) pairs, sorted by path, for the given types
        # (all types when none specified), with local changes applied.
        if not types:
            types = None
        for item_type, items in self._included_items(types):
            for item in sorted(items.items()):
                yield item

    def __iter__(self):
        for item in self.itertypes():
            yield item

    def __getitem__(self, path):
        for _, paths in self._included_items():
            if path in paths:
                return paths[path]
        raise KeyError

    def get_reference(self, url):
        # Local changes take precedence over committed reftest nodes.
        if url in self.local_changes.reftest_nodes_by_url:
            return self.local_changes.reftest_nodes_by_url[url]

        if url in self.reftest_nodes_by_url:
            return self.reftest_nodes_by_url[url]

        return None

    def _committed_with_path(self, rel_path):
        # All committed items (including reftest nodes) stored for a path.
        rv = set()

        for paths_items in self._data.itervalues():
            rv |= paths_items.get(rel_path, set())

        if rel_path in self.reftest_nodes:
            rv |= self.reftest_nodes[rel_path]

        return rv

    def _committed_paths(self):
        rv = set()
        for paths_items in self._data.itervalues():
            rv |= set(paths_items.keys())
        return rv

    def update(self,
               tests_root,
               url_base,
               new_rev,
               committed_changes=None,
               local_changes=None,
               remove_missing_local=False):
        """Refresh the manifest.

        ``committed_changes``/``local_changes`` are iterables/dicts of
        (rel_path, status) where status is "modified" or anything else for
        deletion.  Committed modifications replace stored items; local
        modifications are recorded as an overlay in ``self.local_changes``.
        """
        if local_changes is None:
            local_changes = {}

        if committed_changes is not None:
            for rel_path, status in committed_changes:
                self.remove_path(rel_path)
                if status == "modified":
                    # Parse the committed version unless the file also has
                    # local edits (then read the committed blob explicitly).
                    use_committed = rel_path in local_changes
                    source_file = SourceFile(tests_root,
                                             rel_path,
                                             url_base,
                                             use_committed=use_committed)
                    self.extend(source_file.manifest_items())

        self.local_changes = LocalChanges(self)

        local_paths = set()
        for rel_path, status in local_changes.iteritems():
            local_paths.add(rel_path)

            if status == "modified":
                existing_items = self._committed_with_path(rel_path)
                source_file = SourceFile(tests_root,
                                         rel_path,
                                         url_base,
                                         use_committed=False)
                local_items = set(source_file.manifest_items())

                # Only record items that differ from the committed state.
                updated_items = local_items - existing_items
                self.local_changes.extend(updated_items)
            else:
                self.local_changes.add_deleted(rel_path)

        if remove_missing_local:
            # Paths present in the committed data but absent locally count
            # as deletions.
            for path in self._committed_paths() - local_paths:
                self.local_changes.add_deleted(path)

        self.update_reftests()

        if new_rev is not None:
            self.rev = new_rev
        self.url_base = url_base

    def update_reftests(self):
        """Recompute runnable reftests: a node with no inbound reference
        link is a test root and gets recorded under the "reftest" type."""
        reftest_nodes = self.reftest_nodes.copy()
        for path, items in self.local_changes.reftest_nodes.iteritems():
            reftest_nodes[path] |= items

        #TODO: remove locally deleted files

        tests = set()
        for items in reftest_nodes.values():
            tests |= set(item for item in items if not item.is_reference)

        # URLs that are referenced by some other node (i.e. pure references).
        has_inbound = set()
        for path, items in reftest_nodes.iteritems():
            for item in items:
                for ref_url, ref_type in item.references:
                    has_inbound.add(ref_url)

        # Write into the local overlay when it has reftest changes,
        # otherwise into the committed data.
        if self.local_changes.reftest_nodes:
            target = self.local_changes
        else:
            target = self

        #TODO: Warn if there exist unreachable reftest nodes
        for path, items in reftest_nodes.iteritems():
            for item in items:
                if item.url in has_inbound:
                    continue
                target._data["reftest"][path].add(item)

    def to_json(self):
        # Serialise with platform-independent (posix) path separators.
        out_items = {
            item_type: sorted(
                test.to_json()
                for _, tests in items.iteritems()
                for test in tests
            )
            for item_type, items in self._data.iteritems()
        }

        reftest_nodes = {from_os_path(key): [v.to_json() for v in value]
                         for key, value in self.reftest_nodes.iteritems()}

        rv = {"url_base": self.url_base,
              "rev": self.rev,
              "local_changes": self.local_changes.to_json(),
              "items": out_items,
              "reftest_nodes": reftest_nodes,
              "version": CURRENT_VERSION}

        return rv

    @classmethod
    def from_json(cls, tests_root, obj):
        """Reconstruct a Manifest from its to_json() representation;
        raises ManifestVersionMismatch for any other schema version."""
        version = obj.get("version")
        if version != CURRENT_VERSION:
            raise ManifestVersionMismatch

        self = cls(git_rev=obj["rev"],
                   url_base=obj.get("url_base", "/"))
        if not hasattr(obj, "iteritems"):
            raise ManifestError

        item_classes = {"testharness": TestharnessTest,
                        "reftest": RefTest,
                        "manual": ManualTest,
                        "stub": Stub,
                        "wdspec": WebdriverSpecTest}

        # Shared cache so items from the same path reuse one SourceFile.
        source_files = {}

        for k, values in obj["items"].iteritems():
            if k not in item_types:
                raise ManifestError
            for v in values:
                manifest_item = item_classes[k].from_json(self, tests_root, v,
                                                          source_files=source_files)
                self._add(manifest_item)

        for path, values in obj["reftest_nodes"].iteritems():
            path = to_os_path(path)
            for v in values:
                item = RefTest.from_json(self, tests_root, v,
                                         source_files=source_files)
                self.reftest_nodes[path].add(item)
                self.reftest_nodes_by_url[v["url"]] = item

        self.local_changes = LocalChanges.from_json(self,
                                                    tests_root,
                                                    obj["local_changes"],
                                                    source_files=source_files)

        return self
class LocalChanges(object):
    """Overlay on a Manifest recording uncommitted working-copy changes:
    added/modified items per type and a set of locally deleted paths."""

    def __init__(self, manifest):
        self.manifest = manifest
        self._data = dict((item_type, defaultdict(set)) for item_type in item_types)
        self._deleted = set()
        self.reftest_nodes = defaultdict(set)
        self.reftest_nodes_by_url = {}

    def add(self, item):
        # Mirrors Manifest.add: reftest nodes are URL-indexed, references
        # stay out of the runnable-test data; ownership points back at the
        # parent manifest.
        if item is None:
            return

        is_reference = False
        if isinstance(item, RefTest):
            self.reftest_nodes[item.path].add(item)
            self.reftest_nodes_by_url[item.url] = item
            is_reference = item.is_reference

        if not is_reference:
            self._add(item)

        item.manifest = self.manifest

    def _add(self, item):
        self._data[item.item_type][item.path].add(item)

    def extend(self, items):
        for item in items:
            self.add(item)

    def add_deleted(self, path):
        self._deleted.add(path)

    def is_deleted(self, path):
        return path in self._deleted

    def itertypes(self, *types):
        for item_type in types:
            yield item_type, self._data[item_type]

    def iterdeleted(self):
        for item in self._deleted:
            yield item

    def __getitem__(self, item_type):
        return self._data[item_type]

    def to_json(self):
        # Paths are serialised with posix separators via from_os_path.
        reftest_nodes = {from_os_path(key): [v.to_json() for v in value]
                         for key, value in self.reftest_nodes.iteritems()}

        rv = {"items": defaultdict(dict),
              "reftest_nodes": reftest_nodes,
              "deleted": [from_os_path(path) for path in self._deleted]}

        for test_type, paths in self._data.iteritems():
            for path, tests in paths.iteritems():
                path = from_os_path(path)
                rv["items"][test_type][path] = [test.to_json() for test in tests]

        return rv

    @classmethod
    def from_json(cls, manifest, tests_root, obj, source_files=None):
        """Reconstruct a LocalChanges overlay from its to_json() output."""
        self = cls(manifest)
        if not hasattr(obj, "iteritems"):
            raise ManifestError

        item_classes = {"testharness": TestharnessTest,
                        "reftest": RefTest,
                        "manual": ManualTest,
                        "stub": Stub,
                        "wdspec": WebdriverSpecTest}

        for test_type, paths in obj["items"].iteritems():
            for path, tests in paths.iteritems():
                for test in tests:
                    manifest_item = item_classes[test_type].from_json(manifest,
                                                                      tests_root,
                                                                      test,
                                                                      source_files=source_files)
                    self.add(manifest_item)

        for path, values in obj["reftest_nodes"].iteritems():
            path = to_os_path(path)
            for v in values:
                item = RefTest.from_json(self.manifest, tests_root, v,
                                         source_files=source_files)
                self.reftest_nodes[path].add(item)
                self.reftest_nodes_by_url[item.url] = item

        for item in obj["deleted"]:
            self.add_deleted(to_os_path(item))

        return self
def load(tests_root, manifest):
    """Load a Manifest from *manifest*: either a filesystem path or an
    already-open file-like object containing the JSON serialisation.
    A path that cannot be opened yields a fresh, empty Manifest."""
    logger = get_logger()

    if not isinstance(manifest, basestring):
        # File-like object: parse it directly.
        return Manifest.from_json(tests_root, json.load(manifest))

    if os.path.exists(manifest):
        logger.debug("Opening manifest at %s" % manifest)
    else:
        logger.debug("Creating new manifest at %s" % manifest)
    try:
        with open(manifest) as f:
            rv = Manifest.from_json(tests_root, json.load(f))
    except IOError:
        rv = Manifest(None)
    return rv
def write(manifest, manifest_path):
    """Serialise *manifest* as stable, pretty-printed JSON at *manifest_path*.

    Keys are sorted and separators fixed so that regenerating an unchanged
    manifest produces a byte-identical file; a trailing newline is appended.
    """
    serialised = json.dumps(manifest.to_json(), sort_keys=True, indent=2,
                            separators=(',', ': '))
    with open(manifest_path, "wb") as f:
        f.write(serialised)
        f.write("\n")
| mpl-2.0 |
ahmadiga/min_edx | common/lib/xmodule/xmodule/modulestore/draft_and_published.py | 71 | 5876 | """
This module provides an abstraction for Module Stores that support Draft and Published branches.
"""
import threading
from abc import ABCMeta, abstractmethod
from contextlib import contextmanager
from . import ModuleStoreEnum, BulkOperationsMixin
# Things w/ these categories should never be marked as version=DRAFT
DIRECT_ONLY_CATEGORIES = ['course', 'chapter', 'sequential', 'about', 'static_tab', 'course_info']
class BranchSettingMixin(object):
    """
    A mixin to manage a module store's branch setting.

    The effective setting is resolved in this order (highest precedence first):

    1. a thread-local value installed via the branch_setting context manager
    2. the return value of the branch_setting_func given at construction time
    3. ModuleStoreEnum.Branch.published_only
    """
    def __init__(self, *args, **kwargs):
        """
        :param branch_setting_func: a callable returning the default branch
            setting for this store.  When omitted, the default is
            ModuleStoreEnum.Branch.published_only.
        """
        self.default_branch_setting_func = kwargs.pop(
            'branch_setting_func',
            lambda: ModuleStoreEnum.Branch.published_only
        )
        super(BranchSettingMixin, self).__init__(*args, **kwargs)

        # Thread-local storage so concurrent requests can each pin their own
        # branch without interfering with one another.
        self.thread_cache = threading.local()

    @contextmanager
    def branch_setting(self, branch_setting, course_id=None):  # pylint: disable=unused-argument
        """
        Temporarily pin this store's branch on the current thread for the
        duration of the ``with`` block, restoring the prior value afterwards.
        """
        previous = getattr(self.thread_cache, 'branch_setting', None)
        try:
            self.thread_cache.branch_setting = branch_setting
            yield
        finally:
            self.thread_cache.branch_setting = previous

    def get_branch_setting(self, course_id=None):  # pylint: disable=unused-argument
        """
        Return the branch setting currently in effect: the thread-local
        override when one is set, otherwise the store's default.
        """
        # `or` preserves the original truthiness test: any falsy thread-local
        # value falls through to the default-setting function.
        return (getattr(self.thread_cache, 'branch_setting', None)
                or self.default_branch_setting_func())
class ModuleStoreDraftAndPublished(BranchSettingMixin, BulkOperationsMixin):
    """
    A mixin for a read-write database backend that supports two branches, Draft and Published, with
    options to prefer Draft and fallback to Published.

    Concrete module stores must implement every abstract method below.
    """
    __metaclass__ = ABCMeta

    @abstractmethod
    def delete_item(self, location, user_id, revision=None, **kwargs):
        """Delete the block at *location* on behalf of *user_id*."""
        raise NotImplementedError

    @abstractmethod
    def get_parent_location(self, location, revision=None, **kwargs):
        """Return the location of the parent of the block at *location*."""
        raise NotImplementedError

    @abstractmethod
    def has_changes(self, xblock):
        """Return whether *xblock* differs from its published version."""
        raise NotImplementedError

    @abstractmethod
    def publish(self, location, user_id):
        """Publish the block at *location* on behalf of *user_id*."""
        raise NotImplementedError

    @abstractmethod
    def unpublish(self, location, user_id):
        """
        Turn the published version into a draft, removing the published version.

        Raises: InvalidVersionError if called on a DIRECT_ONLY_CATEGORY
        """
        raise NotImplementedError

    @abstractmethod
    def revert_to_published(self, location, user_id):
        """Discard draft changes for the block at *location*."""
        raise NotImplementedError

    @abstractmethod
    def has_published_version(self, xblock):
        """Return whether a published version of *xblock* exists."""
        raise NotImplementedError

    @abstractmethod
    def convert_to_draft(self, location, user_id):
        """Create a draft version of the block at *location*."""
        raise NotImplementedError

    @abstractmethod
    def import_xblock(self, user_id, course_key, block_type, block_id, fields=None, runtime=None, **kwargs):
        """
        Import the given xblock into the current branch setting: import completely overwrites any
        existing block of the same id.

        In ModuleStoreDraftAndPublished, importing a published block ensures that access from the draft
        will get a block (either the one imported or a preexisting one). See xml_importer
        """
        raise NotImplementedError

    def _flag_publish_event(self, course_key):
        """
        Wrapper around calls to fire the course_published signal

        Unless we're nested in an active bulk operation, this simply fires the signal
        otherwise a publish will be signalled at the end of the bulk operation

        Arguments:
            course_key - course_key to which the signal applies
        """
        if self.signal_handler:
            bulk_record = self._get_bulk_ops_record(course_key) if isinstance(self, BulkOperationsMixin) else None
            if bulk_record and bulk_record.active:
                # Defer: the bulk-operation machinery emits one signal at the end.
                bulk_record.has_publish_item = True
            else:
                # We remove the branch, because publishing always means copying from draft to published
                self.signal_handler.send("course_published", course_key=course_key.for_branch(None))
class UnsupportedRevisionError(ValueError):
    """
    This error is raised if a method is called with an unsupported revision parameter.
    """
    def __init__(self, allowed_revisions=None):
        # Default to the revisions every draft/published store understands.
        allowed = allowed_revisions or [
            None,
            ModuleStoreEnum.RevisionOption.published_only,
            ModuleStoreEnum.RevisionOption.draft_only,
        ]
        message = 'revision not one of {}'.format(allowed)
        super(UnsupportedRevisionError, self).__init__(message)
| agpl-3.0 |
leesavide/pythonista-docs | Documentation/matplotlib/mpl_examples/pylab_examples/multi_image.py | 12 | 2201 | #!/usr/bin/env python
'''
Make a set of images with a single colormap, norm, and colorbar.
It also illustrates colorbar tick labelling with a multiplier.
'''
from matplotlib.pyplot import figure, show, axes, sci
from matplotlib import cm, colors
from matplotlib.font_manager import FontProperties
from numpy import amin, amax, ravel
from numpy.random import rand
# Grid layout: Nr rows by Nc columns of image panels.
Nr = 3
Nc = 2

fig = figure()
cmap = cm.cool

figtitle = 'Multiple images'
t = fig.text(0.5, 0.95, figtitle,
             horizontalalignment='center',
             fontproperties=FontProperties(size=16))

# Axes for the single shared horizontal colorbar at the bottom of the figure.
cax = fig.add_axes([0.2, 0.08, 0.6, 0.04])

# Width/height of each image panel, in figure coordinates.
w = 0.4
h = 0.22

ax = []
images = []
# Start with an impossible range so the first data set always narrows it.
vmin = 1e40
vmax = -1e40
for i in range(Nr):
    for j in range(Nc):
        pos = [0.075 + j*1.1*w, 0.18 + i*1.2*h, w, h]
        a = fig.add_axes(pos)
        if i > 0:
            # Only the bottom row keeps its x tick labels.
            a.set_xticklabels([])
        # Make some fake data with a range that varies
        # somewhat from one plot to the next.
        data =((1+i+j)/10.0)*rand(10,20)*1e-6
        dd = ravel(data)
        # Manually find the min and max of all colors for
        # use in setting the color scale.
        vmin = min(vmin, amin(dd))
        vmax = max(vmax, amax(dd))
        images.append(a.imshow(data, cmap=cmap))

        ax.append(a)
class ImageFollower:
    """Mirror the colormap and color limits of a leader image onto a follower.

    Instances are callables, intended to be registered as 'changed' callbacks
    so the follower image tracks the leader's cmap/clim.
    """
    def __init__(self, follower):
        self.follower = follower

    def __call__(self, leader):
        target = self.follower
        target.set_cmap(leader.get_cmap())
        target.set_clim(leader.get_clim())
# Share a single normalization (computed from the global min/max above) so
# every panel maps data to colors identically.
norm = colors.Normalize(vmin=vmin, vmax=vmax)
for i, im in enumerate(images):
    im.set_norm(norm)
    if i > 0:
        # Follower images track the first (master) image's cmap/clim.
        images[0].callbacksSM.connect('changed', ImageFollower(im))

# The colorbar is also based on this master image.
fig.colorbar(images[0], cax, orientation='horizontal')

# We need the following only if we want to run this interactively and
# modify the colormap:

axes(ax[0]) # Return the current axes to the first one,
sci(images[0]) # because the current image must be in current axes.

show()
| apache-2.0 |
wolfflow/electron | script/upload-windows-pdb.py | 156 | 1174 | #!/usr/bin/env python
import os
import glob
import sys
from lib.config import s3_config
from lib.util import atom_gyp, execute, rm_rf, safe_mkdir, s3put
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
SYMBOLS_DIR = 'dist\\symbols'
DOWNLOAD_DIR = 'vendor\\brightray\\vendor\\download\\libchromiumcontent'
PROJECT_NAME = atom_gyp()['project_name%']
PRODUCT_NAME = atom_gyp()['product_name%']
PDB_LIST = [
'out\\R\\{0}.exe.pdb'.format(PROJECT_NAME),
'out\\R\\node.dll.pdb',
]
def main():
  """Build a symbol store from the release .pdb files and upload it to S3."""
  os.chdir(SOURCE_ROOT)
  rm_rf(SYMBOLS_DIR)
  safe_mkdir(SYMBOLS_DIR)
  for pdb in PDB_LIST:
    run_symstore(pdb, SYMBOLS_DIR, PRODUCT_NAME)

  bucket, access_key, secret_key = s3_config()
  files = glob.glob(SYMBOLS_DIR + '/*.pdb/*/*.pdb')
  # NOTE(review): lowercasing presumably matches the case expected by the
  # symbol server's lookups — confirm before changing.
  files = [f.lower() for f in files]
  upload_symbols(bucket, access_key, secret_key, files)
def run_symstore(pdb, dest, product):
  """Register *pdb* (recursively) with the symbol store rooted at *dest*."""
  command = ['symstore', 'add', '/r', '/f', pdb, '/s', dest, '/t', product]
  execute(command)
def upload_symbols(bucket, access_key, secret_key, files):
  # Mirror the local symbol store into the bucket's atom-shell/symbols prefix.
  s3put(bucket, access_key, secret_key, SYMBOLS_DIR, 'atom-shell/symbols',
        files)
if __name__ == '__main__':
sys.exit(main())
| mit |
DARKPOP/external_chromium_org | chrome/test/chromedriver/run_buildbot_steps.py | 26 | 19204 | #!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Runs all the buildbot steps for ChromeDriver except for update/compile."""
import bisect
import csv
import datetime
import glob
import json
import optparse
import os
import platform as platform_module
import re
import shutil
import StringIO
import sys
import tempfile
import time
import urllib2
_THIS_DIR = os.path.abspath(os.path.dirname(__file__))
GS_CHROMEDRIVER_BUCKET = 'gs://chromedriver'
GS_CHROMEDRIVER_DATA_BUCKET = 'gs://chromedriver-data'
GS_CHROMEDRIVER_RELEASE_URL = 'http://chromedriver.storage.googleapis.com'
GS_CONTINUOUS_URL = GS_CHROMEDRIVER_DATA_BUCKET + '/continuous'
GS_PREBUILTS_URL = GS_CHROMEDRIVER_DATA_BUCKET + '/prebuilts'
GS_SERVER_LOGS_URL = GS_CHROMEDRIVER_DATA_BUCKET + '/server_logs'
SERVER_LOGS_LINK = (
'http://chromedriver-data.storage.googleapis.com/server_logs')
TEST_LOG_FORMAT = '%s_log.json'
GS_GIT_LOG_URL = (
'https://chromium.googlesource.com/chromium/src/+/%s?format=json')
GS_SEARCH_PATTERN = (
r'Cr-Commit-Position: refs/heads/master@{#(\d+)}')
CR_REV_URL = 'https://cr-rev.appspot.com/_ah/api/crrev/v1/redirect/%s'
SCRIPT_DIR = os.path.join(_THIS_DIR, os.pardir, os.pardir, os.pardir, os.pardir,
os.pardir, os.pardir, os.pardir, 'scripts')
SITE_CONFIG_DIR = os.path.join(_THIS_DIR, os.pardir, os.pardir, os.pardir,
os.pardir, os.pardir, os.pardir, os.pardir,
'site_config')
sys.path.append(SCRIPT_DIR)
sys.path.append(SITE_CONFIG_DIR)
import archive
import chrome_paths
from slave import gsutil_download
from slave import slave_utils
import util
def _ArchivePrebuilts(revision):
"""Uploads the prebuilts to google storage."""
util.MarkBuildStepStart('archive prebuilts')
zip_path = util.Zip(os.path.join(chrome_paths.GetBuildDir(['chromedriver']),
'chromedriver'))
if slave_utils.GSUtilCopy(
zip_path,
'%s/%s' % (GS_PREBUILTS_URL, 'r%s.zip' % revision)):
util.MarkBuildStepError()
def _ArchiveServerLogs():
"""Uploads chromedriver server logs to google storage."""
util.MarkBuildStepStart('archive chromedriver server logs')
for server_log in glob.glob(os.path.join(tempfile.gettempdir(),
'chromedriver_*')):
base_name = os.path.basename(server_log)
util.AddLink(base_name, '%s/%s' % (SERVER_LOGS_LINK, base_name))
slave_utils.GSUtilCopy(
server_log,
'%s/%s' % (GS_SERVER_LOGS_URL, base_name),
mimetype='text/plain')
def _DownloadPrebuilts():
"""Downloads the most recent prebuilts from google storage."""
util.MarkBuildStepStart('Download latest chromedriver')
zip_path = os.path.join(util.MakeTempDir(), 'build.zip')
if gsutil_download.DownloadLatestFile(GS_PREBUILTS_URL,
GS_PREBUILTS_URL + '/r',
zip_path):
util.MarkBuildStepError()
util.Unzip(zip_path, chrome_paths.GetBuildDir(['host_forwarder']))
def _GetTestResultsLog(platform):
  """Gets the test results log for the given platform.

  Args:
    platform: The platform that the test results log is for.

  Returns:
    A dictionary where the keys are SVN revisions and the values are booleans
    indicating whether the tests passed.
  """
  # mkstemp() returns an open OS-level file descriptor in addition to the
  # path; close it immediately and delete the file when done.  The previous
  # implementation leaked both the descriptor and the file.
  temp_fd, temp_log = tempfile.mkstemp()
  os.close(temp_fd)
  try:
    log_name = TEST_LOG_FORMAT % platform
    result = slave_utils.GSUtilDownloadFile(
        '%s/%s' % (GS_CHROMEDRIVER_DATA_BUCKET, log_name), temp_log)
    if result:
      # Download failed; treat as an empty log.
      return {}
    with open(temp_log, 'rb') as log_file:
      json_dict = json.load(log_file)
    # Workaround for json encoding dictionary keys as strings.
    return dict([(int(v[0]), v[1]) for v in json_dict.items()])
  finally:
    os.remove(temp_log)
def _PutTestResultsLog(platform, test_results_log):
"""Pushes the given test results log to google storage."""
temp_dir = util.MakeTempDir()
log_name = TEST_LOG_FORMAT % platform
log_path = os.path.join(temp_dir, log_name)
with open(log_path, 'wb') as log_file:
json.dump(test_results_log, log_file)
if slave_utils.GSUtilCopyFile(log_path, GS_CHROMEDRIVER_DATA_BUCKET):
raise Exception('Failed to upload test results log to google storage')
def _UpdateTestResultsLog(platform, revision, passed):
  """Updates the test results log for the given platform.

  Args:
    platform: The platform name.
    revision: The SVN revision number.
    passed: Boolean indicating whether the tests passed at this revision.
  """
  assert isinstance(revision, int), 'The revision must be an integer'
  log = _GetTestResultsLog(platform)
  # Cap the log size by dropping the oldest recorded revision.
  if len(log) > 500:
    del log[min(log.keys())]
  assert revision not in log, 'Results already exist for revision %s' % revision
  log[revision] = bool(passed)
  _PutTestResultsLog(platform, log)
def _GetVersion():
  """Return the current ChromeDriver version string from the VERSION file."""
  version_path = os.path.join(_THIS_DIR, 'VERSION')
  with open(version_path, 'r') as version_file:
    return version_file.read().strip()
def _GetSupportedChromeVersions():
"""Get the minimum and maximum supported Chrome versions.
Returns:
A tuple of the form (min_version, max_version).
"""
# Minimum supported Chrome version is embedded as:
# const int kMinimumSupportedChromeVersion[] = {27, 0, 1453, 0};
with open(os.path.join(_THIS_DIR, 'chrome', 'version.cc'), 'r') as f:
lines = f.readlines()
chrome_min_version_line = [
x for x in lines if 'kMinimumSupportedChromeVersion' in x]
chrome_min_version = chrome_min_version_line[0].split('{')[1].split(',')[0]
with open(os.path.join(chrome_paths.GetSrc(), 'chrome', 'VERSION'), 'r') as f:
chrome_max_version = f.readlines()[0].split('=')[1].strip()
return (chrome_min_version, chrome_max_version)
def _RevisionState(test_results_log, revision):
  """Check the state of tests at a given SVN revision.

  A revision with no direct log entry counts as passed only when the nearest
  logged revisions on both sides of it passed.

  Args:
    test_results_log: A test results log dictionary from _GetTestResultsLog().
    revision: The revision to check at.

  Returns:
    'passed', 'failed', or 'unknown'
  """
  assert isinstance(revision, int), 'The revision must be an integer'
  # Direct hit: tests ran at exactly this revision.
  if revision in test_results_log:
    return 'passed' if test_results_log[revision] else 'failed'

  ordered = sorted(test_results_log)
  after = bisect.bisect_right(ordered, revision)
  if after == len(ordered):
    # Nothing logged at or beyond this revision yet.
    return 'unknown'
  if after == 0:
    # Nothing logged before this revision; assume it failed.
    return 'failed'
  # Passed only if the logged revisions on both sides passed.
  if test_results_log[ordered[after]] and test_results_log[ordered[after - 1]]:
    return 'passed'
  return 'failed'
def _ArchiveGoodBuild(platform, revision):
"""Archive chromedriver binary if the build is green."""
assert platform != 'android'
util.MarkBuildStepStart('archive build')
server_name = 'chromedriver'
if util.IsWindows():
server_name += '.exe'
zip_path = util.Zip(os.path.join(chrome_paths.GetBuildDir([server_name]),
server_name))
build_name = 'chromedriver_%s_%s.%s.zip' % (
platform, _GetVersion(), revision)
build_url = '%s/%s' % (GS_CONTINUOUS_URL, build_name)
if slave_utils.GSUtilCopy(zip_path, build_url):
util.MarkBuildStepError()
(latest_fd, latest_file) = tempfile.mkstemp()
os.write(latest_fd, build_name)
os.close(latest_fd)
latest_url = '%s/latest_%s' % (GS_CONTINUOUS_URL, platform)
if slave_utils.GSUtilCopy(latest_file, latest_url, mimetype='text/plain'):
util.MarkBuildStepError()
os.remove(latest_file)
def _WasReleased(version, platform):
  """Return True if chromedriver *version* is already released for *platform*."""
  release_url = '%s/%s/chromedriver_%s.zip' % (
      GS_CHROMEDRIVER_BUCKET, version, platform)
  # GSUtilListBucket returns a zero status when the object exists.
  result, _ = slave_utils.GSUtilListBucket(release_url, [])
  return result == 0
def _MaybeRelease(platform):
"""Releases a release candidate if conditions are right."""
assert platform != 'android'
version = _GetVersion()
# Check if the current version has already been released.
if _WasReleased(version, platform):
return
# Fetch Android test results.
android_test_results = _GetTestResultsLog('android')
# Fetch release candidates.
result, output = slave_utils.GSUtilListBucket(
'%s/chromedriver_%s_%s*' % (
GS_CONTINUOUS_URL, platform, version),
[])
assert result == 0 and output, 'No release candidates found'
candidate_pattern = re.compile(
r'.*/chromedriver_%s_%s\.(\d+)\.zip$' % (platform, version))
candidates = []
for line in output.strip().split('\n'):
result = candidate_pattern.match(line)
if not result:
print 'Ignored line "%s"' % line
continue
candidates.append(int(result.group(1)))
# Release the latest candidate build that passed Android, if any.
# In this way, if a hot fix is needed, we can delete the release from
# the chromedriver bucket instead of bumping up the release version number.
candidates.sort(reverse=True)
for revision in candidates:
android_result = _RevisionState(android_test_results, revision)
if android_result == 'failed':
print 'Android tests did not pass at revision', revision
elif android_result == 'passed':
print 'Android tests passed at revision', revision
candidate = 'chromedriver_%s_%s.%s.zip' % (platform, version, revision)
_Release('%s/%s' % (GS_CONTINUOUS_URL, candidate), version, platform)
break
else:
print 'Android tests have not run at a revision as recent as', revision
def _Release(build, version, platform):
"""Releases the given candidate build."""
release_name = 'chromedriver_%s.zip' % platform
util.MarkBuildStepStart('releasing %s' % release_name)
temp_dir = util.MakeTempDir()
slave_utils.GSUtilCopy(build, temp_dir)
zip_path = os.path.join(temp_dir, os.path.basename(build))
if util.IsLinux():
util.Unzip(zip_path, temp_dir)
server_path = os.path.join(temp_dir, 'chromedriver')
util.RunCommand(['strip', server_path])
zip_path = util.Zip(server_path)
slave_utils.GSUtilCopy(
zip_path, '%s/%s/%s' % (GS_CHROMEDRIVER_BUCKET, version, release_name))
_MaybeUploadReleaseNotes(version)
_MaybeUpdateLatestRelease(version)
def _GetWebPageContent(url):
"""Return the content of the web page specified by the given url."""
return urllib2.urlopen(url).read()
def _MaybeUploadReleaseNotes(version):
  """Upload release notes if conditions are right.

  Skips uploading when notes for *version* already exist in the bucket;
  otherwise prepends a new section (built from ToBeReleased tracker issues)
  to the previous version's notes and uploads the result.
  """
  # Check if the current version has already been released.
  notes_name = 'notes.txt'
  notes_url = '%s/%s/%s' % (GS_CHROMEDRIVER_BUCKET, version, notes_name)
  prev_version = '.'.join([version.split('.')[0],
                           str(int(version.split('.')[1]) - 1)])
  prev_notes_url = '%s/%s/%s' % (
      GS_CHROMEDRIVER_BUCKET, prev_version, notes_name)
  result, _ = slave_utils.GSUtilListBucket(notes_url, [])
  if result == 0:
    return

  # Collect the issues marked ToBeReleased from the bug tracker.
  fixed_issues = []
  query = ('https://code.google.com/p/chromedriver/issues/csv?'
           'q=status%3AToBeReleased&colspec=ID%20Summary')
  issues = StringIO.StringIO(_GetWebPageContent(query).split('\n', 1)[1])
  for issue in csv.reader(issues):
    if not issue:
      continue
    issue_id = issue[0]
    desc = issue[1]
    # NOTE(review): the colspec only requests ID and Summary; issue[2] relies
    # on the tracker returning extra columns -- verify against a live export.
    labels = issue[2]
    fixed_issues += ['Resolved issue %s: %s [%s]' % (issue_id, desc, labels)]

  old_notes = ''
  # Bug fix: mkstemp() also returns an open OS-level file descriptor, which
  # the previous code discarded (leaking it).  Close it before reusing the
  # path with open().
  notes_fd, temp_notes_fname = tempfile.mkstemp()
  os.close(notes_fd)
  if not slave_utils.GSUtilDownloadFile(prev_notes_url, temp_notes_fname):
    with open(temp_notes_fname, 'rb') as f:
      old_notes = f.read()

  new_notes = '----------ChromeDriver v%s (%s)----------\n%s\n%s\n\n%s' % (
      version, datetime.date.today().isoformat(),
      'Supports Chrome v%s-%s' % _GetSupportedChromeVersions(),
      '\n'.join(fixed_issues),
      old_notes)
  with open(temp_notes_fname, 'w') as f:
    f.write(new_notes)

  if slave_utils.GSUtilCopy(temp_notes_fname, notes_url, mimetype='text/plain'):
    util.MarkBuildStepError()
def _MaybeUpdateLatestRelease(version):
"""Update the file LATEST_RELEASE with the latest release version number."""
latest_release_fname = 'LATEST_RELEASE'
latest_release_url = '%s/%s' % (GS_CHROMEDRIVER_BUCKET, latest_release_fname)
# Check if LATEST_RELEASE is up-to-date.
latest_released_version = _GetWebPageContent(
'%s/%s' % (GS_CHROMEDRIVER_RELEASE_URL, latest_release_fname))
if version == latest_released_version:
return
# Check if chromedriver was released on all supported platforms.
supported_platforms = ['linux32', 'linux64', 'mac32', 'win32']
for platform in supported_platforms:
if not _WasReleased(version, platform):
return
util.MarkBuildStepStart('updating LATEST_RELEASE to %s' % version)
temp_latest_release_fname = tempfile.mkstemp()[1]
with open(temp_latest_release_fname, 'w') as f:
f.write(version)
if slave_utils.GSUtilCopy(temp_latest_release_fname, latest_release_url,
mimetype='text/plain'):
util.MarkBuildStepError()
def _CleanTmpDir():
tmp_dir = tempfile.gettempdir()
print 'cleaning temp directory:', tmp_dir
for file_name in os.listdir(tmp_dir):
file_path = os.path.join(tmp_dir, file_name)
if os.path.isdir(file_path):
print 'deleting sub-directory', file_path
shutil.rmtree(file_path, True)
if file_name.startswith('chromedriver_'):
print 'deleting file', file_path
os.remove(file_path)
def _GetCommitPositionFromGitHash(snapshot_hashcode):
  """Look up the Cr-Commit-Position number for a git hash via gitiles.

  Returns:
    The commit position as a string, or None on any error.
  """
  json_url = GS_GIT_LOG_URL % snapshot_hashcode
  try:
    response = urllib2.urlopen(json_url)
  except urllib2.HTTPError as error:
    util.PrintAndFlush('HTTP Error %d' % error.getcode())
    return None
  except urllib2.URLError as error:
    # NOTE: URLError.message is a Python 2-ism.
    util.PrintAndFlush('URL Error %s' % error.message)
    return None
  # Gitiles prefixes its JSON responses with a )]}' guard line; skip the
  # first 4 bytes before parsing.
  data = json.loads(response.read()[4:])
  if 'message' in data:
    message = data['message'].split('\n')
    message = [line for line in message if line.strip()]
    search_pattern = re.compile(GS_SEARCH_PATTERN)
    # The Cr-Commit-Position footer is expected on the last non-empty line.
    result = search_pattern.search(message[len(message)-1])
    if result:
      return result.group(1)
  util.PrintAndFlush('Failed to get svn revision number for %s' %
                     snapshot_hashcode)
  return None
def _GetGitHashFromCommitPosition(commit_position):
json_url = CR_REV_URL % commit_position
try:
response = urllib2.urlopen(json_url)
except urllib2.HTTPError as error:
util.PrintAndFlush('HTTP Error %d' % error.getcode())
return None
except urllib2.URLError as error:
util.PrintAndFlush('URL Error %s' % error.message)
return None
data = json.loads(response.read())
if 'git_sha' in data:
return data['git_sha']
util.PrintAndFlush('Failed to get git hash for %s' % commit_position)
return None
def _WaitForLatestSnapshot(revision):
util.MarkBuildStepStart('wait_for_snapshot')
def _IsRevisionNumber(revision):
if isinstance(revision, int):
return True
else:
return revision.isdigit()
while True:
snapshot_revision = archive.GetLatestSnapshotVersion()
if not _IsRevisionNumber(snapshot_revision):
snapshot_revision = _GetCommitPositionFromGitHash(snapshot_revision)
if revision is not None and snapshot_revision is not None:
if int(snapshot_revision) >= int(revision):
break
util.PrintAndFlush('Waiting for snapshot >= %s, found %s' %
(revision, snapshot_revision))
time.sleep(60)
util.PrintAndFlush('Got snapshot revision %s' % snapshot_revision)
def _AddToolsToPath(platform_name):
"""Add some tools like Ant and Java to PATH for testing steps to use."""
paths = []
error_message = ''
if platform_name == 'win32':
paths = [
# Path to Ant and Java, required for the java acceptance tests.
'C:\\Program Files (x86)\\Java\\ant\\bin',
'C:\\Program Files (x86)\\Java\\jre\\bin',
]
error_message = ('Java test steps will fail as expected and '
'they can be ignored.\n'
'Ant, Java or others might not be installed on bot.\n'
'Please refer to page "WATERFALL" on site '
'go/chromedriver.')
if paths:
util.MarkBuildStepStart('Add tools to PATH')
path_missing = False
for path in paths:
if not os.path.isdir(path) or not os.listdir(path):
print 'Directory "%s" is not found or empty.' % path
path_missing = True
if path_missing:
print error_message
util.MarkBuildStepError()
return
os.environ['PATH'] += os.pathsep + os.pathsep.join(paths)
def main():
parser = optparse.OptionParser()
parser.add_option(
'', '--android-packages',
help=('Comma separated list of application package names, '
'if running tests on Android.'))
parser.add_option(
'-r', '--revision', help='Chromium revision')
parser.add_option(
'', '--update-log', action='store_true',
help='Update the test results log (only applicable to Android)')
options, _ = parser.parse_args()
bitness = '32'
if util.IsLinux() and platform_module.architecture()[0] == '64bit':
bitness = '64'
platform = '%s%s' % (util.GetPlatformName(), bitness)
if options.android_packages:
platform = 'android'
_CleanTmpDir()
if not options.revision:
commit_position = None
elif options.revision.isdigit():
commit_position = options.revision
else:
commit_position = _GetCommitPositionFromGitHash(options.revision)
if platform == 'android':
if not options.revision and options.update_log:
parser.error('Must supply a --revision with --update-log')
_DownloadPrebuilts()
else:
if not options.revision:
parser.error('Must supply a --revision')
if platform == 'linux64':
_ArchivePrebuilts(commit_position)
_WaitForLatestSnapshot(commit_position)
_AddToolsToPath(platform)
cmd = [
sys.executable,
os.path.join(_THIS_DIR, 'test', 'run_all_tests.py'),
]
if platform == 'android':
cmd.append('--android-packages=' + options.android_packages)
passed = (util.RunCommand(cmd) == 0)
_ArchiveServerLogs()
if platform == 'android':
if options.update_log:
util.MarkBuildStepStart('update test result log')
_UpdateTestResultsLog(platform, commit_position, passed)
elif passed:
_ArchiveGoodBuild(platform, commit_position)
_MaybeRelease(platform)
if not passed:
# Make sure the build is red if there is some uncaught exception during
# running run_all_tests.py.
util.MarkBuildStepStart('run_all_tests.py')
util.MarkBuildStepError()
# Add a "cleanup" step so that errors from runtest.py or bb_device_steps.py
# (which invoke this script) are kept in thier own build step.
util.MarkBuildStepStart('cleanup')
if __name__ == '__main__':
main()
| bsd-3-clause |
pedrobaeza/OpenUpgrade | addons/fleet/migrations/8.0.0.1/post_migration.py | 16 | 1304 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2014 Akretion
# (<http://www.akretion.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import pooler, SUPERUSER_ID
from openerp.openupgrade import openupgrade, openupgrade_80
@openupgrade.migrate()
def migrate(cr, version):
    """Recompute mail message_last_post for fleet vehicles after the 8.0 upgrade.

    Bug fix: the model name was previously spelled 'fleet.vehicule'; the model
    registered by the fleet module is 'fleet.vehicle', so the helper silently
    matched no records.
    """
    pool = pooler.get_pool(cr.dbname)
    uid = SUPERUSER_ID
    openupgrade_80.set_message_last_post(
        cr, uid, pool, ['fleet.vehicle']
    )
| agpl-3.0 |
ajponte/yelpML | maps/tests/7.py | 1 | 4726 | test = {
'name': 'Problem 7',
'points': 3,
'suites': [
{
'cases': [
{
'answer': '7b94a2861b435311f9fceeb5e6f092c4',
'choices': [
'the restaurants in restaurants',
'the names of restaurants in restaurants',
'the extracted values for each restaurant in restaurants',
'the restaurants reviewed by user'
],
'hidden': False,
'locked': True,
'question': 'What does the list xs represent?'
},
{
'answer': '901ae86eb8ae688b7a7ca7c1f77cab35',
'choices': [
'the ratings for the restaurants reviewed by user',
'the ratings for the restaurants in restaurants',
'the names for the restaurants reviewed by user',
'the names for the restaurants in restaurants'
],
'hidden': False,
'locked': True,
'question': 'What does the list ys represent?'
}
],
'scored': False,
'type': 'concept'
},
{
'cases': [
{
'code': r"""
>>> user = make_user('John D.', [
... make_review('A', 1),
... make_review('B', 5),
... make_review('C', 2),
... make_review('D', 2.5),
... ])
>>> restaurant = make_restaurant('New', [-10, 2], [], 2, [
... make_review('New', 4),
... ])
>>> cluster = [
... make_restaurant('B', [4, 2], [], 1, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 2], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3),
... ]),
... ]
>>> pred, r_squared = find_predictor(user, cluster, restaurant_price)
>>> round(pred(restaurant), 5)
4.0
>>> round(r_squared, 5)
1.0
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> user = make_user('John D.', [
... make_review('A', 1),
... make_review('B', 5),
... make_review('C', 2),
... make_review('D', 2.5),
... ])
>>> restaurant = make_restaurant('New', [-10, 2], [], 2, [
... make_review('New', 4),
... ])
>>> cluster = [
... make_restaurant('B', [4, 2], [], 1, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 2], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3),
... ]),
... ]
>>> pred, r_squared = find_predictor(user, cluster, restaurant_mean_rating)
>>> round(pred(restaurant), 5)
3.9359
>>> round(r_squared, 5)
0.99256
""",
'hidden': False,
'locked': False
},
{
'code': r"""
>>> user = make_user('John D.', [
... make_review('A', 1),
... make_review('B', 5),
... make_review('C', 2),
... make_review('D', 2.5),
... ])
>>> restaurant = make_restaurant('New', [-10, 2], [], 2, [
... make_review('New', 4),
... ])
>>> cluster = [
... make_restaurant('B', [4, 2], [], 1, [
... make_review('B', 5)
... ]),
... make_restaurant('C', [-2, 6], [], 4, [
... make_review('C', 2)
... ]),
... make_restaurant('D', [4, 2], [], 3.5, [
... make_review('D', 2.5),
... make_review('D', 3),
... ]),
... ]
>>> pred, r_squared = find_predictor(user, cluster, restaurant_num_ratings)
>>> round(pred(restaurant), 5)
3.5
>>> round(r_squared, 5)
0.12903
""",
'hidden': False,
'locked': False
}
],
'scored': True,
'setup': r"""
>>> import tests.test_functions as test
>>> import recommend
>>> test.swap_implementations(recommend)
>>> from recommend import *
""",
'teardown': r"""
>>> test.restore_implementations(recommend)
""",
'type': 'doctest'
}
]
} | mit |
stephanehenry27/Sickbeard-anime | tests/db_tests.py | 4 | 1365 | # coding=UTF-8
# Author: Dennis Lutter <lad1337@gmail.com>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import unittest
import test_lib as test
from sickbeard import db
class DBBasicTests(test.SickbeardTestDBCase):
    """Smoke tests for the sickbeard db layer."""

    def test_select(self):
        # Query a show id that cannot exist (0000 == 0); this only verifies
        # that select() executes against the test database without raising.
        curDB = db.DBConnection()
        curDB.select("SELECT * FROM tv_episodes WHERE showid = ? AND location != ''", [0000])
if __name__ == '__main__':
print "=================="
print "STARTING - DB TESTS"
print "=================="
print "######################################################################"
suite = unittest.TestLoader().loadTestsFromTestCase(DBBasicTests)
unittest.TextTestRunner(verbosity=2).run(suite)
| gpl-3.0 |
dvliman/jaikuengine | .google_appengine/lib/django-1.3/tests/regressiontests/backends/models.py | 55 | 2436 | from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db import connection
class Square(models.Model):
    """Pairs an integer with its (externally computed) square, for backend tests."""
    root = models.IntegerField()
    # Stored separately rather than derived; tests populate both columns.
    square = models.PositiveIntegerField()

    def __unicode__(self):
        return "%s ** 2 == %s" % (self.root, self.square)
class Person(models.Model):
    """Minimal person record used as an M2M target in the long-name model below."""
    first_name = models.CharField(max_length=20)
    last_name = models.CharField(max_length=20)

    def __unicode__(self):
        return u'%s %s' % (self.first_name, self.last_name)
class SchoolClass(models.Model):
    """Simple model exercising integer, optional char and datetime columns."""
    year = models.PositiveIntegerField()
    # Optional: blank=True allows an empty string here.
    day = models.CharField(max_length=9, blank=True)
    last_updated = models.DateTimeField()
# Unfortunately, the following model breaks MySQL hard.
# Until #13711 is fixed, this test can't be run under MySQL.
# Defined conditionally so backends that truncate long identifiers (MySQL)
# never see the table at all.
if connection.features.supports_long_model_names:
    class VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ(models.Model):
        class Meta:
            # We need to use a short actual table name or
            # we hit issue #8548 which we're not testing!
            verbose_name = 'model_with_long_table_name'
        # Explicit PK with a deliberately long column name to stress identifier limits.
        primary_key_is_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.AutoField(primary_key=True)
        charfield_is_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.CharField(max_length=100)
        # M2M adds an implicit join table, whose generated name is also long.
        m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz = models.ManyToManyField(Person,blank=True)
class Tag(models.Model):
    """Generic tag attachable to any model via the contenttypes framework."""
    name = models.CharField(max_length=30)
    content_type = models.ForeignKey(ContentType, related_name='backend_tags')
    object_id = models.PositiveIntegerField()
    # Combines content_type + object_id into a single generic relation accessor.
    content_object = generic.GenericForeignKey('content_type', 'object_id')
class Post(models.Model):
    """Taggable post; uses a mixed-case table name to test case sensitivity."""
    name = models.CharField(max_length=30)
    text = models.TextField()
    # Reverse side of Tag's GenericForeignKey.
    tags = generic.GenericRelation('Tag')

    class Meta:
        # Deliberately mixed-case to exercise backend identifier handling.
        db_table = 'CaseSensitive_Post'
class Reporter(models.Model):
    """Author of Articles (FK target below)."""
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)

    def __unicode__(self):
        return u"%s %s" % (self.first_name, self.last_name)
class Article(models.Model):
    """Article written by a Reporter; exercises FK and date columns."""
    headline = models.CharField(max_length=100)
    pub_date = models.DateField()
    reporter = models.ForeignKey(Reporter)

    def __unicode__(self):
        return self.headline
| apache-2.0 |
ezequielpereira/Time-Line | libs64/wx/lib/agw/ribbon/control.py | 6 | 5517 | """
L{RibbonControl} serves as a base class for all controls which share the ribbon
characteristics of having a ribbon art provider, and (optionally) non-continuous
resizing.
Description
===========
Despite what the name may imply, it is not the top-level control for creating a
ribbon interface - that is L{RibbonBar}. Ribbon controls often have a region which
is "transparent", and shows the contents of the ribbon page or panel behind it.
If implementing a new ribbon control, then it may be useful to realise that this
effect is done by the art provider when painting the background of the control,
and hence in the paint handler for the new control, you should call a draw background
method on the art provider (L{RibbonMSWArtProvider.DrawButtonBarBackground} and
L{RibbonMSWArtProvider.DrawToolBarBackground} typically just redraw what is behind the
rectangle being painted) if you want transparent regions.
"""
import wx
class RibbonControl(wx.PyControl):
    """
    Base class for ribbon controls: stores the shared art provider and
    implements the (optionally non-continuous) sizing protocol used by the
    ribbon layout code.
    """

    def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize, style=0,
                 validator=wx.DefaultValidator, name="RibbonControl"):
        """
        Default constructor.

        :param `parent`: the parent window; if it is itself a L{RibbonControl},
         its art provider is inherited.
        """
        wx.PyControl.__init__(self, parent, id, pos, size, style, validator, name)
        self._art = None

        # Inherit the art provider from a ribbon parent so that newly created
        # child controls draw consistently without explicit configuration.
        if isinstance(parent, RibbonControl):
            self._art = parent.GetArtProvider()

    def SetArtProvider(self, art):
        """
        Set the art provider to be used.

        In many cases, setting the art provider will also set the art provider on all
        child windows which extend L{RibbonControl}. In most cases, controls will not
        take ownership of the given pointer, with the notable exception being
        L{RibbonBar.SetArtProvider}.

        :param `art`: the art provider to use for drawing this control
         (presumably a L{RibbonMSWArtProvider}-compatible object — confirm
         against the concrete control's drawing code).
        """
        self._art = art

    def GetArtProvider(self):
        """
        Get the art provider to be used.

        Note that until an art provider has been set in some way, this function may
        return ``None``.
        """
        return self._art

    def IsSizingContinuous(self):
        """
        :returns: ``True`` if this window can take any size (greater than its minimum size),
         ``False`` if it can only take certain sizes.

        :see: L{GetNextSmallerSize}, L{GetNextLargerSize}
        """
        return True

    def DoGetNextSmallerSize(self, direction, size):
        """
        Implementation of L{GetNextSmallerSize}.

        Controls which have non-continuous sizing must override this virtual function
        rather than L{GetNextSmallerSize}.

        :param `direction`: direction flag(s) (``wx.HORIZONTAL`` and/or
         ``wx.VERTICAL``) in which the size should shrink;
        :param `size`: the `wx.Size` to shrink from.

        :returns: the shrunk `wx.Size` (never smaller than the minimum size).
        """
        # Dummy implementation for code which doesn't check for IsSizingContinuous() == true
        minimum = self.GetMinSize()

        # Shrink by one pixel per requested axis, clamped at the minimum size.
        if direction & wx.HORIZONTAL and size.x > minimum.x:
            size.x -= 1
        if direction & wx.VERTICAL and size.y > minimum.y:
            size.y -= 1

        return size

    def DoGetNextLargerSize(self, direction, size):
        """
        Implementation of L{GetNextLargerSize}.

        Controls which have non-continuous sizing must override this virtual function
        rather than L{GetNextLargerSize}.

        :param `direction`: direction flag(s) (``wx.HORIZONTAL`` and/or
         ``wx.VERTICAL``) in which the size should grow;
        :param `size`: the `wx.Size` to grow from.

        :returns: the grown `wx.Size` (one pixel larger per requested axis).
        """
        # Dummy implementation for code which doesn't check for IsSizingContinuous() == true
        if direction & wx.HORIZONTAL:
            size.x += 1
        if direction & wx.VERTICAL:
            size.y += 1

        return size

    def GetNextSmallerSize(self, direction, relative_to=None):
        """
        If sizing is not continuous, then return a suitable size for the control which
        is smaller than the given size.

        :param `direction`: The direction(s) in which the size should reduce;
        :param `relative_to`: The size for which a smaller size should be found;
         defaults to the control's current size when omitted.

        :returns: if there is no smaller size, otherwise a suitable size which is smaller
         in the given direction(s), and the same as in the other direction (if any).

        :see: L{IsSizingContinuous}, L{DoGetNextSmallerSize}
        """
        if relative_to is not None:
            return self.DoGetNextSmallerSize(direction, relative_to)

        return self.DoGetNextSmallerSize(direction, self.GetSize())

    def GetNextLargerSize(self, direction, relative_to=None):
        """
        If sizing is not continuous, then return a suitable size for the control which
        is larger than the given size.

        :param `direction`: The direction(s) in which the size should increase;
        :param `relative_to`: The size for which a larger size should be found;
         defaults to the control's current size when omitted.

        :returns: if there is no larger size, otherwise a suitable size which is larger
         in the given direction(s), and the same as in the other direction (if any).

        :see: L{IsSizingContinuous}, L{DoGetNextLargerSize}
        """
        if relative_to is not None:
            return self.DoGetNextLargerSize(direction, relative_to)

        return self.DoGetNextLargerSize(direction, self.GetSize())

    def Realize(self):
        """
        Perform initial size and layout calculations after children have been added,
        and/or realize children.
        """
        # Base class has nothing to lay out; subclasses override as needed.
        pass

    def Realise(self):
        """
        Alias for L{Realize}.
        """
        pass
| gpl-3.0 |
dayatz/taiga-back | tests/integration/resources_permissions/test_projects_choices_resources.py | 1 | 96198 | # -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2017 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2017 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2017 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# Copyright (C) 2014-2017 Anler Hernández <hello@anler.me>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.core.urlresolvers import reverse
from taiga.base.utils import json
from taiga.projects import choices as project_choices
from taiga.projects import serializers
from taiga.users.serializers import RoleSerializer
from taiga.permissions.choices import MEMBERS_PERMISSIONS
from tests import factories as f
from tests.utils import helper_test_http_method
import pytest
pytestmark = pytest.mark.django_db
@pytest.fixture
def data():
    """Build the shared permission-test world: five users, four projects
    (public / private-visible / private-hidden / blocked), memberships, and
    one instance of every per-project choice model (statuses, points, types,
    priorities, severities).  Creation order matters: memberships reference
    projects, choice objects reference projects."""
    # Anonymous namespace object used as a simple attribute bag.
    m = type("Models", (object,), {})

    # --- Users -----------------------------------------------------------
    m.registered_user = f.UserFactory.create()
    m.project_member_with_perms = f.UserFactory.create()
    m.project_member_without_perms = f.UserFactory.create()
    m.project_owner = f.UserFactory.create()
    m.other_user = f.UserFactory.create()
    m.superuser = f.UserFactory.create(is_superuser=True)

    # --- Projects --------------------------------------------------------
    # Public: anyone (even anonymous) may view.
    m.public_project = f.ProjectFactory(is_private=False,
                                        anon_permissions=['view_project'],
                                        public_permissions=['view_project'],
                                        owner=m.project_owner,
                                        tags_colors = [("tag1", "#123123"), ("tag2", "#456456"), ("tag3", "#111222")])
    # Private but with view permissions granted to anonymous/public users.
    m.private_project1 = f.ProjectFactory(is_private=True,
                                          anon_permissions=['view_project'],
                                          public_permissions=['view_project'],
                                          owner=m.project_owner,
                                          tags_colors = [("tag1", "#123123"), ("tag2", "#456456"), ("tag3", "#111222")])
    # Fully private: only members can see it.
    m.private_project2 = f.ProjectFactory(is_private=True,
                                          anon_permissions=[],
                                          public_permissions=[],
                                          owner=m.project_owner,
                                          tags_colors = [("tag1", "#123123"), ("tag2", "#456456"), ("tag3", "#111222")])
    # Like private_project2, but blocked by staff — writes return 451.
    m.blocked_project = f.ProjectFactory(is_private=True,
                                         anon_permissions=[],
                                         public_permissions=[],
                                         owner=m.project_owner,
                                         blocked_code=project_choices.BLOCKED_BY_STAFF,
                                         tags_colors = [("tag1", "#123123"), ("tag2", "#456456"), ("tag3", "#111222")])

    # --- Memberships -----------------------------------------------------
    # project_member_with_perms gets a role holding every member permission.
    m.public_membership = f.MembershipFactory(project=m.public_project,
                                              user=m.project_member_with_perms,
                                              email=m.project_member_with_perms.email,
                                              role__project=m.public_project,
                                              role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
    m.private_membership1 = f.MembershipFactory(project=m.private_project1,
                                                user=m.project_member_with_perms,
                                                email=m.project_member_with_perms.email,
                                                role__project=m.private_project1,
                                                role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
    # project_member_without_perms is a member with an empty-permission role.
    f.MembershipFactory(project=m.private_project1,
                        user=m.project_member_without_perms,
                        email=m.project_member_without_perms.email,
                        role__project=m.private_project1,
                        role__permissions=[])
    m.private_membership2 = f.MembershipFactory(project=m.private_project2,
                                                user=m.project_member_with_perms,
                                                email=m.project_member_with_perms.email,
                                                role__project=m.private_project2,
                                                role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
    f.MembershipFactory(project=m.private_project2,
                        user=m.project_member_without_perms,
                        email=m.project_member_without_perms.email,
                        role__project=m.private_project2,
                        role__permissions=[])
    m.blocked_membership = f.MembershipFactory(project=m.blocked_project,
                                               user=m.project_member_with_perms,
                                               role__project=m.blocked_project,
                                               role__permissions=list(map(lambda x: x[0], MEMBERS_PERMISSIONS)))
    f.MembershipFactory(project=m.blocked_project,
                        user=m.project_member_without_perms,
                        role__project=m.blocked_project,
                        role__permissions=[])

    # The owner is an admin member of every project.
    f.MembershipFactory(project=m.public_project,
                        user=m.project_owner,
                        is_admin=True)
    f.MembershipFactory(project=m.private_project1,
                        user=m.project_owner,
                        is_admin=True)
    f.MembershipFactory(project=m.private_project2,
                        user=m.project_owner,
                        is_admin=True)
    f.MembershipFactory(project=m.blocked_project,
                        user=m.project_owner,
                        is_admin=True)

    # --- Per-project choice objects (one of each per project) ------------
    m.public_epic_status = f.EpicStatusFactory(project=m.public_project)
    m.private_epic_status1 = f.EpicStatusFactory(project=m.private_project1)
    m.private_epic_status2 = f.EpicStatusFactory(project=m.private_project2)
    m.blocked_epic_status = f.EpicStatusFactory(project=m.blocked_project)
    m.public_points = f.PointsFactory(project=m.public_project)
    m.private_points1 = f.PointsFactory(project=m.private_project1)
    m.private_points2 = f.PointsFactory(project=m.private_project2)
    m.blocked_points = f.PointsFactory(project=m.blocked_project)
    m.public_user_story_status = f.UserStoryStatusFactory(project=m.public_project)
    m.private_user_story_status1 = f.UserStoryStatusFactory(project=m.private_project1)
    m.private_user_story_status2 = f.UserStoryStatusFactory(project=m.private_project2)
    m.blocked_user_story_status = f.UserStoryStatusFactory(project=m.blocked_project)
    m.public_task_status = f.TaskStatusFactory(project=m.public_project)
    m.private_task_status1 = f.TaskStatusFactory(project=m.private_project1)
    m.private_task_status2 = f.TaskStatusFactory(project=m.private_project2)
    m.blocked_task_status = f.TaskStatusFactory(project=m.blocked_project)
    m.public_issue_status = f.IssueStatusFactory(project=m.public_project)
    m.private_issue_status1 = f.IssueStatusFactory(project=m.private_project1)
    m.private_issue_status2 = f.IssueStatusFactory(project=m.private_project2)
    m.blocked_issue_status = f.IssueStatusFactory(project=m.blocked_project)
    m.public_issue_type = f.IssueTypeFactory(project=m.public_project)
    m.private_issue_type1 = f.IssueTypeFactory(project=m.private_project1)
    m.private_issue_type2 = f.IssueTypeFactory(project=m.private_project2)
    m.blocked_issue_type = f.IssueTypeFactory(project=m.blocked_project)
    m.public_priority = f.PriorityFactory(project=m.public_project)
    m.private_priority1 = f.PriorityFactory(project=m.private_project1)
    m.private_priority2 = f.PriorityFactory(project=m.private_project2)
    m.blocked_priority = f.PriorityFactory(project=m.blocked_project)
    m.public_severity = f.SeverityFactory(project=m.public_project)
    m.private_severity1 = f.SeverityFactory(project=m.private_project1)
    m.private_severity2 = f.SeverityFactory(project=m.private_project2)
    m.blocked_severity = f.SeverityFactory(project=m.blocked_project)

    # Creation template of the public project (used by template endpoints).
    m.project_template = m.public_project.creation_template

    return m
#####################################################
# Roles
#####################################################
def test_roles_retrieve(client, data):
    """Roles are readable by anyone on visible projects, members/owner only otherwise."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_project, [200, 200, 200, 200, 200]),
        (data.private_project1, [200, 200, 200, 200, 200]),
        (data.private_project2, [401, 403, 403, 200, 200]),
        (data.blocked_project, [401, 403, 403, 200, 200]),
    ]
    for project, expected in cases:
        url = reverse('roles-detail', kwargs={"pk": project.roles.all()[0].pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_roles_update(client, data):
    """Only the project owner may PUT a role; blocked projects answer 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_project, [401, 403, 403, 403, 200]),
        (data.private_project1, [401, 403, 403, 403, 200]),
        (data.private_project2, [401, 403, 403, 403, 200]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        role = project.roles.all()[0]
        url = reverse('roles-detail', kwargs={"pk": role.pk})
        payload = RoleSerializer(role).data
        payload["name"] = "test"
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_roles_delete(client, data):
    """Only the project owner may DELETE a role; blocked projects answer 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_project, [401, 403, 403, 403, 204]),
        (data.private_project1, [401, 403, 403, 403, 204]),
        (data.private_project2, [401, 403, 403, 403, 204]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        url = reverse('roles-detail', kwargs={"pk": project.roles.all()[0].pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_roles_list(client, data):
    """Listing roles only exposes roles of projects visible to the requester."""
    url = reverse('roles-list')

    expectations = [
        (None, 3),                               # anonymous
        (data.registered_user, 3),
        (data.project_member_without_perms, 3),
        (data.project_member_with_perms, 7),
        (data.project_owner, 7),
    ]
    for user, expected_count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        assert response.status_code == 200
        assert len(json.loads(response.content.decode('utf-8'))) == expected_count
def test_roles_patch(client, data):
    """Only the project owner may PATCH a role; blocked projects answer 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_project, [401, 403, 403, 403, 200]),
        (data.private_project1, [401, 403, 403, 403, 200]),
        (data.private_project2, [401, 403, 403, 403, 200]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        url = reverse('roles-detail', kwargs={"pk": project.roles.all()[0].pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
#####################################################
# Epic Status
#####################################################
def test_epic_status_retrieve(client, data):
    """Epic statuses are readable wherever the owning project is visible."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_epic_status, [200, 200, 200, 200, 200]),
        (data.private_epic_status1, [200, 200, 200, 200, 200]),
        (data.private_epic_status2, [401, 403, 403, 200, 200]),
        (data.blocked_epic_status, [401, 403, 403, 200, 200]),
    ]
    for status, expected in cases:
        url = reverse('epic-statuses-detail', kwargs={"pk": status.pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_epic_status_update(client, data):
    """Only the project owner may PUT an epic status; blocked projects answer 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_epic_status, [401, 403, 403, 403, 200]),
        (data.private_epic_status1, [401, 403, 403, 403, 200]),
        (data.private_epic_status2, [401, 403, 403, 403, 200]),
        (data.blocked_epic_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('epic-statuses-detail', kwargs={"pk": status.pk})
        payload = serializers.EpicStatusSerializer(status).data
        payload["name"] = "test"
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_epic_status_delete(client, data):
    """Only the project owner may DELETE an epic status; blocked projects answer 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_epic_status, [401, 403, 403, 403, 204]),
        (data.private_epic_status1, [401, 403, 403, 403, 204]),
        (data.private_epic_status2, [401, 403, 403, 403, 204]),
        (data.blocked_epic_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('epic-statuses-detail', kwargs={"pk": status.pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_epic_status_list(client, data):
    """Listing epic statuses only exposes those of visible projects."""
    url = reverse('epic-statuses-list')

    expectations = [
        (None, 2),                               # anonymous
        (data.registered_user, 2),
        (data.project_member_without_perms, 2),
        (data.project_member_with_perms, 4),
        (data.project_owner, 4),
    ]
    for user, expected_count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        assert response.status_code == 200
        assert len(json.loads(response.content.decode('utf-8'))) == expected_count
def test_epic_status_patch(client, data):
    """Only the project owner may PATCH an epic status; blocked projects answer 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_epic_status, [401, 403, 403, 403, 200]),
        (data.private_epic_status1, [401, 403, 403, 403, 200]),
        (data.private_epic_status2, [401, 403, 403, 403, 200]),
        (data.blocked_epic_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('epic-statuses-detail', kwargs={"pk": status.pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
def test_epic_status_action_bulk_update_order(client, data):
    """Only the project owner may reorder epic statuses; blocked projects answer 451."""
    url = reverse('epic-statuses-bulk-update-order')
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_project, [401, 403, 403, 403, 204]),
        (data.private_project1, [401, 403, 403, 403, 204]),
        (data.private_project2, [401, 403, 403, 403, 204]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        post_data = json.dumps({
            "bulk_epic_statuses": [(1, 2)],
            "project": project.pk
        })
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
#####################################################
# Points
#####################################################
def test_points_retrieve(client, data):
    """Points are readable wherever the owning project is visible."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_points, [200, 200, 200, 200, 200]),
        (data.private_points1, [200, 200, 200, 200, 200]),
        (data.private_points2, [401, 403, 403, 200, 200]),
        (data.blocked_points, [401, 403, 403, 200, 200]),
    ]
    for points, expected in cases:
        url = reverse('points-detail', kwargs={"pk": points.pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_points_update(client, data):
    """Only the project owner may PUT a points value; blocked projects answer 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_points, [401, 403, 403, 403, 200]),
        (data.private_points1, [401, 403, 403, 403, 200]),
        (data.private_points2, [401, 403, 403, 403, 200]),
        (data.blocked_points, [401, 403, 403, 403, 451]),
    ]
    for points, expected in cases:
        url = reverse('points-detail', kwargs={"pk": points.pk})
        payload = serializers.PointsSerializer(points).data
        payload["name"] = "test"
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_points_delete(client, data):
    """Only the project owner may DELETE a points value; blocked projects answer 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_points, [401, 403, 403, 403, 204]),
        (data.private_points1, [401, 403, 403, 403, 204]),
        (data.private_points2, [401, 403, 403, 403, 204]),
        (data.blocked_points, [401, 403, 403, 403, 451]),
    ]
    for points, expected in cases:
        url = reverse('points-detail', kwargs={"pk": points.pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_points_list(client, data):
    """Listing points only exposes those of visible projects."""
    url = reverse('points-list')

    expectations = [
        (None, 2),                               # anonymous
        (data.registered_user, 2),
        (data.project_member_without_perms, 2),
        (data.project_member_with_perms, 4),
        (data.project_owner, 4),
    ]
    for user, expected_count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        assert response.status_code == 200
        assert len(json.loads(response.content.decode('utf-8'))) == expected_count
def test_points_patch(client, data):
    """Only the project owner may PATCH a points value; blocked projects answer 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]

    cases = [
        (data.public_points, [401, 403, 403, 403, 200]),
        (data.private_points1, [401, 403, 403, 403, 200]),
        (data.private_points2, [401, 403, 403, 403, 200]),
        (data.blocked_points, [401, 403, 403, 403, 451]),
    ]
    for points, expected in cases:
        url = reverse('points-detail', kwargs={"pk": points.pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
def test_points_action_bulk_update_order(client, data):
    """Bulk reordering of points is owner-only (204); blocked projects answer 451."""
    url = reverse('points-bulk-update-order')
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_project, [401, 403, 403, 403, 204]),
        (data.private_project1, [401, 403, 403, 403, 204]),
        (data.private_project2, [401, 403, 403, 403, 204]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        post_data = json.dumps({
            "bulk_points": [(1, 2)],
            "project": project.pk,
        })
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
#####################################################
# User Story Status
#####################################################
def test_user_story_status_retrieve(client, data):
    """Public-project statuses are readable by anyone; private/blocked ones
    require membership with permissions or ownership."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_user_story_status, [200, 200, 200, 200, 200]),
        (data.private_user_story_status1, [200, 200, 200, 200, 200]),
        (data.private_user_story_status2, [401, 403, 403, 200, 200]),
        (data.blocked_user_story_status, [401, 403, 403, 200, 200]),
    ]
    for status, expected in cases:
        url = reverse('userstory-statuses-detail', kwargs={"pk": status.pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_user_story_status_update(client, data):
    """Only the project owner may PUT a user-story status; blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_user_story_status, [401, 403, 403, 403, 200]),
        (data.private_user_story_status1, [401, 403, 403, 403, 200]),
        (data.private_user_story_status2, [401, 403, 403, 403, 200]),
        (data.blocked_user_story_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('userstory-statuses-detail', kwargs={"pk": status.pk})
        # Serialize the current object, tweak one field, and PUT it back.
        payload = serializers.UserStoryStatusSerializer(status).data
        payload["name"] = "test"
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_user_story_status_delete(client, data):
    """Only the project owner may DELETE a user-story status (204); blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_user_story_status, [401, 403, 403, 403, 204]),
        (data.private_user_story_status1, [401, 403, 403, 403, 204]),
        (data.private_user_story_status2, [401, 403, 403, 403, 204]),
        (data.blocked_user_story_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('userstory-statuses-detail', kwargs={"pk": status.pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_user_story_status_list(client, data):
    """The status list only exposes entries from projects visible to the requester."""
    url = reverse('userstory-statuses-list')
    expectations = [
        (None, 2),  # anonymous request, no login
        (data.registered_user, 2),
        (data.project_member_without_perms, 2),
        (data.project_member_with_perms, 4),
        (data.project_owner, 4),
    ]
    for user, expected_count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        payload = json.loads(response.content.decode('utf-8'))
        assert len(payload) == expected_count
        assert response.status_code == 200
def test_user_story_status_patch(client, data):
    """Only the project owner may PATCH a user-story status; blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_user_story_status, [401, 403, 403, 403, 200]),
        (data.private_user_story_status1, [401, 403, 403, 403, 200]),
        (data.private_user_story_status2, [401, 403, 403, 403, 200]),
        (data.blocked_user_story_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('userstory-statuses-detail', kwargs={"pk": status.pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
def test_user_story_status_action_bulk_update_order(client, data):
    """Bulk reordering of user-story statuses is owner-only (204); blocked projects answer 451."""
    url = reverse('userstory-statuses-bulk-update-order')
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_project, [401, 403, 403, 403, 204]),
        (data.private_project1, [401, 403, 403, 403, 204]),
        (data.private_project2, [401, 403, 403, 403, 204]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        post_data = json.dumps({
            "bulk_userstory_statuses": [(1, 2)],
            "project": project.pk,
        })
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
#####################################################
# Task Status
#####################################################
def test_task_status_retrieve(client, data):
    """Public-project task statuses are readable by anyone; private/blocked ones
    require membership with permissions or ownership."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_task_status, [200, 200, 200, 200, 200]),
        (data.private_task_status1, [200, 200, 200, 200, 200]),
        (data.private_task_status2, [401, 403, 403, 200, 200]),
        (data.blocked_task_status, [401, 403, 403, 200, 200]),
    ]
    for status, expected in cases:
        url = reverse('task-statuses-detail', kwargs={"pk": status.pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_task_status_update(client, data):
    """Only the project owner may PUT a task status; blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_task_status, [401, 403, 403, 403, 200]),
        (data.private_task_status1, [401, 403, 403, 403, 200]),
        (data.private_task_status2, [401, 403, 403, 403, 200]),
        (data.blocked_task_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('task-statuses-detail', kwargs={"pk": status.pk})
        # Serialize the current object, tweak one field, and PUT it back.
        payload = serializers.TaskStatusSerializer(status).data
        payload["name"] = "test"
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_task_status_delete(client, data):
    """Only the project owner may DELETE a task status (204); blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_task_status, [401, 403, 403, 403, 204]),
        (data.private_task_status1, [401, 403, 403, 403, 204]),
        (data.private_task_status2, [401, 403, 403, 403, 204]),
        (data.blocked_task_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('task-statuses-detail', kwargs={"pk": status.pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_task_status_list(client, data):
    """The task-status list only exposes entries from projects visible to the requester."""
    url = reverse('task-statuses-list')
    expectations = [
        (None, 2),  # anonymous request, no login
        (data.registered_user, 2),
        (data.project_member_without_perms, 2),
        (data.project_member_with_perms, 4),
        (data.project_owner, 4),
    ]
    for user, expected_count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        payload = json.loads(response.content.decode('utf-8'))
        assert len(payload) == expected_count
        assert response.status_code == 200
def test_task_status_patch(client, data):
    """Only the project owner may PATCH a task status; blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_task_status, [401, 403, 403, 403, 200]),
        (data.private_task_status1, [401, 403, 403, 403, 200]),
        (data.private_task_status2, [401, 403, 403, 403, 200]),
        (data.blocked_task_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('task-statuses-detail', kwargs={"pk": status.pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
def test_task_status_action_bulk_update_order(client, data):
    """Bulk reordering of task statuses is owner-only (204); blocked projects answer 451."""
    url = reverse('task-statuses-bulk-update-order')
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_project, [401, 403, 403, 403, 204]),
        (data.private_project1, [401, 403, 403, 403, 204]),
        (data.private_project2, [401, 403, 403, 403, 204]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        post_data = json.dumps({
            "bulk_task_statuses": [(1, 2)],
            "project": project.pk,
        })
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
#####################################################
# Issue Status
#####################################################
def test_issue_status_retrieve(client, data):
    """Public-project issue statuses are readable by anyone; private/blocked ones
    require membership with permissions or ownership."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_issue_status, [200, 200, 200, 200, 200]),
        (data.private_issue_status1, [200, 200, 200, 200, 200]),
        (data.private_issue_status2, [401, 403, 403, 200, 200]),
        (data.blocked_issue_status, [401, 403, 403, 200, 200]),
    ]
    for status, expected in cases:
        url = reverse('issue-statuses-detail', kwargs={"pk": status.pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_issue_status_update(client, data):
    """Only the project owner may PUT an issue status; blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_issue_status, [401, 403, 403, 403, 200]),
        (data.private_issue_status1, [401, 403, 403, 403, 200]),
        (data.private_issue_status2, [401, 403, 403, 403, 200]),
        (data.blocked_issue_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('issue-statuses-detail', kwargs={"pk": status.pk})
        # Serialize the current object, tweak one field, and PUT it back.
        payload = serializers.IssueStatusSerializer(status).data
        payload["name"] = "test"
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_issue_status_delete(client, data):
    """Only the project owner may DELETE an issue status (204); blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_issue_status, [401, 403, 403, 403, 204]),
        (data.private_issue_status1, [401, 403, 403, 403, 204]),
        (data.private_issue_status2, [401, 403, 403, 403, 204]),
        (data.blocked_issue_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('issue-statuses-detail', kwargs={"pk": status.pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_issue_status_list(client, data):
    """The issue-status list only exposes entries from projects visible to the requester."""
    url = reverse('issue-statuses-list')
    expectations = [
        (None, 2),  # anonymous request, no login
        (data.registered_user, 2),
        (data.project_member_without_perms, 2),
        (data.project_member_with_perms, 4),
        (data.project_owner, 4),
    ]
    for user, expected_count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        payload = json.loads(response.content.decode('utf-8'))
        assert len(payload) == expected_count
        assert response.status_code == 200
def test_issue_status_patch(client, data):
    """Only the project owner may PATCH an issue status; blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_issue_status, [401, 403, 403, 403, 200]),
        (data.private_issue_status1, [401, 403, 403, 403, 200]),
        (data.private_issue_status2, [401, 403, 403, 403, 200]),
        (data.blocked_issue_status, [401, 403, 403, 403, 451]),
    ]
    for status, expected in cases:
        url = reverse('issue-statuses-detail', kwargs={"pk": status.pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
def test_issue_status_action_bulk_update_order(client, data):
    """Bulk reordering of issue statuses is owner-only (204); blocked projects answer 451."""
    url = reverse('issue-statuses-bulk-update-order')
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_project, [401, 403, 403, 403, 204]),
        (data.private_project1, [401, 403, 403, 403, 204]),
        (data.private_project2, [401, 403, 403, 403, 204]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        post_data = json.dumps({
            "bulk_issue_statuses": [(1, 2)],
            "project": project.pk,
        })
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
#####################################################
# Issue Type
#####################################################
def test_issue_type_retrieve(client, data):
    """Public-project issue types are readable by anyone; private/blocked ones
    require membership with permissions or ownership."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_issue_type, [200, 200, 200, 200, 200]),
        (data.private_issue_type1, [200, 200, 200, 200, 200]),
        (data.private_issue_type2, [401, 403, 403, 200, 200]),
        (data.blocked_issue_type, [401, 403, 403, 200, 200]),
    ]
    for issue_type, expected in cases:
        url = reverse('issue-types-detail', kwargs={"pk": issue_type.pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_issue_type_update(client, data):
    """Only the project owner may PUT an issue type; blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_issue_type, [401, 403, 403, 403, 200]),
        (data.private_issue_type1, [401, 403, 403, 403, 200]),
        (data.private_issue_type2, [401, 403, 403, 403, 200]),
        (data.blocked_issue_type, [401, 403, 403, 403, 451]),
    ]
    for issue_type, expected in cases:
        url = reverse('issue-types-detail', kwargs={"pk": issue_type.pk})
        # Serialize the current object, tweak one field, and PUT it back.
        payload = serializers.IssueTypeSerializer(issue_type).data
        payload["name"] = "test"
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_issue_type_delete(client, data):
    """Only the project owner may DELETE an issue type (204); blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_issue_type, [401, 403, 403, 403, 204]),
        (data.private_issue_type1, [401, 403, 403, 403, 204]),
        (data.private_issue_type2, [401, 403, 403, 403, 204]),
        (data.blocked_issue_type, [401, 403, 403, 403, 451]),
    ]
    for issue_type, expected in cases:
        url = reverse('issue-types-detail', kwargs={"pk": issue_type.pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_issue_type_list(client, data):
    """The issue-type list only exposes entries from projects visible to the requester."""
    url = reverse('issue-types-list')
    expectations = [
        (None, 2),  # anonymous request, no login
        (data.registered_user, 2),
        (data.project_member_without_perms, 2),
        (data.project_member_with_perms, 4),
        (data.project_owner, 4),
    ]
    for user, expected_count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        payload = json.loads(response.content.decode('utf-8'))
        assert len(payload) == expected_count
        assert response.status_code == 200
def test_issue_type_patch(client, data):
    """Only the project owner may PATCH an issue type; blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_issue_type, [401, 403, 403, 403, 200]),
        (data.private_issue_type1, [401, 403, 403, 403, 200]),
        (data.private_issue_type2, [401, 403, 403, 403, 200]),
        (data.blocked_issue_type, [401, 403, 403, 403, 451]),
    ]
    for issue_type, expected in cases:
        url = reverse('issue-types-detail', kwargs={"pk": issue_type.pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
def test_issue_type_action_bulk_update_order(client, data):
    """Bulk reordering of issue types is owner-only (204); blocked projects answer 451."""
    url = reverse('issue-types-bulk-update-order')
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_project, [401, 403, 403, 403, 204]),
        (data.private_project1, [401, 403, 403, 403, 204]),
        (data.private_project2, [401, 403, 403, 403, 204]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        post_data = json.dumps({
            "bulk_issue_types": [(1, 2)],
            "project": project.pk,
        })
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
#####################################################
# Priority
#####################################################
def test_priority_retrieve(client, data):
    """Public-project priorities are readable by anyone; private/blocked ones
    require membership with permissions or ownership."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_priority, [200, 200, 200, 200, 200]),
        (data.private_priority1, [200, 200, 200, 200, 200]),
        (data.private_priority2, [401, 403, 403, 200, 200]),
        (data.blocked_priority, [401, 403, 403, 200, 200]),
    ]
    for priority, expected in cases:
        url = reverse('priorities-detail', kwargs={"pk": priority.pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_priority_update(client, data):
    """Only the project owner may PUT a priority; blocked projects answer 451."""
    users = [
        None,
        data.registered_user,
        data.project_member_without_perms,
        data.project_member_with_perms,
        data.project_owner,
    ]
    cases = [
        (data.public_priority, [401, 403, 403, 403, 200]),
        (data.private_priority1, [401, 403, 403, 403, 200]),
        (data.private_priority2, [401, 403, 403, 403, 200]),
        (data.blocked_priority, [401, 403, 403, 403, 451]),
    ]
    for priority, expected in cases:
        url = reverse('priorities-detail', kwargs={"pk": priority.pk})
        # Serialize the current object, tweak one field, and PUT it back.
        payload = serializers.PrioritySerializer(priority).data
        payload["name"] = "test"
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_priority_delete(client, data):
    """DELETE on a priority succeeds only for the project owner; blocked project → 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_priority, [401, 403, 403, 403, 204]),
        (data.private_priority1, [401, 403, 403, 403, 204]),
        (data.private_priority2, [401, 403, 403, 403, 204]),
        (data.blocked_priority, [401, 403, 403, 403, 451]),
    ]
    for priority, expected in cases:
        url = reverse('priorities-detail', kwargs={"pk": priority.pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_priority_list(client, data):
    """Anonymous users and outsiders see 2 priorities; members with perms and the owner see 4."""
    url = reverse('priorities-list')
    expectations = [
        (None, 2),
        (data.registered_user, 2),
        (data.project_member_without_perms, 2),
        (data.project_member_with_perms, 4),
        (data.project_owner, 4),
    ]
    for user, count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        assert len(json.loads(response.content.decode('utf-8'))) == count
        assert response.status_code == 200
def test_priority_patch(client, data):
    """PATCH on a priority succeeds only for the project owner; blocked project → 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_priority, [401, 403, 403, 403, 200]),
        (data.private_priority1, [401, 403, 403, 403, 200]),
        (data.private_priority2, [401, 403, 403, 403, 200]),
        (data.blocked_priority, [401, 403, 403, 403, 451]),
    ]
    for priority, expected in cases:
        url = reverse('priorities-detail', kwargs={"pk": priority.pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
def test_priority_action_bulk_update_order(client, data):
    """Bulk reordering of priorities is owner-only; blocked project → 451."""
    url = reverse('priorities-bulk-update-order')
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_project, [401, 403, 403, 403, 204]),
        (data.private_project1, [401, 403, 403, 403, 204]),
        (data.private_project2, [401, 403, 403, 403, 204]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        post_data = json.dumps({
            "bulk_priorities": [(1, 2)],
            "project": project.pk
        })
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
#####################################################
# Severity
#####################################################
def test_severity_retrieve(client, data):
    """GET on severities: public/private1 are world-readable, private2/blocked need membership."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_severity, [200, 200, 200, 200, 200]),
        (data.private_severity1, [200, 200, 200, 200, 200]),
        (data.private_severity2, [401, 403, 403, 200, 200]),
        (data.blocked_severity, [401, 403, 403, 200, 200]),
    ]
    for severity, expected in cases:
        url = reverse('severities-detail', kwargs={"pk": severity.pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_severity_update(client, data):
    """PUT on a severity succeeds only for the project owner; blocked project → 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_severity, [401, 403, 403, 403, 200]),
        (data.private_severity1, [401, 403, 403, 403, 200]),
        (data.private_severity2, [401, 403, 403, 403, 200]),
        (data.blocked_severity, [401, 403, 403, 403, 451]),
    ]
    for severity, expected in cases:
        url = reverse('severities-detail', kwargs={"pk": severity.pk})
        payload = serializers.SeveritySerializer(severity).data
        payload["name"] = "test"
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_severity_delete(client, data):
    """DELETE on a severity succeeds only for the project owner; blocked project → 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_severity, [401, 403, 403, 403, 204]),
        (data.private_severity1, [401, 403, 403, 403, 204]),
        (data.private_severity2, [401, 403, 403, 403, 204]),
        (data.blocked_severity, [401, 403, 403, 403, 451]),
    ]
    for severity, expected in cases:
        url = reverse('severities-detail', kwargs={"pk": severity.pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_severity_list(client, data):
    """Anonymous users and outsiders see 2 severities; members with perms and the owner see 4."""
    url = reverse('severities-list')
    expectations = [
        (None, 2),
        (data.registered_user, 2),
        (data.project_member_without_perms, 2),
        (data.project_member_with_perms, 4),
        (data.project_owner, 4),
    ]
    for user, count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        assert len(json.loads(response.content.decode('utf-8'))) == count
        assert response.status_code == 200
def test_severity_patch(client, data):
    """PATCH on a severity succeeds only for the project owner; blocked project → 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_severity, [401, 403, 403, 403, 200]),
        (data.private_severity1, [401, 403, 403, 403, 200]),
        (data.private_severity2, [401, 403, 403, 403, 200]),
        (data.blocked_severity, [401, 403, 403, 403, 451]),
    ]
    for severity, expected in cases:
        url = reverse('severities-detail', kwargs={"pk": severity.pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
def test_severity_action_bulk_update_order(client, data):
    """Bulk reordering of severities is owner-only; blocked project → 451."""
    url = reverse('severities-bulk-update-order')
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_project, [401, 403, 403, 403, 204]),
        (data.private_project1, [401, 403, 403, 403, 204]),
        (data.private_project2, [401, 403, 403, 403, 204]),
        (data.blocked_project, [401, 403, 403, 403, 451]),
    ]
    for project, expected in cases:
        post_data = json.dumps({
            "bulk_severities": [(1, 2)],
            "project": project.pk
        })
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
#####################################################
# Memberships
#####################################################
def test_membership_retrieve(client, data):
    """GET on memberships: public/private1 are world-readable, private2/blocked need membership."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_membership, [200, 200, 200, 200, 200]),
        (data.private_membership1, [200, 200, 200, 200, 200]),
        (data.private_membership2, [401, 403, 403, 200, 200]),
        (data.blocked_membership, [401, 403, 403, 200, 200]),
    ]
    for membership, expected in cases:
        url = reverse('memberships-detail', kwargs={"pk": membership.pk})
        assert helper_test_http_method(client, 'get', url, None, users) == expected
def test_membership_update(client, data):
    """PUT on a membership succeeds only for the project owner; blocked project → 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_membership, [401, 403, 403, 403, 200]),
        (data.private_membership1, [401, 403, 403, 403, 200]),
        (data.private_membership2, [401, 403, 403, 403, 200]),
        (data.blocked_membership, [401, 403, 403, 403, 451]),
    ]
    for membership, expected in cases:
        url = reverse('memberships-detail', kwargs={"pk": membership.pk})
        payload = serializers.MembershipSerializer(membership).data
        payload["token"] = "test"
        payload["username"] = membership.user.email
        results = helper_test_http_method(client, 'put', url, json.dumps(payload), users)
        assert results == expected
def test_membership_delete(client, data):
    """DELETE on a membership succeeds only for the project owner; blocked project → 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_membership, [401, 403, 403, 403, 204]),
        (data.private_membership1, [401, 403, 403, 403, 204]),
        (data.private_membership2, [401, 403, 403, 403, 204]),
        (data.blocked_membership, [401, 403, 403, 403, 451]),
    ]
    for membership, expected in cases:
        url = reverse('memberships-detail', kwargs={"pk": membership.pk})
        assert helper_test_http_method(client, 'delete', url, None, users) == expected
def test_membership_list(client, data):
    """Anonymous users and outsiders see 5 memberships; members with perms and the owner see 11."""
    url = reverse('memberships-list')
    expectations = [
        (None, 5),
        (data.registered_user, 5),
        (data.project_member_without_perms, 5),
        (data.project_member_with_perms, 11),
        (data.project_owner, 11),
    ]
    for user, count in expectations:
        if user is not None:
            client.login(user)
        response = client.get(url)
        assert len(json.loads(response.content.decode('utf-8'))) == count
        assert response.status_code == 200
def test_membership_patch(client, data):
    """PATCH on a membership succeeds only for the project owner; blocked project → 451."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_membership, [401, 403, 403, 403, 200]),
        (data.private_membership1, [401, 403, 403, 403, 200]),
        (data.private_membership2, [401, 403, 403, 403, 200]),
        (data.blocked_membership, [401, 403, 403, 403, 451]),
    ]
    for membership, expected in cases:
        url = reverse('memberships-detail', kwargs={"pk": membership.pk})
        results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', users)
        assert results == expected
def test_membership_create(client, data):
    """POST of a new membership is owner-only; blocked project → 451.

    Each case invites a distinct username so earlier successful creations
    do not collide with later ones.
    """
    url = reverse('memberships-list')
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_membership, "test1@test.com", [401, 403, 403, 403, 201]),
        (data.private_membership1, "test2@test.com", [401, 403, 403, 403, 201]),
        (data.private_membership2, "test3@test.com", [401, 403, 403, 403, 201]),
        (data.blocked_membership, "test4@test.com", [401, 403, 403, 403, 451]),
    ]
    for membership, username, expected in cases:
        payload = serializers.MembershipSerializer(membership).data
        # The serializer output carries a pk and a resolved user; drop both
        # so the POST creates a brand-new invitation.
        del payload["id"]
        del payload["user"]
        payload["username"] = username
        results = helper_test_http_method(client, 'post', url, json.dumps(payload), users)
        assert results == expected
def test_membership_action_bulk_create(client, data):
    """Bulk membership creation is owner-only; blocked project → 451."""
    url = reverse('memberships-bulk-create')
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    cases = [
        (data.public_project, data.public_membership, [401, 403, 403, 403, 200]),
        (data.private_project1, data.private_membership1, [401, 403, 403, 403, 200]),
        (data.private_project2, data.private_membership2, [401, 403, 403, 403, 200]),
        (data.blocked_project, data.blocked_membership, [401, 403, 403, 403, 451]),
    ]
    for project, membership, expected in cases:
        bulk_data = json.dumps({
            "project_id": project.id,
            "bulk_memberships": [
                {"role_id": membership.role.pk, "username": "test1@test.com"},
                {"role_id": membership.role.pk, "username": "test2@test.com"},
            ]
        })
        assert helper_test_http_method(client, 'post', url, bulk_data, users) == expected
def test_membership_action_resend_invitation(client, data):
    """Resending invitations is owner-only; private2/blocked are 404 for non-members.

    All invitations are created up-front (as in the original fixtures) so the
    database state is identical for every request.
    """
    projects = [data.public_project, data.private_project1,
                data.private_project2, data.blocked_project]
    invitations = [f.InvitationFactory(project=p, role__project=p) for p in projects]
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    expectations = [
        [401, 403, 403, 403, 204],
        [401, 403, 403, 403, 204],
        [404, 404, 404, 403, 204],
        [404, 404, 404, 403, 451],
    ]
    for invitation, expected in zip(invitations, expectations):
        url = reverse('memberships-resend-invitation', kwargs={"pk": invitation.pk})
        assert helper_test_http_method(client, 'post', url, None, users) == expected
#####################################################
# Project Templates
#####################################################
def test_project_template_retrieve(client, data):
    """Project templates are readable by everyone, including anonymous users."""
    url = reverse('project-templates-detail', kwargs={"pk": data.project_template.pk})
    requesters = [None, data.registered_user, data.superuser]
    assert helper_test_http_method(client, 'get', url, None, requesters) == [200, 200, 200]
def test_project_template_update(client, data):
    """Only superusers may PUT a project template."""
    url = reverse('project-templates-detail', kwargs={"pk": data.project_template.pk})
    requesters = [None, data.registered_user, data.superuser]
    payload = serializers.ProjectTemplateSerializer(data.project_template).data
    payload["default_owner_role"] = "test"
    results = helper_test_http_method(client, 'put', url, json.dumps(payload), requesters)
    assert results == [401, 403, 200]
def test_project_template_delete(client, data):
    """Only superusers may DELETE a project template."""
    url = reverse('project-templates-detail', kwargs={"pk": data.project_template.pk})
    requesters = [None, data.registered_user, data.superuser]
    assert helper_test_http_method(client, 'delete', url, None, requesters) == [401, 403, 204]
def test_project_template_list(client, data):
    """Every requester (anonymous, registered, superuser) sees the single template."""
    url = reverse('project-templates-list')
    for user in (None, data.registered_user, data.superuser):
        if user is not None:
            client.login(user)
        response = client.get(url)
        assert len(json.loads(response.content.decode('utf-8'))) == 1
        assert response.status_code == 200
def test_project_template_patch(client, data):
    """Only superusers may PATCH a project template."""
    url = reverse('project-templates-detail', kwargs={"pk": data.project_template.pk})
    requesters = [None, data.registered_user, data.superuser]
    results = helper_test_http_method(client, 'patch', url, '{"name": "Test"}', requesters)
    assert results == [401, 403, 200]
#####################################################
# Tags
#####################################################
def test_create_tag(client, data):
    """Creating a tag is owner-only; private2/blocked projects are 404 for non-members."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    post_data = json.dumps({
        "tag": "testtest",
        "color": "#123123"
    })
    cases = [
        (data.public_project, [401, 403, 403, 403, 200]),
        (data.private_project1, [401, 403, 403, 403, 200]),
        (data.private_project2, [404, 404, 404, 403, 200]),
        (data.blocked_project, [404, 404, 404, 403, 451]),
    ]
    for project, expected in cases:
        url = reverse('projects-create-tag', kwargs={"pk": project.pk})
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
def test_edit_tag(client, data):
    """Renaming/recoloring a tag is owner-only; private2/blocked are 404 for non-members."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    post_data = json.dumps({
        "from_tag": "tag1",
        "to_tag": "renamedtag1",
        "color": "#123123"
    })
    cases = [
        (data.public_project, [401, 403, 403, 403, 200]),
        (data.private_project1, [401, 403, 403, 403, 200]),
        (data.private_project2, [404, 404, 404, 403, 200]),
        (data.blocked_project, [404, 404, 404, 403, 451]),
    ]
    for project, expected in cases:
        url = reverse('projects-edit-tag', kwargs={"pk": project.pk})
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
def test_delete_tag(client, data):
    """Deleting a tag is owner-only; private2/blocked are 404 for non-members."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    post_data = json.dumps({
        "tag": "tag2",
    })
    cases = [
        (data.public_project, [401, 403, 403, 403, 200]),
        (data.private_project1, [401, 403, 403, 403, 200]),
        (data.private_project2, [404, 404, 404, 403, 200]),
        (data.blocked_project, [404, 404, 404, 403, 451]),
    ]
    for project, expected in cases:
        url = reverse('projects-delete-tag', kwargs={"pk": project.pk})
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
def test_mix_tags(client, data):
    """Merging tags is owner-only; private2/blocked are 404 for non-members."""
    users = [None, data.registered_user, data.project_member_without_perms,
             data.project_member_with_perms, data.project_owner]
    post_data = json.dumps({
        "from_tags": ["tag1"],
        "to_tag": "tag3"
    })
    cases = [
        (data.public_project, [401, 403, 403, 403, 200]),
        (data.private_project1, [401, 403, 403, 403, 200]),
        (data.private_project2, [404, 404, 404, 403, 200]),
        (data.blocked_project, [404, 404, 404, 403, 451]),
    ]
    for project, expected in cases:
        url = reverse('projects-mix-tags', kwargs={"pk": project.pk})
        assert helper_test_http_method(client, 'post', url, post_data, users) == expected
| agpl-3.0 |
ArmstrongYang/StudyShare | Python/PythonTest.py | 6 | 1104 | import unittest
class DictforTest(dict):
    """dict subclass whose keys are also readable/writable as attributes.

    ``d.key`` falls back to ``d['key']``; assigning ``d.key = v`` stores
    ``d['key'] = v``.  Missing keys surface as AttributeError, matching the
    normal attribute-access contract.
    """

    def __init__(self, **kw):
        super(DictforTest, self).__init__(**kw)

    def __getattr__(self, key):
        # Only invoked when normal attribute lookup fails, so real attributes
        # and methods still win over dict keys.
        try:
            return self[key]
        except KeyError:
            # Bug fix: the message previously hard-coded 'Dict' even though
            # the class is DictforTest; report the actual class name.
            raise AttributeError(
                r"'%s' object has no attribute '%s'" % (type(self).__name__, key))

    def __setattr__(self, key, value):
        # All attribute assignment is redirected into the mapping.
        self[key] = value
class TestDict(unittest.TestCase):
    """Exercises the key/attribute duality of DictforTest."""

    def test_init(self):
        sample = DictforTest(a=1, b='test')
        self.assertEqual(sample.a, 1)
        self.assertEqual(sample.b, 'test')
        self.assertTrue(isinstance(sample, dict))

    def test_key(self):
        # Writing through item syntax must be readable as an attribute.
        sample = DictforTest()
        sample['key'] = 'value'
        self.assertEqual(sample.key, 'value')

    def test_attr(self):
        # Writing through attribute syntax must be readable as an item.
        sample = DictforTest()
        sample.key = 'value'
        self.assertTrue('key' in sample)
        self.assertEqual(sample['key'], 'value')

    def test_keyerror(self):
        with self.assertRaises(KeyError):
            DictforTest()['empty']

    def test_attrerror(self):
        with self.assertRaises(AttributeError):
            DictforTest().empty
if __name__ == '__main__':
    # unittest.main() calls sys.exit() itself (exit=True by default), so the
    # trailing exit(0) that used to follow it was unreachable dead code --
    # and, had it ever run, it would have masked a failing exit status.
    unittest.main()
| apache-2.0 |
eul721/The-Perfect-Pokemon-Team-Balancer | libs/env/Lib/site-packages/pip/vcs/subversion.py | 473 | 10640 | import os
import re
from pip.backwardcompat import urlparse
from pip.index import Link
from pip.util import rmtree, display_path, call_subprocess
from pip.log import logger
from pip.vcs import vcs, VersionControl
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile('committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
class Subversion(VersionControl):
    """pip VersionControl backend for Subversion checkouts (``svn+`` URLs)."""
    name = 'svn'
    dirname = '.svn'  # metadata directory that marks an svn working copy
    repo_name = 'checkout'
    # URL schemes this backend claims; the 'svn+' prefix is stripped by the
    # base class during URL parsing.
    schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
    # Name of the file written into pip bundles describing this checkout.
    bundle_file = 'svn-checkout.txt'
    # Human-readable header written at the top of the bundle file.
    guide = ('# This was an svn checkout; to make it a checkout again run:\n'
             'svn checkout --force -r %(rev)s %(url)s .\n')
def get_info(self, location):
"""Returns (url, revision), where both are strings"""
assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
output = call_subprocess(
[self.cmd, 'info', location], show_stdout=False, extra_environ={'LANG': 'C'})
match = _svn_url_re.search(output)
if not match:
logger.warn('Cannot determine URL of svn checkout %s' % display_path(location))
logger.info('Output that cannot be parsed: \n%s' % output)
return None, None
url = match.group(1).strip()
match = _svn_revision_re.search(output)
if not match:
logger.warn('Cannot determine revision of svn checkout %s' % display_path(location))
logger.info('Output that cannot be parsed: \n%s' % output)
return url, None
return url, match.group(1)
    def parse_vcs_bundle_file(self, content):
        """Parse a bundle's svn-checkout.txt; return ``(url, rev)``.

        The first non-blank, non-comment line is expected to look like the
        command suggested in ``self.guide`` (``-r REV URL``).  Returns
        ``(None, None)`` when no usable line is found.
        """
        for line in content.splitlines():
            # Skip blank lines and '#' comments (e.g. the guide header).
            if not line.strip() or line.strip().startswith('#'):
                continue
            match = re.search(r'^-r\s*([^ ])?', line)
            if not match:
                return None, None
            # NOTE(review): the optional single-character group captures only
            # the first character of the revision; the remaining digits end
            # up at the start of ``rest``.  Looks suspect -- confirm against
            # the bundle format before relying on ``rev`` here.
            rev = match.group(1)
            # Everything after the match, first whitespace-separated token.
            rest = line[match.end():].strip().split(None, 1)[0]
            return rest, rev
        return None, None
def export(self, location):
"""Export the svn repository at the url to the destination location"""
url, rev = self.get_url_rev()
rev_options = get_rev_options(url, rev)
logger.notify('Exporting svn repository %s to %s' % (url, location))
logger.indent += 2
try:
if os.path.exists(location):
# Subversion doesn't like to check out over an existing directory
# --force fixes this, but was only added in svn 1.5
rmtree(location)
call_subprocess(
[self.cmd, 'export'] + rev_options + [url, location],
filter_stdout=self._filter, show_stdout=False)
finally:
logger.indent -= 2
def switch(self, dest, url, rev_options):
call_subprocess(
[self.cmd, 'switch'] + rev_options + [url, dest])
def update(self, dest, rev_options):
call_subprocess(
[self.cmd, 'update'] + rev_options + [dest])
    def obtain(self, dest):
        """Check out this requirement's URL into ``dest`` if confirmed."""
        url, rev = self.get_url_rev()
        rev_options = get_rev_options(url, rev)
        if rev:
            rev_display = ' (to revision %s)' % rev
        else:
            rev_display = ''
        # check_destination deals with an already-existing checkout at
        # ``dest`` and returns True when a fresh checkout should proceed.
        if self.check_destination(dest, url, rev_options, rev_display):
            logger.notify('Checking out %s%s to %s'
                          % (url, rev_display, display_path(dest)))
            call_subprocess(
                [self.cmd, 'checkout', '-q'] + rev_options + [url, dest])
def get_location(self, dist, dependency_links):
for url in dependency_links:
egg_fragment = Link(url).egg_fragment
if not egg_fragment:
continue
if '-' in egg_fragment:
## FIXME: will this work when a package has - in the name?
key = '-'.join(egg_fragment.split('-')[:-1]).lower()
else:
key = egg_fragment
if key == dist.key:
return url.split('#', 1)[0]
return None
    def get_revision(self, location):
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0
        for base, dirs, files in os.walk(location):
            if self.dirname not in dirs:
                dirs[:] = []
                continue  # no sense walking uncontrolled subdirs
            dirs.remove(self.dirname)
            entries_fn = os.path.join(base, self.dirname, 'entries')
            if not os.path.exists(entries_fn):
                ## FIXME: should we warn?
                continue
            dirurl, localrev = self._get_svn_url_rev(base)
            if base == location:
                base_url = dirurl + '/'  # save the root url
            elif not dirurl or not dirurl.startswith(base_url):
                # Directory belongs to a different repository (e.g. an
                # unrelated nested checkout); prune the walk here.
                dirs[:] = []
                continue  # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return revision
    def get_url_rev(self):
        """Return (url, rev), restoring the ``svn+`` prefix on ssh URLs."""
        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
        url, rev = super(Subversion, self).get_url_rev()
        if url.startswith('ssh://'):
            url = 'svn+' + url
        return url, rev
    def get_url(self, location):
        """Return the repository URL for ``location``, or None if not found."""
        # In cases where the source is in a subdirectory, not alongside setup.py
        # we have to look up in the location until we find a real setup.py
        orig_location = location
        while not os.path.exists(os.path.join(location, 'setup.py')):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without finding setup.py
                logger.warn("Could not find setup.py for directory %s (tried all parent directories)"
                            % orig_location)
                return None
        return self._get_svn_url_rev(location)[0]
    def _get_svn_url_rev(self, location):
        """Return (url, max_rev) for the working copy at ``location``.

        Handles three on-disk layouts: the plain-text ``.svn/entries`` file
        (format versions 8-10), the older XML ``entries`` file, and newer
        clients where the data must be queried via ``svn info --xml``.
        """
        from pip.exceptions import InstallationError
        f = open(os.path.join(location, self.dirname, 'entries'))
        data = f.read()
        f.close()
        if data.startswith('8') or data.startswith('9') or data.startswith('10'):
            # Plain-text entries format: records are separated by \n\x0c\n.
            data = list(map(str.splitlines, data.split('\n\x0c\n')))
            del data[0][0]  # get rid of the '8'
            url = data[0][3]
            revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
        elif data.startswith('<?xml'):
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError('Badly formatted data: %r' % data)
            url = match.group(1)  # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                xml = call_subprocess([self.cmd, 'info', '--xml', location], show_stdout=False)
                url = _svn_info_xml_url_re.search(xml).group(1)
                revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
            except InstallationError:
                url, revs = None, []
        if revs:
            rev = max(revs)
        else:
            rev = 0
        return url, rev
def get_tag_revs(self, svn_tag_url):
stdout = call_subprocess(
[self.cmd, 'ls', '-v', svn_tag_url], show_stdout=False)
results = []
for line in stdout.splitlines():
parts = line.split()
rev = int(parts[0])
tag = parts[-1].strip('/')
results.append((tag, rev))
return results
def find_tag_match(self, rev, tag_revs):
best_match_rev = None
best_tag = None
for tag, tag_rev in tag_revs:
if (tag_rev > rev and
(best_match_rev is None or best_match_rev > tag_rev)):
# FIXME: Is best_match > tag_rev really possible?
# or is it a sign something is wacky?
best_match_rev = tag_rev
best_tag = tag
return best_tag
def get_src_requirement(self, dist, location, find_tags=False):
repo = self.get_url(location)
if repo is None:
return None
parts = repo.split('/')
## FIXME: why not project name?
egg_project_name = dist.egg_name().split('-', 1)[0]
rev = self.get_revision(location)
if parts[-2] in ('tags', 'tag'):
# It's a tag, perfect!
full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
elif parts[-2] in ('branches', 'branch'):
# It's a branch :(
full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
elif parts[-1] == 'trunk':
# Trunk :-/
full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
if find_tags:
tag_url = '/'.join(parts[:-1]) + '/tags'
tag_revs = self.get_tag_revs(tag_url)
match = self.find_tag_match(rev, tag_revs)
if match:
logger.notify('trunk checkout %s seems to be equivalent to tag %s' % match)
repo = '%s/%s' % (tag_url, match)
full_egg_name = '%s-%s' % (egg_project_name, match)
else:
# Don't know what it is
logger.warn('svn URL does not fit normal structure (tags/branches/trunk): %s' % repo)
full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
def get_rev_options(url, rev):
    """Build svn command-line options for ``rev`` plus any credentials in ``url``.

    Returns a list such as ['-r', rev, '--username', u, '--password', p],
    omitting whichever pieces are absent.
    """
    if rev:
        rev_options = ['-r', rev]
    else:
        rev_options = []
    r = urlparse.urlsplit(url)
    if hasattr(r, 'username'):
        # >= Python-2.5
        username, password = r.username, r.password
    else:
        # Older Pythons: extract credentials from the netloc by hand.
        netloc = r[1]
        if '@' in netloc:
            auth = netloc.split('@')[0]
            if ':' in auth:
                username, password = auth.split(':', 1)
            else:
                username, password = auth, None
        else:
            username, password = None, None
    if username:
        rev_options += ['--username', username]
    if password:
        rev_options += ['--password', password]
    return rev_options
# Make the Subversion backend discoverable by the VCS dispatch registry.
vcs.register(Subversion)
| gpl-2.0 |
GoogleCloudPlatform/declarative-resource-client-library | python/services/iam/role.py | 1 | 11094 | # Copyright 2021 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from connector import channel
from google3.cloud.graphite.mmv2.services.google.iam import role_pb2
from google3.cloud.graphite.mmv2.services.google.iam import role_pb2_grpc
from typing import List
class Role(object):
    """Client-side representation of an IAM role.

    Mirrors the fields of the IamRole proto message and talks to the
    service stub over gRPC for apply/delete/list operations.
    """

    def __init__(
        self,
        name: str = None,
        title: str = None,
        description: str = None,
        localized_values: dict = None,
        lifecycle_phase: str = None,
        group_name: str = None,
        group_title: str = None,
        included_permissions: list = None,
        stage: str = None,
        etag: str = None,
        deleted: bool = None,
        included_roles: list = None,
        parent: str = None,
        service_account_file: str = "",
    ):
        channel.initialize()
        self.name = name
        self.title = title
        self.description = description
        self.localized_values = localized_values
        self.lifecycle_phase = lifecycle_phase
        self.group_name = group_name
        self.group_title = group_title
        self.included_permissions = included_permissions
        self.stage = stage
        self.etag = etag
        self.deleted = deleted
        self.included_roles = included_roles
        self.parent = parent
        self.service_account_file = service_account_file

    def _populate_resource(self, resource):
        """Copy this object's set fields onto an IamRole proto message.

        Shared by apply(), delete() and to_proto(), which previously
        duplicated this field-by-field marshalling code.
        """
        if Primitive.to_proto(self.name):
            resource.name = Primitive.to_proto(self.name)
        if Primitive.to_proto(self.title):
            resource.title = Primitive.to_proto(self.title)
        if Primitive.to_proto(self.description):
            resource.description = Primitive.to_proto(self.description)
        if RoleLocalizedValues.to_proto(self.localized_values):
            resource.localized_values.CopyFrom(
                RoleLocalizedValues.to_proto(self.localized_values)
            )
        else:
            resource.ClearField("localized_values")
        if Primitive.to_proto(self.lifecycle_phase):
            resource.lifecycle_phase = Primitive.to_proto(self.lifecycle_phase)
        if Primitive.to_proto(self.group_name):
            resource.group_name = Primitive.to_proto(self.group_name)
        if Primitive.to_proto(self.group_title):
            resource.group_title = Primitive.to_proto(self.group_title)
        if Primitive.to_proto(self.included_permissions):
            resource.included_permissions.extend(
                Primitive.to_proto(self.included_permissions)
            )
        if RoleStageEnum.to_proto(self.stage):
            resource.stage = RoleStageEnum.to_proto(self.stage)
        if Primitive.to_proto(self.etag):
            resource.etag = Primitive.to_proto(self.etag)
        if Primitive.to_proto(self.deleted):
            resource.deleted = Primitive.to_proto(self.deleted)
        if Primitive.to_proto(self.included_roles):
            resource.included_roles.extend(
                Primitive.to_proto(self.included_roles)
            )
        if Primitive.to_proto(self.parent):
            resource.parent = Primitive.to_proto(self.parent)

    def apply(self):
        """Create or update the role remotely and refresh local fields."""
        stub = role_pb2_grpc.IamRoleServiceStub(channel.Channel())
        request = role_pb2.ApplyIamRoleRequest()
        self._populate_resource(request.resource)
        request.service_account_file = self.service_account_file
        response = stub.ApplyIamRole(request)
        # Copy the server's authoritative view back onto this object.
        self.name = Primitive.from_proto(response.name)
        self.title = Primitive.from_proto(response.title)
        self.description = Primitive.from_proto(response.description)
        self.localized_values = RoleLocalizedValues.from_proto(
            response.localized_values
        )
        self.lifecycle_phase = Primitive.from_proto(response.lifecycle_phase)
        self.group_name = Primitive.from_proto(response.group_name)
        self.group_title = Primitive.from_proto(response.group_title)
        self.included_permissions = Primitive.from_proto(response.included_permissions)
        self.stage = RoleStageEnum.from_proto(response.stage)
        self.etag = Primitive.from_proto(response.etag)
        self.deleted = Primitive.from_proto(response.deleted)
        self.included_roles = Primitive.from_proto(response.included_roles)
        self.parent = Primitive.from_proto(response.parent)

    def delete(self):
        """Delete the role remotely."""
        stub = role_pb2_grpc.IamRoleServiceStub(channel.Channel())
        request = role_pb2.DeleteIamRoleRequest()
        request.service_account_file = self.service_account_file
        self._populate_resource(request.resource)
        response = stub.DeleteIamRole(request)

    @classmethod
    def list(self, parent, service_account_file=""):
        """Return all roles under ``parent``."""
        stub = role_pb2_grpc.IamRoleServiceStub(channel.Channel())
        request = role_pb2.ListIamRoleRequest()
        request.service_account_file = service_account_file
        request.Parent = parent
        return stub.ListIamRole(request).items

    def to_proto(self):
        """Return this object marshalled as an IamRole proto message."""
        resource = role_pb2.IamRole()
        self._populate_resource(resource)
        return resource
class RoleLocalizedValues(object):
    """Localized title/description pair, mapped to IamRoleLocalizedValues."""

    def __init__(self, localized_title: str = None, localized_description: str = None):
        self.localized_title = localized_title
        self.localized_description = localized_description

    @classmethod
    def to_proto(self, resource):
        """Marshal ``resource`` to a proto message; None passes through."""
        if not resource:
            return None
        res = role_pb2.IamRoleLocalizedValues()
        if Primitive.to_proto(resource.localized_title):
            res.localized_title = Primitive.to_proto(resource.localized_title)
        if Primitive.to_proto(resource.localized_description):
            res.localized_description = Primitive.to_proto(
                resource.localized_description
            )
        return res

    @classmethod
    def from_proto(self, resource):
        """Unmarshal a proto message; None passes through."""
        if not resource:
            return None
        return RoleLocalizedValues(
            localized_title=Primitive.from_proto(resource.localized_title),
            localized_description=Primitive.from_proto(resource.localized_description),
        )
class RoleLocalizedValuesArray(object):
    """Element-wise RoleLocalizedValues <-> proto conversion for lists."""

    @classmethod
    def to_proto(self, resources):
        # Falsy input (None or empty list) is returned unchanged.
        if not resources:
            return resources
        return [RoleLocalizedValues.to_proto(i) for i in resources]

    @classmethod
    def from_proto(self, resources):
        return [RoleLocalizedValues.from_proto(i) for i in resources]
class RoleStageEnum(object):
    """Converts between the bare enum name and the prefixed proto enum value."""

    @classmethod
    def to_proto(self, resource):
        if not resource:
            return resource
        # Proto enum values are prefixed with the enum type name.
        return role_pb2.IamRoleStageEnum.Value("IamRoleStageEnum%s" % resource)

    @classmethod
    def from_proto(self, resource):
        if not resource:
            return resource
        # Strip the "IamRoleStageEnum" prefix to recover the bare name.
        return role_pb2.IamRoleStageEnum.Name(resource)[len("IamRoleStageEnum") :]
class Primitive(object):
    """Converters between plain scalar values and proto scalar fields."""

    @classmethod
    def to_proto(self, s):
        # Proto scalar fields cannot hold None; falsy values become "".
        return s if s else ""

    @classmethod
    def from_proto(self, s):
        # Proto scalars come back unchanged.
        return s
| apache-2.0 |
zadarastorage/zadarapy | zadarapy/vpsaos/drives.py | 1 | 2669 | # Copyright 2019 Zadara Storage, Inc.
# Originally authored by Jeremy Brown - https://github.com/jwbrown77
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from zadarapy.validators import verify_start_limit, \
verify_volume_id
def get_all_drives(session, start=None, limit=None, return_type=None,
                   **kwargs):
    """
    Retrieves details for all drives for the VPSAOS.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object.  Required.

    :type start: int
    :param start: The offset to start displaying drives from.  Optional.

    :type: limit: int
    :param limit: The maximum number of drives to return.  Optional.

    :type return_type: str
    :param return_type: If this is set to the string 'json', this function
        will return a JSON string.  Otherwise, it will return a Python
        dictionary.  Optional (will return a Python dictionary by default).

    :rtype: dict, str
    :returns: A dictionary or JSON data set as a string depending on
        return_type parameter.
    """
    # verify_start_limit validates the pagination values and builds the
    # query-string parameters in one step.
    return session.get_api(path='/api/zios/drives.json',
                           parameters=verify_start_limit(start, limit),
                           return_type=return_type, **kwargs)
def get_one_drive(session, name, return_type=None, **kwargs):
    """
    Retrieves details for a single drive for the VPSAOS.

    :type session: zadarapy.session.Session
    :param session: A valid zadarapy.session.Session object.  Required.

    :type name: str
    :param name: The 'name' value as returned by
        get_all_drives.  Required.

    :type return_type: str
    :param return_type: If this is set to the string 'json', this function
        will return a JSON string.  Otherwise, it will return a Python
        dictionary.  Optional (will return a Python dictionary by default).

    :rtype: dict, str
    :returns: A dictionary or JSON data set as a string depending on
        return_type parameter.
    """
    # Reject malformed drive identifiers before touching the API.
    verify_volume_id(name)
    return session.get_api(path='/api/zios/drives/{0}.json'.format(name),
                           return_type=return_type, **kwargs)
| apache-2.0 |
yask123/scikit-learn | benchmarks/bench_plot_incremental_pca.py | 374 | 6430 | """
========================
IncrementalPCA benchmark
========================
Benchmarks for IncrementalPCA
"""
import numpy as np
import gc
from time import time
from collections import defaultdict
import matplotlib.pyplot as plt
from sklearn.datasets import fetch_lfw_people
from sklearn.decomposition import IncrementalPCA, RandomizedPCA, PCA
def plot_results(X, y, label):
    """Add one labelled line (with point markers) to the current figure."""
    plt.plot(X, y, label=label, marker='o')
def benchmark(estimator, data):
    """Fit ``estimator`` on ``data`` and measure runtime and reconstruction error.

    Returns a dict with keys 'time' (fit wall-clock seconds) and 'error'
    (mean absolute difference between ``data`` and its round-trip through
    transform / inverse_transform).
    """
    gc.collect()
    print("Benching %s" % estimator)
    start = time()
    estimator.fit(data)
    elapsed = time() - start
    transformed = estimator.transform(data)
    reconstructed = estimator.inverse_transform(transformed)
    return {'time': elapsed,
            'error': np.mean(np.abs(data - reconstructed))}
def plot_feature_times(all_times, batch_size, all_components, data):
    """Plot runtime vs. number of components for PCA, IncrementalPCA, RandomizedPCA."""
    plt.figure()
    plot_results(all_components, all_times['pca'], label="PCA")
    plot_results(all_components, all_times['ipca'],
                 label="IncrementalPCA, bsize=%i" % batch_size)
    plot_results(all_components, all_times['rpca'], label="RandomizedPCA")
    plt.legend(loc="upper left")
    plt.suptitle("Algorithm runtime vs. n_components\n \
                 LFW, size %i x %i" % data.shape)
    plt.xlabel("Number of components (out of max %i)" % data.shape[1])
    plt.ylabel("Time (seconds)")
def plot_feature_errors(all_errors, batch_size, all_components, data):
    """Plot reconstruction error vs. number of components for the three PCA variants."""
    plt.figure()
    plot_results(all_components, all_errors['pca'], label="PCA")
    plot_results(all_components, all_errors['ipca'],
                 label="IncrementalPCA, bsize=%i" % batch_size)
    plot_results(all_components, all_errors['rpca'], label="RandomizedPCA")
    plt.legend(loc="lower left")
    plt.suptitle("Algorithm error vs. n_components\n"
                 "LFW, size %i x %i" % data.shape)
    plt.xlabel("Number of components (out of max %i)" % data.shape[1])
    plt.ylabel("Mean absolute error")
def plot_batch_times(all_times, n_features, all_batch_sizes, data):
    """Plot runtime vs. batch size for a fixed number of components."""
    plt.figure()
    plot_results(all_batch_sizes, all_times['pca'], label="PCA")
    plot_results(all_batch_sizes, all_times['rpca'], label="RandomizedPCA")
    plot_results(all_batch_sizes, all_times['ipca'], label="IncrementalPCA")
    plt.legend(loc="lower left")
    plt.suptitle("Algorithm runtime vs. batch_size for n_components %i\n \
                 LFW, size %i x %i" % (
                 n_features, data.shape[0], data.shape[1]))
    plt.xlabel("Batch size")
    plt.ylabel("Time (seconds)")
def plot_batch_errors(all_errors, n_features, all_batch_sizes, data):
    """Plot reconstruction error vs. batch size for a fixed n_components.

    Only PCA and IncrementalPCA are shown here; RandomizedPCA is omitted
    (its error is far larger -- see the note in the caller).
    """
    plt.figure()
    plot_results(all_batch_sizes, all_errors['pca'], label="PCA")
    plot_results(all_batch_sizes, all_errors['ipca'], label="IncrementalPCA")
    plt.legend(loc="lower left")
    plt.suptitle("Algorithm error vs. batch_size for n_components %i\n \
                 LFW, size %i x %i" % (
                 n_features, data.shape[0], data.shape[1]))
    plt.xlabel("Batch size")
    plt.ylabel("Mean absolute error")
def fixed_batch_size_comparison(data):
    """Benchmark the three PCA variants over several n_components at one batch size."""
    # Five component counts spread between n_features/10 and n_features.
    all_features = [i.astype(int) for i in np.linspace(data.shape[1] // 10,
                                                       data.shape[1], num=5)]
    batch_size = 1000
    # Compare runtimes and error for fixed batch size
    all_times = defaultdict(list)
    all_errors = defaultdict(list)
    for n_components in all_features:
        pca = PCA(n_components=n_components)
        rpca = RandomizedPCA(n_components=n_components, random_state=1999)
        ipca = IncrementalPCA(n_components=n_components, batch_size=batch_size)
        results_dict = {k: benchmark(est, data) for k, est in [('pca', pca),
                                                               ('ipca', ipca),
                                                               ('rpca', rpca)]}

        for k in sorted(results_dict.keys()):
            all_times[k].append(results_dict[k]['time'])
            all_errors[k].append(results_dict[k]['error'])

    plot_feature_times(all_times, batch_size, all_features, data)
    plot_feature_errors(all_errors, batch_size, all_features, data)
def variable_batch_size_comparison(data):
    """Benchmark IncrementalPCA over several batch sizes, per n_components.

    PCA and RandomizedPCA have no batch size, so their single measurement is
    repeated as a flat baseline across all batch sizes.
    """
    batch_sizes = [i.astype(int) for i in np.linspace(data.shape[0] // 10,
                                                      data.shape[0], num=10)]

    for n_components in [i.astype(int) for i in
                         np.linspace(data.shape[1] // 10,
                                     data.shape[1], num=4)]:
        all_times = defaultdict(list)
        all_errors = defaultdict(list)
        pca = PCA(n_components=n_components)
        rpca = RandomizedPCA(n_components=n_components, random_state=1999)
        results_dict = {k: benchmark(est, data) for k, est in [('pca', pca),
                                                               ('rpca', rpca)]}

        # Create flat baselines to compare the variation over batch size
        all_times['pca'].extend([results_dict['pca']['time']] *
                                len(batch_sizes))
        all_errors['pca'].extend([results_dict['pca']['error']] *
                                 len(batch_sizes))
        all_times['rpca'].extend([results_dict['rpca']['time']] *
                                 len(batch_sizes))
        all_errors['rpca'].extend([results_dict['rpca']['error']] *
                                  len(batch_sizes))
        for batch_size in batch_sizes:
            ipca = IncrementalPCA(n_components=n_components,
                                  batch_size=batch_size)
            results_dict = {k: benchmark(est, data) for k, est in [('ipca',
                                                                   ipca)]}
            all_times['ipca'].append(results_dict['ipca']['time'])
            all_errors['ipca'].append(results_dict['ipca']['error'])

        plot_batch_times(all_times, n_components, batch_sizes, data)
        # RandomizedPCA error is always worse (approx 100x) than other PCA
        # tests
        plot_batch_errors(all_errors, n_components, batch_sizes, data)
# Load the LFW faces dataset, keep the first 5000 samples, standardize the
# features, then run both benchmark suites and show the figures.
faces = fetch_lfw_people(resize=.2, min_faces_per_person=5)
# limit dataset to 5000 people (don't care who they are!)
X = faces.data[:5000]
n_samples, h, w = faces.images.shape
n_features = X.shape[1]

# Center and scale each feature to zero mean / unit variance.
X -= X.mean(axis=0)
X /= X.std(axis=0)

fixed_batch_size_comparison(X)
variable_batch_size_comparison(X)
plt.show()
| bsd-3-clause |
zstackio/zstack-woodpecker | integrationtest/vm/virtualrouter/vip/test_multi_lbs_with_vip.py | 2 | 2982 | '''
Test load balance.
Test step:
1. Create 2 VM with load balance l3 network service.
2. Create a LB with 2 VMs' nic
3. Check the LB
4. Destroy VMs
@author: czhou25
'''
import os
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.zstack_test.zstack_test_load_balancer \
as zstack_lb_header
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
def test():
    """Integration scenario: two LBs sharing one VIP across two backend VMs.

    Creates two VMs, one VIP and two load balancers bound to the same VIP,
    attaches both VM nics to a listener on each LB, verifies everything,
    then tears it all down and verifies the deletion.
    """
    test_util.test_dsc('Create test vm with lb.')
    # Backend VMs for the load balancers.
    vm1 = test_stub.create_lb_vm()
    test_obj_dict.add_vm(vm1)
    vm2 = test_stub.create_lb_vm()
    test_obj_dict.add_vm(vm2)

    #l3_name = os.environ.get('l3VlanNetworkName1')
    #vr1 = test_stub.get_vr_by_private_l3_name(l3_name)
    #l3_name = os.environ.get('l3NoVlanNetworkName1')
    #vr2 = test_stub.get_vr_by_private_l3_name(l3_name)

    vm_nic1 = vm1.get_vm().vmNics[0]
    vm_nic1_uuid = vm_nic1.uuid
    vm_nic1_ip = vm_nic1.ip
    vm_nic2 = vm2.get_vm().vmNics[0]
    vm_nic2_uuid = vm_nic2.uuid
    vm_nic2_ip = vm_nic2.ip
    vm1.check()
    vm2.check()
    #test_lib.lib_wait_target_up(vm_nic1_ip, "root", 120)
    #test_lib.lib_wait_target_up(vm_nic2_ip, "root", 120)
    # Start an httpd in each VM so the LB has a real backend to probe.
    test_stub.set_httpd_in_vm(vm_nic1_ip, "root", "password")
    test_stub.set_httpd_in_vm(vm_nic2_ip, "root", "password")

    # Allocate the VIP on the virtual router's public L3 network.
    pri_l3_uuid = vm_nic1.l3NetworkUuid
    vr = test_lib.lib_find_vr_by_l3_uuid(pri_l3_uuid)[0]
    vr_pub_nic = test_lib.lib_find_vr_pub_nic(vr)
    l3_uuid = vr_pub_nic.l3NetworkUuid
    vip = test_stub.create_vip('vip_for_lb_test', l3_uuid)
    test_obj_dict.add_vip(vip)

    # Two load balancers sharing the same VIP, on different listener ports.
    lb = zstack_lb_header.ZstackTestLoadBalancer()
    lb2 = zstack_lb_header.ZstackTestLoadBalancer()
    lb.create('create lb test', vip.get_vip().uuid)
    lb2.create('create lb2 test', vip.get_vip().uuid)
    test_obj_dict.add_load_balancer(lb)
    test_obj_dict.add_load_balancer(lb2)
    vip.attach_lb(lb)
    vip.attach_lb(lb2)

    lb_creation_option = test_lib.lib_create_lb_listener_option()
    lb2_creation_option = test_lib.lib_create_lb_listener_option(lbl_port = 2222, lbi_port = 80)
    lbl = lb.create_listener(lb_creation_option)
    lbl2 = lb2.create_listener(lb2_creation_option)
    lbl.add_nics([vm_nic1_uuid, vm_nic2_uuid])
    lbl2.add_nics([vm_nic1_uuid, vm_nic2_uuid])

    vm1.check()
    vm2.check()
    lb.check()
    lb2.check()
    vip.check()

    # Teardown: delete both LBs and the VIP, then re-check for cleanliness.
    lb.delete()
    lb2.delete()
    vip.delete()
    test_obj_dict.rm_vip(vip)
    test_obj_dict.rm_load_balancer(lb)
    test_obj_dict.rm_load_balancer(lb2)
    lb.check()
    lb2.check()

    test_lib.lib_robot_cleanup(test_obj_dict)
    test_util.test_pass('Create Load Balancer Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
    """Best-effort cleanup of every resource recorded in test_obj_dict."""
    test_lib.lib_error_cleanup(test_obj_dict)
| apache-2.0 |
caneruguz/osf.io | scripts/tests/test_migrate_manual_merged_user.py | 61 | 1583 | from tests.base import OsfTestCase
from tests.factories import UserFactory
from scripts.migrate_manual_merged_user import (
do_migration,
get_targets,
)
class TestMigrateManualMergedUser(OsfTestCase):
    """Tests for the manual-merged-user migration script."""

    def test_get_targets(self):
        """get_targets returns exactly the users that have a merged_by set."""
        user1 = UserFactory.build(merged_by=None)
        user2 = UserFactory.build(merged_by=user1)
        user3 = UserFactory.build()
        user1.save()
        user2.save()
        user3.save()
        user_list = get_targets()
        assert user_list is not None
        # Compare integers with ==; `is` on int literals only works by
        # accident of CPython small-int interning.
        assert len(user_list) == 1
        user1.merged_by = user3
        user1.save()
        user_list = get_targets()
        assert len(user_list) == 2

    def test_do_migration(self):
        """do_migration clears credentials and verifications on merged users."""
        user1 = UserFactory.build(merged_by=None)
        user2 = UserFactory.build(merged_by=user1, verification_key="key1")
        user3 = UserFactory.build(merged_by=user1, verification_key="key2")
        user2.email_verifications['token'] = {'email': 'test@example.com'}
        user3.email_verifications['token'] = {'email': 'test@example.com'}
        user1.save()
        user2.save()
        user3.save()
        user_list = get_targets()
        do_migration(user_list)
        user2.reload()
        user3.reload()
        assert user2.username is None
        assert user2.password is None
        assert len(user2.email_verifications) == 0
        assert user2.verification_key is None
        assert user3.username is None
        assert user3.password is None
        assert len(user3.email_verifications) == 0
        assert user3.verification_key is None
| apache-2.0 |
miguelpalacio/python-for-android | python-build/python-libs/gdata/src/gdata/media/__init__.py | 221 | 12093 | # -*-*- encoding: utf-8 -*-*-
#
# This is gdata.photos.media, implementing parts of the MediaRSS spec in gdata structures
#
# $Id: __init__.py 81 2007-10-03 14:41:42Z havard.gulldahl $
#
# Copyright 2007 Håvard Gulldahl
# Portions copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Essential attributes of photos in Google Photos/Picasa Web Albums are
expressed using elements from the `media' namespace, defined in the
MediaRSS specification[1].
Due to copyright issues, the elements herein are documented sparingly, please
consult with the Google Photos API Reference Guide[2], alternatively the
official MediaRSS specification[1] for details.
(If there is a version conflict between the two sources, stick to the
Google Photos API).
[1]: http://search.yahoo.com/mrss (version 1.1.1)
[2]: http://code.google.com/apis/picasaweb/reference.html#media_reference
Keep in mind that Google Photos only uses a subset of the MediaRSS elements
(and some of the attributes are trimmed down, too):
media:content
media:credit
media:description
media:group
media:keywords
media:thumbnail
media:title
"""
__author__ = u'havard@gulldahl.no'# (Håvard Gulldahl)' #BUG: api chokes on non-ascii chars in __author__
__license__ = 'Apache License v2'
import atom
import gdata
MEDIA_NAMESPACE = 'http://search.yahoo.com/mrss/'
YOUTUBE_NAMESPACE = 'http://gdata.youtube.com/schemas/2007'
class MediaBaseElement(atom.AtomBase):
  """Base class for elements in the MEDIA_NAMESPACE.

  To add new elements, you only need to add the element tag name to self._tag
  """

  _tag = ''
  _namespace = MEDIA_NAMESPACE
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()

  def __init__(self, name=None, extension_elements=None,
      extension_attributes=None, text=None):
    # NOTE: fields are assigned directly rather than delegating to
    # atom.AtomBase.__init__.
    self.name = name
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}
class Content(MediaBaseElement):
  """(attribute container) This element describes the original content,
  e.g. an image or a video. There may be multiple Content elements
  in a media:Group.

  For example, a video may have a
  <media:content medium="image"> element that specifies a JPEG
  representation of the video, and a <media:content medium="video">
  element that specifies the URL of the video itself.

  Attributes:
  url: non-ambigous reference to online object
  width: width of the object frame, in pixels
  height: width of the object frame, in pixels
  medium: one of `image' or `video', allowing the api user to quickly
    determine the object's type
  type: Internet media Type[1] (a.k.a. mime type) of the object -- a more
    verbose way of determining the media type. To set the type member
    in the contructor, use the content_type parameter.
  (optional) fileSize: the size of the object, in bytes

  [1]: http://en.wikipedia.org/wiki/Internet_media_type
  """

  _tag = 'content'
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['url'] = 'url'
  _attributes['width'] = 'width'
  _attributes['height'] = 'height'
  _attributes['medium'] = 'medium'
  _attributes['type'] = 'type'
  _attributes['fileSize'] = 'fileSize'

  def __init__(self, url=None, width=None, height=None,
      medium=None, content_type=None, fileSize=None, format=None,
      extension_elements=None, extension_attributes=None, text=None):
    # NOTE(review): the `format` parameter is accepted but never stored --
    # presumably vestigial; confirm against callers before removing.
    MediaBaseElement.__init__(self, extension_elements=extension_elements,
                              extension_attributes=extension_attributes,
                              text=text)
    self.url = url
    self.width = width
    self.height = height
    self.medium = medium
    self.type = content_type
    self.fileSize = fileSize
def ContentFromString(xml_string):
  # Deserialize a media:content element from its XML string form.
  return atom.CreateClassFromXMLString(Content, xml_string)
class Credit(MediaBaseElement):
  """(string) Contains the nickname of the user who created the content,
  e.g. `Liz Bennet'.

  This is a user-specified value that should be used when referring to
  the user by name.

  Note that none of the attributes from the MediaRSS spec are supported.
  """

  _tag = 'credit'
def CreditFromString(xml_string):
  # Deserialize a media:credit element from its XML string form.
  return atom.CreateClassFromXMLString(Credit, xml_string)
class Description(MediaBaseElement):
  """(string) A description of the media object.

  Either plain unicode text, or entity-encoded html (look at the `type'
  attribute).

  E.g `A set of photographs I took while vacationing in Italy.'

  For `api' projections, the description is in plain text;
  for `base' projections, the description is in HTML.

  Attributes:
  type: either `text' or `html'. To set the type member in the contructor,
    use the description_type parameter.
  """

  _tag = 'description'
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['type'] = 'type'

  def __init__(self, description_type=None,
      extension_elements=None, extension_attributes=None, text=None):
    MediaBaseElement.__init__(self, extension_elements=extension_elements,
                              extension_attributes=extension_attributes,
                              text=text)
    self.type = description_type
def DescriptionFromString(xml_string):
  """Deserialize a media:description element from its XML string form."""
  return atom.CreateClassFromXMLString(Description, xml_string)
class Keywords(MediaBaseElement):
  """(string) Lists the tags associated with the entry,
  e.g `italy, vacation, sunset'.

  Contains a comma-separated list of tags that have been added to the photo, or
  all tags that have been added to photos in the album.
  """
  # The keyword list is carried as the element's text content.
  _tag = 'keywords'
def KeywordsFromString(xml_string):
  """Deserialize a media:keywords element from its XML string form."""
  return atom.CreateClassFromXMLString(Keywords, xml_string)
class Thumbnail(MediaBaseElement):
  """The media:thumbnail element.

  Points at a thumbnail rendition of a photo or at an album cover.  A
  single media:group may carry several thumbnails at different sizes;
  when the initial query set a `thumbsize` parameter these follow the
  requested sizes, otherwise the defaults are used.  Not to be confused
  with the gphoto:thumbnail element.

  Attributes:
    url: URL of the thumbnail image.
    height: image height, in pixels.
    width: image width, in pixels.
  """
  _tag = 'thumbnail'
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['url'] = 'url'
  _attributes['width'] = 'width'
  _attributes['height'] = 'height'

  def __init__(self, url=None, width=None, height=None,
               extension_attributes=None, text=None, extension_elements=None):
    MediaBaseElement.__init__(self, text=text,
                              extension_elements=extension_elements,
                              extension_attributes=extension_attributes)
    # Per-instance values of the three declared XML attributes.
    self.url = url
    self.width = width
    self.height = height
def ThumbnailFromString(xml_string):
  """Deserialize a media:thumbnail element from its XML string form."""
  return atom.CreateClassFromXMLString(Thumbnail, xml_string)
class Title(MediaBaseElement):
  """The media:title element: the entry's media title, in plain text.

  Attributes:
    type: always `plain'; set via the ``title_type`` constructor
        parameter.
  """
  _tag = 'title'
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['type'] = 'type'

  def __init__(self, title_type=None,
               extension_attributes=None, text=None, extension_elements=None):
    MediaBaseElement.__init__(self, text=text,
                              extension_elements=extension_elements,
                              extension_attributes=extension_attributes)
    self.type = title_type
def TitleFromString(xml_string):
  """Deserialize a media:title element from its XML string form."""
  return atom.CreateClassFromXMLString(Title, xml_string)
class Player(MediaBaseElement):
  """(string) The media:player element.

  Contains the embeddable player URL for the entry's media content when
  the media is a video.

  Attributes:
    url: URL of the embeddable player.
  """
  _tag = 'player'
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['url'] = 'url'

  def __init__(self, player_url=None,
               extension_attributes=None, extension_elements=None,
               text=None):
    # Consistency fix: every sibling media element forwards the element
    # text to MediaBaseElement; Player silently dropped it.  The new
    # trailing text=None keyword is backward compatible with all
    # existing positional/keyword callers.
    MediaBaseElement.__init__(self, extension_elements=extension_elements,
                              extension_attributes=extension_attributes,
                              text=text)
    self.url = player_url
class Private(atom.AtomBase):
  """The YouTube Private element (yt:private; declares no extra attributes)."""
  _tag = 'private'
  _namespace = YOUTUBE_NAMESPACE
class Duration(atom.AtomBase):
  """The YouTube Duration element (yt:duration).

  Exposes the `seconds' XML attribute.
  """
  _tag = 'duration'
  _namespace = YOUTUBE_NAMESPACE
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['seconds'] = 'seconds'
class Category(MediaBaseElement):
  """The media:category element.

  Exposes the `term', `scheme' and `label' XML attributes.
  """
  _tag = 'category'
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['term'] = 'term'
  _attributes['scheme'] = 'scheme'
  _attributes['label'] = 'label'

  def __init__(self, term=None, scheme=None, label=None, text=None,
               extension_elements=None, extension_attributes=None):
    """Build a Category.

    Args:
      term: str category term.
      scheme: str URI identifying the categorization scheme.
      label: str human-readable label for the category.
      text: str text content of the element.
      extension_elements: list of ExtensionElement instances.
      extension_attributes: dict of attribute name/value string pairs.
    """
    # Assigns the base-element state directly (no MediaBaseElement.__init__
    # call), mirroring the original implementation.
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}
    self.term = term
    self.scheme = scheme
    self.label = label
class Group(MediaBaseElement):
  """The media:group container element.

  Bundles all media:* (and yt:*) child elements; may appear as a child
  of an album, photo or video entry.
  """
  _tag = 'group'
  _children = atom.AtomBase._children.copy()
  _children['{%s}content' % MEDIA_NAMESPACE] = ('content', [Content,])
  _children['{%s}credit' % MEDIA_NAMESPACE] = ('credit', Credit)
  _children['{%s}description' % MEDIA_NAMESPACE] = ('description', Description)
  _children['{%s}keywords' % MEDIA_NAMESPACE] = ('keywords', Keywords)
  _children['{%s}thumbnail' % MEDIA_NAMESPACE] = ('thumbnail', [Thumbnail,])
  _children['{%s}title' % MEDIA_NAMESPACE] = ('title', Title)
  _children['{%s}category' % MEDIA_NAMESPACE] = ('category', [Category,])
  _children['{%s}duration' % YOUTUBE_NAMESPACE] = ('duration', Duration)
  _children['{%s}private' % YOUTUBE_NAMESPACE] = ('private', Private)
  _children['{%s}player' % MEDIA_NAMESPACE] = ('player', Player)

  def __init__(self, content=None, credit=None, description=None, keywords=None,
               thumbnail=None, title=None, duration=None, private=None,
               category=None, player=None, extension_elements=None,
               extension_attributes=None, text=None):
    MediaBaseElement.__init__(self, text=text,
                              extension_elements=extension_elements,
                              extension_attributes=extension_attributes)
    # Single-valued children.
    self.content = content
    self.credit = credit
    self.description = description
    self.keywords = keywords
    self.title = title
    self.duration = duration
    self.private = private
    self.player = player
    # Repeatable children default to fresh, per-instance empty lists.
    self.thumbnail = thumbnail or []
    self.category = category or []
def GroupFromString(xml_string):
  """Deserialize a media:group element from its XML string form."""
  return atom.CreateClassFromXMLString(Group, xml_string)
| apache-2.0 |
github4ry/pathomx | pathomx/kernel_helpers.py | 2 | 3634 | import os
import sys
import numpy as np
import pandas as pd
import re
import io
from matplotlib.figure import Figure, AxesStack
from matplotlib.axes import Subplot
from mplstyler import StylesManager
import warnings
from . import displayobjects
from .utils import scriptdir, basedir
from IPython.core import display
from copy import deepcopy
# Whitelist of value types that may be handed back from a tool kernel to
# the host application.
# NOTE(review): np.array is a *function*, not a type, so a type(v) lookup
# can never match that entry (np.ndarray covers the real array type) --
# confirm whether it can simply be dropped.
MAGIC_TYPES = [
    # Numpy
    np.array, np.ndarray,
    # Pandas
    pd.Series, pd.DataFrame,
    Figure, Subplot,
    StylesManager,
    # View types
    displayobjects.Svg, displayobjects.Html, displayobjects.Markdown,
    display.SVG
]
class PathomxTool(object):
    """User-friendly namespace holding a tool's output data.

    Keyword arguments become instance attributes; the tool prints as its
    given name.  For display convenience only, not for programmatic use.
    """

    def __init__(self, name, *args, **kwargs):
        # Expose every keyword argument as an attribute, then record the
        # display name (so a '_name' kwarg cannot mask it).
        for attr, value in kwargs.items():
            setattr(self, attr, value)
        self._name = name

    def __str__(self):
        return self._name

    def __repr__(self):
        return self._name
def pathomx_notebook_start(vars):
    """Prepare a tool kernel namespace *vars* in place before execution.

    Copies each declared input variable onto the name the tool code
    expects, and applies matplotlib rcParams passed in by the host.
    """
    #for k, v in varsi.items():
    #    vars[k] = v

    # _keep_input_vars = ['styles']
    # vars['_pathomx_exclude_input_vars'] = [x for x in varsi.keys() if x not in _keep_input_vars]

    # Handle IO magic: '_io'['input'] maps local-name -> source-name.
    # deepcopy isolates this tool from mutations of the upstream value.
    if '_io' in vars:
        for k, v in vars['_io']['input'].items():
            if v in vars:
                vars[k] = deepcopy(vars[v])
            else:
                vars[k] = None

    if '_rcParams' in vars:
        # Import at call time and publish module-wide so later code in this
        # kernel sees the configured rcParams.
        global rcParams
        from matplotlib import rcParams
        # Block warnings from deprecated rcParams here
        with warnings.catch_warnings():
            warnings.simplefilter("ignore")
            for k, v in vars['_rcParams'].items():
                rcParams[k] = v

    # Legacy shim
    if '_styles' in vars:
        vars['styles'] = vars['_styles']
def pathomx_notebook_stop(vars):
    """Collect the kernel namespace *vars* into vars['varso'] after a run.

    Output variables are re-published under their mapped public names,
    then filtered down to types the host UI knows how to display.
    """
    varso = {}

    if '_io' in vars:
        # Handle IO magic: '_io'['output'] maps local-name -> public-name.
        for k, v in vars['_io']['output'].items():
            if k in vars:
                vars[v] = vars[k]
            else:
                vars[v] = None

    # NOTE(review): vars['_io'] is read unconditionally below -- a run
    # without '_io' would raise KeyError here; confirm callers always set it.
    for k, v in vars.items():
        # Check it's an accepted type for passing; and not private (starts with _)
        if not k.startswith('_') and \
           not k in vars['_io']['input'].keys():
            if type(v) in MAGIC_TYPES or k in vars['_pathomx_expected_output_vars']:
                varso[k] = v
            elif hasattr(v, '_repr_html_'):
                try:
                    # Check if it is a bound method (not a class definition)
                    v._repr_html_()
                except:
                    pass
                else:
                    # Wrap anything HTML-renderable for display in the UI.
                    varso[k] = displayobjects.Html(v)

    vars['varso'] = varso
def progress(progress):
    """Report fractional *progress* (0..1) to stdout on the remote core.

    The host reads this marker line from stdout and updates the UI.
    """
    marker = "____pathomx_execute_progress_%.2f____" % progress
    print(marker)
class open_with_progress(io.IOBase):
    """File-like wrapper intended to emit progress markers while being read.

    NOTE(review): io.IOBase.__init__ accepts no arguments, so the
    super().__init__(f, ...) call below looks unlikely to work as written,
    and read() discards the data it reads instead of returning it.
    Confirm against actual callers before relying on this class.
    """

    def __init__(self, f, *args, **kwargs):
        super(open_with_progress, self).__init__(f, *args, **kwargs)
        # File size is captured up front so progress can be derived from tell().
        self._fsize = os.path.getsize(f)
        self._progress = None

    def read(self, *args, **kwargs):
        # Delegate the read, then report how far through the file we are.
        super(open_with_progress, self).read(*args, **kwargs)
        self.check_and_emit_progress()

    def check_and_emit_progress(self):
        # We only output at 2dp so only emit when that changes
        prg = round(self.tell() / self._fsize, 2)
        if prg != self._progress:
            self._progress = prg
| gpl-3.0 |
sekikn/ambari | ambari-server/src/test/python/custom_actions/TestUpdateRepo.py | 2 | 5282 | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
import os, sys
from mock.mock import patch
from mock.mock import MagicMock
from unittest import TestCase
from resource_management import *
from resource_management import Script
from ambari_commons.os_check import OSCheck
from update_repo import UpdateRepo
class TestUpdateRepo(TestCase):
  """Unit tests for the update_repo custom action (Python 2 codebase)."""

  @patch.object(OSCheck, "is_suse_family")
  @patch.object(OSCheck, "is_ubuntu_family")
  @patch.object(OSCheck, "is_redhat_family")
  @patch.object(Script, 'get_config')
  @patch("resource_management.libraries.providers.repository.File")
  @patch("resource_management.libraries.script.Script.put_structured_out")
  @patch.object(System, "os_family", new='redhat')
  def testUpdateRepo(self, structured_out_mock, file_mock, mock_config, is_redhat_mock, is_ubuntu_mock, is_suse_mock):
    # Mock arguments arrive in reverse decorator order (bottom decorator first).
    ###### valid case
    # Force the RedHat code path regardless of the host running the tests.
    is_suse_mock.return_value = False
    is_ubuntu_mock.return_value = False
    is_redhat_mock.return_value = True
    updateRepo = UpdateRepo()
    mock_config.return_value = { "configurations": {
          "cluster-env": {
            "repo_suse_rhel_template": "REPO_SUSE_RHEL_TEST_TEMPLATE",
            "repo_ubuntu_template": "REPO_UBUNTU_TEMPLATE"
          }
        },
        "repositoryFile": {
          "resolved": True,
          "repoVersion": "2.4.3.0-227",
          "repositories": [
            {
              "mirrorsList": None,
              "ambariManaged": True,
              "baseUrl": "http://public-repo-1.hortonworks.com/HDP/centos6/2.x/updates/2.4.3.0/",
              "repoName": "HDP",
              "components": None,
              "osType": "redhat6",
              "distribution": None,
              "repoId": "HDP-2.4-repo-1"
            },
            {
              "mirrorsList": None,
              "ambariManaged": True,
              "baseUrl": "http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.20/repos/centos6",
              "repoName": "HDP-UTILS",
              "components": None,
              "osType": "redhat6",
              "distribution": None,
              "repoId": "HDP-UTILS-1.1.0.20-repo-1"
            }
          ],
          "feature": {
            "m_isScoped": False,
            "m_isPreInstalled": False
          },
          "stackName": "HDP",
          "repoVersionId": 1
        },
    }

    with Environment('/') as env:
      updateRepo.actionexecute(None)

    # The action should have written the yum repo file and reported success.
    self.assertTrue(file_mock.called)
    self.assertEquals(file_mock.call_args[0][0], "/etc/yum.repos.d/HDP.repo")
    self.assertEquals(structured_out_mock.call_args[0][0], {'repo_update': {'message': 'Repository files successfully updated!', 'exit_code': 0}})

    ###### invalid repo info
    file_mock.reset_mock()
    failed = False
    # NOTE(review): the key below looks garbled ("clugit ster-env"); the
    # valid case uses "cluster-env".  Harmless here because repositoryFile
    # is empty, but confirm against upstream.
    mock_config.return_value = { "configurations": {
          "clugit ster-env": {
            "repo_suse_rhel_template": "REPO_SUSE_RHEL_TEST_TEMPLATE",
            "repo_ubuntu_template": "REPO_UBUNTU_TEMPLATE"
          }
        },
        "repositoryFile": {}
    }
    try:
      with Environment('/') as env:
        updateRepo.actionexecute(None)
    except Exception, exception:
      # Python 2 except syntax; the empty repositoryFile must raise.
      failed = True

    # No repo file may be written when the repository info is invalid.
    self.assertFalse(file_mock.called)
self.assertTrue(failed) | apache-2.0 |
hbrunn/hr | hr_payroll_extension/__openerp__.py | 11 | 1856 | # -*- coding:utf-8 -*-
#
#
# Copyright (C) 2013 Michael Telahun Makonnen <mmakonnen@gmail.com>.
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
{
'name': 'Payroll Extension',
'category': 'Human Resources',
'author': "Michael Telahun Makonnen,Odoo Community Association (OCA)",
'website': 'http://miketelahun.wordpress.com',
'license': 'AGPL-3',
'version': '1.0',
'description': """
Extended set of Payroll Rules and Structures
============================================
- Detailed calculation of worked hours, leaves, overtime, etc
- Overtime
- Paid and Unpaid Leaves
- Federal Income Tax Withholding rules
- Provident/Pension Fund contributions
- Various Earnings and Deductions
- Payroll Report
""",
'depends': [
'hr_attendance',
'hr_payroll',
'hr_payroll_period',
'hr_policy_absence',
'hr_policy_ot',
'hr_policy_presence',
'hr_public_holidays',
'hr_schedule',
],
'data': [
'data/hr_payroll_extension_data.xml',
'data/hr.salary.rule.csv',
'hr_payroll_view.xml',
],
'test': [
],
'installable': False,
}
| agpl-3.0 |
luotao1/Paddle | python/paddle/dataset/cifar.py | 1 | 5299 | # Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
CIFAR dataset.
This module will download dataset from https://dataset.bj.bcebos.com/cifar/cifar-10-python.tar.gz and https://dataset.bj.bcebos.com/cifar/cifar-100-python.tar.gz, parse train/test set into
paddle reader creators.
The CIFAR-10 dataset consists of 60000 32x32 color images in 10 classes,
with 6000 images per class. There are 50000 training images and 10000 test
images.
The CIFAR-100 dataset is just like the CIFAR-10, except it has 100 classes
containing 600 images each. There are 500 training images and 100 testing
images per class.
"""
from __future__ import print_function
import itertools
import numpy
import paddle.dataset.common
import paddle.utils.deprecated as deprecated
import tarfile
import six
from six.moves import cPickle as pickle
__all__ = ['train100', 'test100', 'train10', 'test10']
URL_PREFIX = 'https://dataset.bj.bcebos.com/cifar/'
CIFAR10_URL = URL_PREFIX + 'cifar-10-python.tar.gz'
CIFAR10_MD5 = 'c58f30108f718f92721af3b95e74349a'
CIFAR100_URL = URL_PREFIX + 'cifar-100-python.tar.gz'
CIFAR100_MD5 = 'eb9058c3a382ffc7106e4002c42a8d85'
def reader_creator(filename, sub_name, cycle=False):
    """Build a paddle-style reader over a CIFAR tarball.

    :param filename: path to the cifar-*.tar.gz archive.
    :param sub_name: substring selecting members inside the archive
        (e.g. 'train', 'data_batch', 'test_batch').
    :param cycle: when True, loop over the dataset forever.
    :return: a no-arg generator function yielding (image, label) pairs,
        image being a float32 array scaled into [0, 1].
    """
    def read_batch(batch):
        # Each pickled batch maps b'data' -> uint8 pixel rows and either
        # b'labels' (CIFAR-10) or b'fine_labels' (CIFAR-100) -> class ids.
        data = batch[six.b('data')]
        labels = batch.get(
            six.b('labels'), batch.get(six.b('fine_labels'), None))
        assert labels is not None
        for sample, label in six.moves.zip(data, labels):
            yield (sample / 255.0).astype(numpy.float32), int(label)

    def reader():
        while True:
            with tarfile.open(filename, mode='r') as f:
                names = (each_item.name for each_item in f
                         if sub_name in each_item.name)

                for name in names:
                    if six.PY2:
                        batch = pickle.load(f.extractfile(name))
                    else:
                        # Python 3 must unpickle py2 pickles with bytes keys.
                        batch = pickle.load(
                            f.extractfile(name), encoding='bytes')
                    for item in read_batch(batch):
                        yield item
            if not cycle:
                break

    return reader
@deprecated(
    since="2.0.0",
    update_to="paddle.vision.datasets.Cifar100",
    reason="Please use new dataset API which supports paddle.io.DataLoader")
def train100():
    """
    CIFAR-100 training set creator.

    Each sample yielded by the reader is (image pixels in [0, 1],
    label in [0, 99]).

    :return: Training reader creator
    :rtype: callable
    """
    archive = paddle.dataset.common.download(CIFAR100_URL, 'cifar',
                                             CIFAR100_MD5)
    return reader_creator(archive, 'train')
@deprecated(
    since="2.0.0",
    update_to="paddle.vision.datasets.Cifar100",
    reason="Please use new dataset API which supports paddle.io.DataLoader")
def test100():
    """
    CIFAR-100 test set creator.

    Each sample yielded by the reader is (image pixels in [0, 1],
    label in [0, 99]).

    :return: Test reader creator.
    :rtype: callable
    """
    archive = paddle.dataset.common.download(CIFAR100_URL, 'cifar',
                                             CIFAR100_MD5)
    return reader_creator(archive, 'test')
@deprecated(
    since="2.0.0",
    update_to="paddle.vision.datasets.Cifar10",
    reason="Please use new dataset API which supports paddle.io.DataLoader")
def train10(cycle=False):
    """
    CIFAR-10 training set creator.

    Each sample yielded by the reader is (image pixels in [0, 1],
    label in [0, 9]).

    :param cycle: whether to cycle through the dataset
    :type cycle: bool
    :return: Training reader creator
    :rtype: callable
    """
    archive = paddle.dataset.common.download(CIFAR10_URL, 'cifar',
                                             CIFAR10_MD5)
    return reader_creator(archive, 'data_batch', cycle=cycle)
@deprecated(
    since="2.0.0",
    update_to="paddle.vision.datasets.Cifar10",
    reason="Please use new dataset API which supports paddle.io.DataLoader")
def test10(cycle=False):
    """
    CIFAR-10 test set creator.

    Each sample yielded by the reader is (image pixels in [0, 1],
    label in [0, 9]).

    :param cycle: whether to cycle through the dataset
    :type cycle: bool
    :return: Test reader creator.
    :rtype: callable
    """
    archive = paddle.dataset.common.download(CIFAR10_URL, 'cifar',
                                             CIFAR10_MD5)
    return reader_creator(archive, 'test_batch', cycle=cycle)
@deprecated(
    since="2.0.0",
    update_to="paddle.vision.datasets.Cifar10",
    reason="Please use new dataset API which supports paddle.io.DataLoader")
def fetch():
    """Pre-download both CIFAR archives into the local dataset cache."""
    for url, md5 in ((CIFAR10_URL, CIFAR10_MD5),
                     (CIFAR100_URL, CIFAR100_MD5)):
        paddle.dataset.common.download(url, 'cifar', md5)
| apache-2.0 |
mspringett/namebench | nb_third_party/dns/tsig.py | 215 | 7851 | # Copyright (C) 2001-2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS TSIG support."""
import hmac
import struct
import dns.exception
import dns.rdataclass
import dns.name
class BadTime(dns.exception.DNSException):
    """The current time is outside the TSIG's validity window."""
class BadSignature(dns.exception.DNSException):
    """The TSIG signature failed to verify."""
class PeerError(dns.exception.DNSException):
    """Base class for every TSIG error reported by the remote peer."""
class PeerBadKey(PeerError):
    """The peer did not recognize the key we used."""
class PeerBadSignature(PeerError):
    """The peer rejected the signature we sent."""
class PeerBadTime(PeerError):
    """The peer rejected the signing time we sent."""
class PeerBadTruncation(PeerError):
    """The peer rejected the amount of truncation in the TSIG we sent."""
# Default TSIG signing algorithm name.
default_algorithm = "HMAC-MD5.SIG-ALG.REG.INT"

# TSIG error codes (RFC 2845; BADTRUNC added by RFC 4635).
BADSIG = 16
BADKEY = 17
BADTIME = 18
BADTRUNC = 22
def sign(wire, keyname, secret, time, fudge, original_id, error,
         other_data, request_mac, ctx=None, multi=False, first=True,
         algorithm=default_algorithm):
    """Return a (tsig_rdata, mac, ctx) tuple containing the HMAC TSIG rdata
    for the input parameters, the HMAC MAC calculated by applying the
    TSIG signature algorithm, and the TSIG digest context.
    @rtype: (string, string, hmac.HMAC object)
    @raises ValueError: I{other_data} is too long
    @raises NotImplementedError: I{algorithm} is not supported
    """

    (algorithm_name, digestmod) = get_algorithm(algorithm)
    if first:
        ctx = hmac.new(secret, digestmod=digestmod)
        # Chain in the MAC of the request (or previous message), length-prefixed.
        ml = len(request_mac)
        if ml > 0:
            ctx.update(struct.pack('!H', ml))
            ctx.update(request_mac)
    # Digest uses the *original* message id, not one rewritten by a forwarder.
    id = struct.pack('!H', original_id)
    ctx.update(id)
    ctx.update(wire[2:])
    if first:
        # First message digests the TSIG pseudo-record header:
        # key name, class ANY, TTL 0.
        ctx.update(keyname.to_digestable())
        ctx.update(struct.pack('!H', dns.rdataclass.ANY))
        ctx.update(struct.pack('!I', 0))
    # Split the 48-bit signing time into 16 high + 32 low bits.
    # (0L / 0xffffL are Python 2 long literals; this module is Python 2 code.)
    long_time = time + 0L
    upper_time = (long_time >> 32) & 0xffffL
    lower_time = long_time & 0xffffffffL
    time_mac = struct.pack('!HIH', upper_time, lower_time, fudge)
    pre_mac = algorithm_name + time_mac
    ol = len(other_data)
    if ol > 65535:
        raise ValueError('TSIG Other Data is > 65535 bytes')
    post_mac = struct.pack('!HH', error, ol) + other_data
    if first:
        ctx.update(pre_mac)
        ctx.update(post_mac)
    else:
        # Later messages in a signed stream digest only the time fields.
        ctx.update(time_mac)
    mac = ctx.digest()
    mpack = struct.pack('!H', len(mac))
    tsig_rdata = pre_mac + mpack + mac + id + post_mac
    if multi:
        # Seed the context for the next message with this message's MAC.
        # NOTE(review): this hmac.new() omits digestmod, so it falls back to
        # MD5 even for SHA-based algorithms -- confirm against upstream.
        ctx = hmac.new(secret)
        ml = len(mac)
        ctx.update(struct.pack('!H', ml))
        ctx.update(mac)
    else:
        ctx = None
    return (tsig_rdata, mac, ctx)
def hmac_md5(wire, keyname, secret, time, fudge, original_id, error,
             other_data, request_mac, ctx=None, multi=False, first=True,
             algorithm=default_algorithm):
    """Backwards-compatible alias for sign(); delegates unchanged."""
    return sign(wire, keyname, secret, time, fudge, original_id, error,
                other_data, request_mac, ctx=ctx, multi=multi, first=first,
                algorithm=algorithm)
def validate(wire, keyname, secret, now, request_mac, tsig_start, tsig_rdata,
             tsig_rdlen, ctx=None, multi=False, first=True):
    """Validate the specified TSIG rdata against the other input parameters.

    @raises FormError: The TSIG is badly formed.
    @raises BadTime: There is too much time skew between the client and the
    server.
    @raises BadSignature: The TSIG signature did not validate
    @rtype: hmac.HMAC object"""

    # Strip the TSIG record from the message: decrement ARCOUNT (bytes
    # 10-11 of the header) and truncate the wire image at the TSIG start.
    (adcount,) = struct.unpack("!H", wire[10:12])
    if adcount == 0:
        raise dns.exception.FormError
    adcount -= 1
    new_wire = wire[0:10] + struct.pack("!H", adcount) + wire[12:tsig_start]
    current = tsig_rdata
    # Parse the TSIG rdata: algorithm name, 48-bit time, fudge, MAC,
    # original id, error and other data.
    (aname, used) = dns.name.from_wire(wire, current)
    current = current + used
    (upper_time, lower_time, fudge, mac_size) = \
        struct.unpack("!HIHH", wire[current:current + 10])
    # 0L: Python 2 long literal (this module is Python 2 code).
    time = ((upper_time + 0L) << 32) + (lower_time + 0L)
    current += 10
    mac = wire[current:current + mac_size]
    current += mac_size
    (original_id, error, other_size) = \
        struct.unpack("!HHH", wire[current:current + 6])
    current += 6
    other_data = wire[current:current + other_size]
    current += other_size
    # Every byte of the rdata must have been consumed exactly.
    if current != tsig_rdata + tsig_rdlen:
        raise dns.exception.FormError
    # A non-zero error code means the peer rejected *our* TSIG.
    if error != 0:
        if error == BADSIG:
            raise PeerBadSignature
        elif error == BADKEY:
            raise PeerBadKey
        elif error == BADTIME:
            raise PeerBadTime
        elif error == BADTRUNC:
            raise PeerBadTruncation
        else:
            raise PeerError('unknown TSIG error code %d' % error)
    # Reject if our clock differs from the signing time by more than fudge.
    time_low = time - fudge
    time_high = time + fudge
    if now < time_low or now > time_high:
        raise BadTime
    # Recompute the MAC over the stripped message and compare with the
    # one the peer sent; aname is the parsed algorithm name.
    (junk, our_mac, ctx) = sign(new_wire, keyname, secret, time, fudge,
                                original_id, error, other_data,
                                request_mac, ctx, multi, first, aname)
    if (our_mac != mac):
        raise BadSignature
    return ctx
def get_algorithm(algorithm):
    """Returns the wire format string and the hash module to use for the
    specified TSIG algorithm

    @rtype: (string, hash constructor)
    @raises NotImplementedError: I{algorithm} is not supported
    """

    hashes = {}
    try:
        import hashlib
        hashes[dns.name.from_text('hmac-sha224')] = hashlib.sha224
        hashes[dns.name.from_text('hmac-sha256')] = hashlib.sha256
        hashes[dns.name.from_text('hmac-sha384')] = hashlib.sha384
        hashes[dns.name.from_text('hmac-sha512')] = hashlib.sha512
        hashes[dns.name.from_text('hmac-sha1')] = hashlib.sha1
        hashes[dns.name.from_text('HMAC-MD5.SIG-ALG.REG.INT')] = hashlib.md5

        import sys
        if sys.hexversion < 0x02050000:
            # hashlib doesn't conform to PEP 247: API for
            # Cryptographic Hash Functions, which hmac before python
            # 2.5 requires, so add the necessary items.
            class HashlibWrapper:
                def __init__(self, basehash):
                    self.basehash = basehash
                    self.digest_size = self.basehash().digest_size

                def new(self, *args, **kwargs):
                    return self.basehash(*args, **kwargs)

            for name in hashes:
                hashes[name] = HashlibWrapper(hashes[name])

    except ImportError:
        # Pre-hashlib Pythons: fall back to the legacy md5/sha modules,
        # which only cover MD5 and SHA-1.
        import md5, sha
        hashes[dns.name.from_text('HMAC-MD5.SIG-ALG.REG.INT')] = md5.md5
        hashes[dns.name.from_text('hmac-sha1')] = sha.sha

    # Accept either a textual algorithm name or a dns.name.Name.
    # (unicode is the distinct Python 2 text type.)
    if isinstance(algorithm, (str, unicode)):
        algorithm = dns.name.from_text(algorithm)

    if algorithm in hashes:
        return (algorithm.to_digestable(), hashes[algorithm])

    raise NotImplementedError("TSIG algorithm " + str(algorithm) +
                              " is not supported")
| apache-2.0 |
Permutatrix/servo | tests/wpt/web-platform-tests/webdriver/tests/state/get_element_tag_name.py | 5 | 6273 | from tests.support.asserts import assert_error, assert_dialog_handled, assert_success
from tests.support.inline import inline
from tests.support.fixtures import create_dialog
# 13.6 Get Element Tag Name
def test_no_browsing_context(session, create_window):
    # 13.6 step 1: the command targets a window that has been closed.
    session.window_handle = create_window()
    session.close()

    endpoint = "session/{session_id}/element/{element_id}/name".format(
        session_id=session.session_id, element_id="foo")
    result = session.transport.send("GET", endpoint)

    assert_error(result, "no such window")
def test_handle_prompt_dismiss(new_session, add_browser_capabilites):
    # 13.6 step 2: user prompts are auto-dismissed, command still succeeds.
    _, session = new_session({"capabilities": {"alwaysMatch": add_browser_capabilites({"unhandledPromptBehavior": "dismiss"})}})
    session.url = inline("<input id=foo>")
    element = session.find.css("#foo", all=False)

    for index, dialog_type in enumerate(("alert", "confirm", "prompt"), 1):
        text = "dismiss #%d" % index
        create_dialog(session)(dialog_type, text=text,
                               result_var="dismiss%d" % index)

        result = session.transport.send(
            "GET",
            "session/{session_id}/element/{element_id}/name".format(
                session_id=session.session_id, element_id=element.id))

        assert_success(result, "input")
        assert_dialog_handled(session, text)
def test_handle_prompt_accept(new_session, add_browser_capabilites):
    # 13.6 step 2: user prompts are auto-accepted, command still succeeds.
    _, session = new_session({"capabilities": {"alwaysMatch": add_browser_capabilites({"unhandledPromptBehavior": "accept"})}})
    session.url = inline("<input id=foo>")
    element = session.find.css("#foo", all=False)

    for index, dialog_type in enumerate(("alert", "confirm", "prompt"), 1):
        text = "dismiss #%d" % index
        create_dialog(session)(dialog_type, text=text,
                               result_var="dismiss%d" % index)

        result = session.transport.send(
            "GET",
            "session/{session_id}/element/{element_id}/name".format(
                session_id=session.session_id, element_id=element.id))

        assert_success(result, "input")
        assert_dialog_handled(session, text)
def test_handle_prompt_missing_value(session):
    # 13.6 step 2: with no unhandledPromptBehavior, an open prompt makes
    # the command fail with "unexpected alert open".
    session.url = inline("<input id=foo>")
    element = session.find.css("#foo", all=False)

    for index, dialog_type in enumerate(("alert", "confirm", "prompt"), 1):
        text = "dismiss #%d" % index
        create_dialog(session)(dialog_type, text=text,
                               result_var="dismiss%d" % index)

        result = session.transport.send(
            "GET",
            "session/{session_id}/element/{element_id}/name".format(
                session_id=session.session_id, element_id=element.id))

        assert_error(result, "unexpected alert open")
        assert_dialog_handled(session, text)
def test_element_not_found(session):
    # 13.6 Step 3: an unknown element id yields "no such element".
    endpoint = "session/{session_id}/element/{element_id}/name".format(
        session_id=session.session_id, element_id="foo")
    result = session.transport.send("GET", endpoint)

    assert_error(result, "no such element")
def test_element_stale(session):
    # 13.6 step 4: a reference from before a page refresh is stale.
    session.url = inline("<input id=foo>")
    element = session.find.css("input", all=False)
    session.refresh()

    endpoint = "session/{session_id}/element/{element_id}/name".format(
        session_id=session.session_id, element_id=element.id)
    result = session.transport.send("GET", endpoint)

    assert_error(result, "stale element reference")
def test_get_element_tag_name(session):
    # 13.6 step 6: the lower-cased qualified name of the element is returned.
    session.url = inline("<input id=foo>")
    element = session.find.css("input", all=False)

    endpoint = "session/{session_id}/element/{element_id}/name".format(
        session_id=session.session_id, element_id=element.id)
    result = session.transport.send("GET", endpoint)

    assert_success(result, "input")
| mpl-2.0 |
ankurankan/scikit-learn | examples/bicluster/bicluster_newsgroups.py | 42 | 7098 | """
================================================================
Biclustering documents with the Spectral Co-clustering algorithm
================================================================
This example demonstrates the Spectral Co-clustering algorithm on the
twenty newsgroups dataset. The 'comp.os.ms-windows.misc' category is
excluded because it contains many posts containing nothing but data.
The TF-IDF vectorized posts form a word frequency matrix, which is
then biclustered using Dhillon's Spectral Co-Clustering algorithm. The
resulting document-word biclusters indicate subsets words used more
often in those subsets documents.
For a few of the best biclusters, its most common document categories
and its ten most important words get printed. The best biclusters are
determined by their normalized cut. The best words are determined by
comparing their sums inside and outside the bicluster.
For comparison, the documents are also clustered using
MiniBatchKMeans. The document clusters derived from the biclusters
achieve a better V-measure than clusters found by MiniBatchKMeans.
Output::
Vectorizing...
Coclustering...
Done in 9.53s. V-measure: 0.4455
MiniBatchKMeans...
Done in 12.00s. V-measure: 0.3309
Best biclusters:
----------------
bicluster 0 : 1951 documents, 4373 words
categories : 23% talk.politics.guns, 19% talk.politics.misc, 14% sci.med
words : gun, guns, geb, banks, firearms, drugs, gordon, clinton, cdt, amendment
bicluster 1 : 1165 documents, 3304 words
categories : 29% talk.politics.mideast, 26% soc.religion.christian, 25% alt.atheism
words : god, jesus, christians, atheists, kent, sin, morality, belief, resurrection, marriage
bicluster 2 : 2219 documents, 2830 words
categories : 18% comp.sys.mac.hardware, 16% comp.sys.ibm.pc.hardware, 16% comp.graphics
words : voltage, dsp, board, receiver, circuit, shipping, packages, stereo, compression, package
bicluster 3 : 1860 documents, 2745 words
categories : 26% rec.motorcycles, 23% rec.autos, 13% misc.forsale
words : bike, car, dod, engine, motorcycle, ride, honda, cars, bmw, bikes
bicluster 4 : 12 documents, 155 words
categories : 100% rec.sport.hockey
words : scorer, unassisted, reichel, semak, sweeney, kovalenko, ricci, audette, momesso, nedved
"""
from __future__ import print_function
print(__doc__)
from collections import defaultdict
import operator
import re
from time import time
import numpy as np
from sklearn.cluster.bicluster import SpectralCoclustering
from sklearn.cluster import MiniBatchKMeans
from sklearn.externals import six
from sklearn.datasets.twenty_newsgroups import fetch_20newsgroups
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.cluster import v_measure_score
def number_aware_tokenizer(doc):
    """Tokenize ``doc``, collapsing numeric-looking tokens to "#NUMBER".

    Tokens of two or more word characters are extracted; any token whose
    first character is a digit or underscore is replaced by the "#NUMBER"
    placeholder, so that all numbers share a single vocabulary entry and
    only the *presence* of a number remains informative.
    """
    raw_tokens = re.findall(u'(?u)\\b\\w\\w+\\b', doc)
    result = []
    for token in raw_tokens:
        if token[0] in "0123456789_":
            result.append("#NUMBER")
        else:
            result.append(token)
    return result
# exclude 'comp.os.ms-windows.misc'
categories = ['alt.atheism', 'comp.graphics',
              'comp.sys.ibm.pc.hardware', 'comp.sys.mac.hardware',
              'comp.windows.x', 'misc.forsale', 'rec.autos',
              'rec.motorcycles', 'rec.sport.baseball',
              'rec.sport.hockey', 'sci.crypt', 'sci.electronics',
              'sci.med', 'sci.space', 'soc.religion.christian',
              'talk.politics.guns', 'talk.politics.mideast',
              'talk.politics.misc', 'talk.religion.misc']
newsgroups = fetch_20newsgroups(categories=categories)
# Ground-truth newsgroup label of every post, used for V-measure scoring.
y_true = newsgroups.target

# Build the TF-IDF document-term matrix and the two clustering models that
# will be compared (Spectral Co-clustering vs. MiniBatchKMeans), both with
# one cluster per category.
vectorizer = TfidfVectorizer(stop_words='english', min_df=5,
                             tokenizer=number_aware_tokenizer)
cocluster = SpectralCoclustering(n_clusters=len(categories),
                                 svd_method='arpack', random_state=0)
kmeans = MiniBatchKMeans(n_clusters=len(categories), batch_size=20000,
                         random_state=0)

print("Vectorizing...")
X = vectorizer.fit_transform(newsgroups.data)

print("Coclustering...")
start_time = time()
cocluster.fit(X)
# Row labels are the per-document cluster assignments of the biclustering.
y_cocluster = cocluster.row_labels_
print("Done in {:.2f}s. V-measure: {:.4f}".format(
    time() - start_time,
    v_measure_score(y_cocluster, y_true)))

print("MiniBatchKMeans...")
start_time = time()
y_kmeans = kmeans.fit_predict(X)
print("Done in {:.2f}s. V-measure: {:.4f}".format(
    time() - start_time,
    v_measure_score(y_kmeans, y_true)))

feature_names = vectorizer.get_feature_names()
# Human-readable category name for every document, aligned with X's rows.
document_names = list(newsgroups.target_names[i] for i in newsgroups.target)
def bicluster_ncut(i):
    """Return the normalized-cut score of bicluster ``i`` (lower = better).

    Uses the module-level fitted ``cocluster`` model and the document-term
    matrix ``X``.
    """
    rows, cols = cocluster.get_indices(i)
    if len(rows) == 0 or len(cols) == 0:
        # Empty bicluster: give it the worst possible score so it is never
        # ranked among the best.
        # Fixed: the original tested ``np.any(rows)``, which is False for a
        # non-empty index array whose only element is 0 and thus wrongly
        # discarded biclusters containing row/column index 0.
        import sys
        return sys.float_info.max
    row_complement = np.nonzero(np.logical_not(cocluster.rows_[i]))[0]
    col_complement = np.nonzero(np.logical_not(cocluster.columns_[i]))[0]
    # Total edge weight inside the bicluster ...
    weight = X[rows[:, np.newaxis], cols].sum()
    # ... versus the weight of the edges that leave it (the "cut").
    cut = (X[row_complement[:, np.newaxis], cols].sum() +
           X[rows[:, np.newaxis], col_complement].sum())
    return cut / weight
def most_common(d):
    """Return the items of ``d`` sorted by value, highest value first.

    Equivalent to ``collections.Counter.most_common`` for a plain dict or
    ``defaultdict(int)`` (kept for Python < 2.7 compatibility).
    """
    # ``dict.items()`` works on both Python 2 and 3; the ``six.iteritems``
    # shim the original used was unnecessary since ``sorted`` consumes the
    # whole sequence anyway.
    return sorted(d.items(), key=operator.itemgetter(1), reverse=True)
# Rank biclusters by normalized cut (lower is better) and keep the best 5.
# Fixed: the original used the Python-2-only builtin ``xrange``; ``range``
# behaves identically here and also works on Python 3.
bicluster_ncuts = list(bicluster_ncut(i)
                       for i in range(len(newsgroups.target_names)))
best_idx = np.argsort(bicluster_ncuts)[:5]

print()
print("Best biclusters:")
print("----------------")
for idx, cluster in enumerate(best_idx):
    n_rows, n_cols = cocluster.get_shape(cluster)
    cluster_docs, cluster_words = cocluster.get_indices(cluster)
    if not len(cluster_docs) or not len(cluster_words):
        continue

    # categories: share of the bicluster's documents per newsgroup (top 3).
    counter = defaultdict(int)
    for i in cluster_docs:
        counter[document_names[i]] += 1
    cat_string = ", ".join("{:.0f}% {}".format(float(c) / n_rows * 100, name)
                           for name, c in most_common(counter)[:3])

    # words: score each word by how much more weight it carries inside the
    # bicluster's documents than outside them; report the top 10.
    out_of_cluster_docs = cocluster.row_labels_ != cluster
    out_of_cluster_docs = np.where(out_of_cluster_docs)[0]
    word_col = X[:, cluster_words]
    word_scores = np.array(word_col[cluster_docs, :].sum(axis=0) -
                           word_col[out_of_cluster_docs, :].sum(axis=0))
    word_scores = word_scores.ravel()
    important_words = list(feature_names[cluster_words[i]]
                           for i in word_scores.argsort()[:-11:-1])

    print("bicluster {} : {} documents, {} words".format(
        idx, n_rows, n_cols))
    print("categories : {}".format(cat_string))
    print("words : {}\n".format(', '.join(important_words)))
| bsd-3-clause |
CyanogenMod/android_external_chromium_org | components/test/data/password_manager/tests.py | 8 | 15341 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# -*- coding: utf-8 -*-
"""Automated tests for many websites"""
import argparse
import logging
from environment import Environment
from websitetest import WebsiteTest
class Facebook(WebsiteTest):
  """Facebook login/logout automation."""

  def Login(self):
    self.GoTo("https://www.facebook.com")
    self.FillUsernameInto("[name='email']")
    self.FillPasswordInto("[name='pass']")
    self.Submit("[name='pass']")

  def Logout(self):
    self.WaitUntilDisplayed("#userNavigationLabel")
    self.Click("#userNavigationLabel")
    self.WaitUntilDisplayed("#logout_form [type='submit']")
    self.Click("#logout_form [type='submit']")


class Google(WebsiteTest):
  """Google Accounts login/logout automation."""

  def Login(self):
    self.GoTo("https://accounts.google.com/ServiceLogin?sacu=1&continue=")
    self.FillUsernameInto("#Email")
    self.FillPasswordInto("#Passwd")
    self.Submit("#Passwd")

  def Logout(self):
    self.GoTo("https://accounts.google.com/Logout")


class Linkedin(WebsiteTest):
  """LinkedIn login/logout automation."""

  def Login(self):
    self.GoTo("https://www.linkedin.com")
    self.FillUsernameInto("#session_key-login")
    self.FillPasswordInto("#session_password-login")
    self.Submit("#session_password-login")

  def Logout(self):
    self.WaitUntilDisplayed(".account-toggle")
    self.HoverOver(".account-toggle")
    self.WaitUntilDisplayed(".account-settings .act-set-action")
    self.Click(".account-settings .act-set-action")


class Mailru(WebsiteTest):
  """Mail.ru login/logout automation."""

  def Login(self):
    self.GoTo("https://mail.ru")
    self.FillUsernameInto("#mailbox__login")
    self.FillPasswordInto("#mailbox__password")
    self.Submit("#mailbox__password")

  def Logout(self):
    self.Click("#PH_logoutLink")


class Nytimes(WebsiteTest):
  """New York Times login/logout automation."""

  def Login(self):
    self.GoTo("https://myaccount.nytimes.com/auth/login")
    self.FillUsernameInto("#userid")
    self.FillPasswordInto("#password")
    self.Submit("#password")

  def Logout(self):
    self.GoTo("https://myaccount.nytimes.com/gst/signout")


class Pinterest(WebsiteTest):
  """Pinterest login/logout automation."""

  def Login(self):
    self.GoTo("https://www.pinterest.com/login/")
    self.FillUsernameInto("[name='username_or_email']")
    self.FillPasswordInto("[name='password']")
    self.Submit("[name='password']")

  def Logout(self):
    self.GoTo("https://www.pinterest.com/logout/")


class Reddit(WebsiteTest):
  """Reddit login/logout automation."""

  def Login(self):
    self.GoTo("http://www.reddit.com")
    self.Click(".user .login-required")
    self.FillUsernameInto("#user_login")
    self.FillPasswordInto("#passwd_login")
    self.Wait(2)
    self.Submit("#passwd_login")

  def Logout(self):
    self.Click("form[action='http://www.reddit.com/logout'] a")


class Tumblr(WebsiteTest):
  """Tumblr login/logout automation."""

  def Login(self):
    self.GoTo("https://www.tumblr.com/login")
    self.FillUsernameInto("#signup_email")
    self.FillPasswordInto("#signup_password")
    self.Submit("#signup_password")

  def Logout(self):
    self.GoTo("https://www.tumblr.com/logout")


class Wikipedia(WebsiteTest):
  """Wikipedia login/logout automation."""

  def Login(self):
    self.GoTo("https://en.wikipedia.org/w/index.php?title=Special:UserLogin")
    self.FillUsernameInto("#wpName1")
    self.FillPasswordInto("#wpPassword1")
    self.Submit("#wpPassword1")

  def Logout(self):
    self.GoTo("https://en.wikipedia.org/w/index.php?title=Special:UserLogout")


class Yandex(WebsiteTest):
  """Yandex Mail login/logout automation."""

  def Login(self):
    self.GoTo("https://mail.yandex.com")
    self.FillUsernameInto("#b-mail-domik-username11")
    self.FillPasswordInto("#b-mail-domik-password11")
    self.Click(".b-mail-button__button")

  def Logout(self):
    # Keep opening the user dropdown until the logout entry becomes visible.
    while not self.IsDisplayed(".b-mail-dropdown__item__content"
                               u".Выход.daria-action"):
      self.ClickIfClickable(".header-user-pic.b-mail-dropdown__handle")
      self.Wait(1)
    self.Click(u".b-mail-dropdown__item__content.Выход.daria-action")
# Disabled tests.


# Bug not reproducible without test.
class Amazon(WebsiteTest):
  """Amazon sign-in flow (disabled: bug not reproducible without test)."""

  def Login(self):
    self.GoTo(
        "https://www.amazon.com/ap/signin?openid.assoc_handle=usflex"
        "&openid.mode=checkid_setup&openid.ns=http%3A%2F%2Fspecs.openid.net"
        "%2Fauth%2F2.0")
    self.FillUsernameInto("[name='email']")
    self.FillPasswordInto("[name='password']")
    self.Submit("[name='password']")

  def Logout(self):
    # Keep hovering the sign-in menu until the sign-out item appears.
    while not self.IsDisplayed("#nav-item-signout"):
      self.Wait(1)
      self.HoverOver("#nav-signin-title")
    self.Click("#nav-item-signout")


# Password not saved.
class Ask(WebsiteTest):
  """Ask.com sign-in flow (disabled: password not saved)."""

  def Login(self):
    self.GoTo("http://www.ask.com/answers/browse?qsrc=321&q=&o=0&l=dir#")
    # Keep clicking the sign-in link until the login form shows up.
    while not self.IsDisplayed("[name='username']"):
      self.Click("#a16CnbSignInText")
      self.Wait(1)
    self.FillUsernameInto("[name='username']")
    self.FillPasswordInto("[name='password']")
    self.Click(".signin_show.signin_submit")

  def Logout(self):
    self.WaitUntilDisplayed("#a16CnbSignInText")
    self.Click("#a16CnbSignInText")


# Password not saved.
class Baidu(WebsiteTest):
  """Baidu sign-in flow (disabled: password not saved)."""

  def Login(self):
    self.GoTo("http://www.baidu.com/")
    self.Click("[name='tj_login']")
    self.WaitUntilDisplayed("[name='userName']")
    self.FillUsernameInto("[name='userName']")
    self.FillPasswordInto("[name='password']")
    self.Submit("[name='password']")

  def Logout(self):
    self.Wait(1)
    self.GoTo("https://passport.baidu.com/?logout&u=http://www.baidu.com")


# http://crbug.com/368690
class Cnn(WebsiteTest):
  """CNN sign-in flow (disabled: http://crbug.com/368690)."""

  def Login(self):
    self.GoTo("http://www.cnn.com")
    self.Wait(5)
    # Keep opening the login overlay until its button is displayed.
    while not self.IsDisplayed(".cnnOvrlyBtn.cnnBtnLogIn"):
      self.ClickIfClickable("#hdr-auth .no-border.no-pad-right a")
      self.Wait(1)
    self.Click(".cnnOvrlyBtn.cnnBtnLogIn")
    self.FillUsernameInto("#cnnOverlayEmail1l")
    self.FillPasswordInto("#cnnOverlayPwd")
    self.Click(".cnnOvrlyBtn.cnnBtnLogIn")
    self.Click(".cnnOvrlyBtn.cnnBtnLogIn")
    self.Wait(5)

  def Logout(self):
    self.Wait(4)
    self.Click("#hdr-auth .no-border.no-pad-right")


# http://crbug.com/368690
class Ebay(WebsiteTest):
  """eBay sign-in flow (disabled: http://crbug.com/368690)."""

  def Login(self):
    self.GoTo("https://signin.ebay.com/")
    self.FillUsernameInto("[name='userid']")
    self.FillPasswordInto("[name='pass']")
    self.Submit("[name='pass']")

  def Logout(self):
    self.WaitUntilDisplayed("#gh-ug")
    self.Click("#gh-ug")
    self.WaitUntilDisplayed("#gh-uo")
    self.Click("#gh-uo")


# Iframe, password saved but not autofilled.
class Espn(WebsiteTest):
  """ESPN sign-in flow (disabled: login form lives in an iframe)."""

  def Login(self):
    self.GoTo("http://espn.go.com/")
    # Keep opening the sign-in overlay until its iframe is displayed.
    while not self.IsDisplayed("#cboxLoadedContent iframe"):
      self.Click("#signin .cbOverlay")
      self.Wait(1)
    frame = self.driver.find_element_by_css_selector("#cboxLoadedContent "
                                                     "iframe")
    self.driver.switch_to_frame(frame)
    self.WaitUntilDisplayed("#username")
    self.FillUsernameInto("#username")
    self.FillPasswordInto("#password")
    # Retry submitting until the password field disappears.
    while self.IsDisplayed("#password"):
      self.ClickIfClickable("#submitBtn")
      self.Wait(1)

  def Logout(self):
    self.WaitUntilDisplayed("#signin .small")
    self.Click("#signin .small")


# http://crbug.com/367768
class Live(WebsiteTest):
  """Microsoft Live sign-in flow (disabled: http://crbug.com/367768)."""

  def Login(self):
    self.GoTo("https://www.live.com")
    self.FillUsernameInto("[name='login']")
    self.FillPasswordInto("[name='passwd']")
    self.Submit("[name='passwd']")

  def Logout(self):
    self.WaitUntilDisplayed("#c_meun")
    self.Click("#c_meun")
    self.WaitUntilDisplayed("#c_signout")
    self.Click("#c_signout")


# http://crbug.com/368690
class One63(WebsiteTest):
  """163.com sign-in flow (disabled: http://crbug.com/368690)."""

  def Login(self):
    self.GoTo("http://www.163.com")
    self.HoverOver("#js_N_navHighlight")
    self.WaitUntilDisplayed("#js_loginframe_username")
    self.FillUsernameInto("#js_loginframe_username")
    self.FillPasswordInto(".ntes-loginframe-label-ipt[type='password']")
    self.Click(".ntes-loginframe-btn")

  def Logout(self):
    self.WaitUntilDisplayed("#js_N_navLogout")
    self.Click("#js_N_navLogout")


# http://crbug.com/368690
class Vube(WebsiteTest):
  """Vube sign-in flow (disabled: http://crbug.com/368690)."""

  def Login(self):
    self.GoTo("https://vube.com")
    self.Click("[vube-login='']")
    self.WaitUntilDisplayed("[ng-model='login.user']")
    self.FillUsernameInto("[ng-model='login.user']")
    self.FillPasswordInto("[ng-model='login.pass']")
    # Retry until the form goes away or an error prompt is shown.
    while (self.IsDisplayed("[ng-model='login.pass']")
           and not self.IsDisplayed(".prompt.alert")):
      self.ClickIfClickable("[ng-click='login()']")
      self.Wait(1)

  def Logout(self):
    self.WaitUntilDisplayed("[ng-click='user.logout()']")
    self.Click("[ng-click='user.logout()']")


# Tests that can cause a crash.
class Yahoo(WebsiteTest):
  """Yahoo sign-in flow (disabled: can crash for reasons unrelated to the
  password manager)."""

  def Login(self):
    self.GoTo("https://login.yahoo.com")
    self.FillUsernameInto("#username")
    self.FillPasswordInto("#passwd")
    self.Submit("#passwd")

  def Logout(self):
    self.WaitUntilDisplayed(".tab.tab-user>.mod.view_default")
    self.HoverOver(".tab.tab-user>.mod.view_default")
    self.WaitUntilDisplayed("[data-pos='4'] .lbl.y-link-1")
    self.Click("[data-pos='4'] .lbl.y-link-1")
def Tests(environment):
  """Registers all WebsiteTests with the given test environment."""
  # Working tests.
  environment.AddWebsiteTest(Facebook("facebook"))
  environment.AddWebsiteTest(Google("google"))
  environment.AddWebsiteTest(Linkedin("linkedin"))
  environment.AddWebsiteTest(Mailru("mailru"))
  environment.AddWebsiteTest(Nytimes("nytimes"))
  environment.AddWebsiteTest(Pinterest("pinterest"))
  environment.AddWebsiteTest(Reddit("reddit", username_not_auto=True))
  environment.AddWebsiteTest(Tumblr("tumblr", username_not_auto=True))
  environment.AddWebsiteTest(Wikipedia("wikipedia", username_not_auto=True))
  environment.AddWebsiteTest(Yandex("yandex"))

  # Disabled tests.

  # Bug not reproducible without test.
  environment.AddWebsiteTest(Amazon("amazon"), disabled=True)
  # Password not saved.
  environment.AddWebsiteTest(Ask("ask"), disabled=True)
  # Password not saved.
  environment.AddWebsiteTest(Baidu("baidu"), disabled=True)
  # http://crbug.com/368690
  environment.AddWebsiteTest(Cnn("cnn"), disabled=True)
  # http://crbug.com/368690
  environment.AddWebsiteTest(Ebay("ebay"), disabled=True)
  # Iframe, password saved but not autofilled.
  environment.AddWebsiteTest(Espn("espn"), disabled=True)
  # http://crbug.com/367768
  environment.AddWebsiteTest(Live("live", username_not_auto=True),
                             disabled=True)
  # http://crbug.com/368690
  environment.AddWebsiteTest(One63("163"), disabled=True)
  # http://crbug.com/368690
  environment.AddWebsiteTest(Vube("vube"), disabled=True)

  # Tests that can cause a crash (the cause of the crash is not related to the
  # password manager).
  environment.AddWebsiteTest(Yahoo("yahoo", username_not_auto=True),
                             disabled=True)
def RunTests(chrome_path, chromedriver_path, profile_path,
             environment_passwords_path, enable_automatic_password_saving,
             environment_numeric_level, log_to_console, environment_log_file,
             all_tests, tests):
  """Runs the tests.

  Args:
    chrome_path: The chrome binary file.
    chromedriver_path: The chromedriver binary file.
    profile_path: The chrome testing profile folder.
    environment_passwords_path: The usernames and passwords file.
    enable_automatic_password_saving: If True, the passwords are going to be
        saved without showing the prompt.
    environment_numeric_level: The log verbosity.
    log_to_console: If True, the debug logs will be shown on the console.
    environment_log_file: The file where to store the log. If it's empty, the
        log is not stored.
    all_tests: If True, all the tests are going to be ran.
    tests: A list of the names of the WebsiteTests that are going to be tested.

  Raises:
    Exception: An exception is raised if the one of the tests fails.
  """
  environment = Environment(chrome_path, chromedriver_path, profile_path,
                            environment_passwords_path,
                            enable_automatic_password_saving,
                            environment_numeric_level,
                            log_to_console,
                            environment_log_file)
  # Test which care about the save-password prompt need the prompt
  # to be shown. Automatic password saving results in no prompt.
  run_prompt_tests = not enable_automatic_password_saving
  Tests(environment)
  if all_tests:
    environment.AllTests(run_prompt_tests)
  elif tests:
    environment.Test(tests, run_prompt_tests)
  else:
    environment.WorkingTests(run_prompt_tests)
  environment.Quit()
# Tests setup.
if __name__ == "__main__":
  parser = argparse.ArgumentParser(
      description="Password Manager automated tests help.")
  parser.add_argument(
      "--chrome-path", action="store", dest="chrome_path",
      help="Set the chrome path (required).", nargs=1, required=True)
  parser.add_argument(
      "--chromedriver-path", action="store", dest="chromedriver_path",
      help="Set the chromedriver path (required).", nargs=1, required=True)
  parser.add_argument(
      "--profile-path", action="store", dest="profile_path",
      help="Set the profile path (required). You just need to choose a "
      "temporary empty folder. If the folder is not empty all its content "
      "is going to be removed.",
      nargs=1, required=True)
  parser.add_argument(
      "--passwords-path", action="store", dest="passwords_path",
      help="Set the usernames/passwords path (required).", nargs=1,
      required=True)
  parser.add_argument("--all", action="store_true", dest="all",
                      help="Run all tests.")
  parser.add_argument("--log", action="store", nargs=1, dest="log_level",
                      help="Set log level.")
  parser.add_argument("--log-screen", action="store_true", dest="log_screen",
                      help="Show log on the screen.")
  parser.add_argument("--log-file", action="store", dest="log_file",
                      help="Write the log in a file.", nargs=1)
  parser.add_argument("tests", help="Tests to be run.", nargs="*")

  args = parser.parse_args()

  passwords_path = args.passwords_path[0]

  # Translate the textual log level (e.g. "debug") into logging's numeric one.
  numeric_level = None
  if args.log_level:
    numeric_level = getattr(logging, args.log_level[0].upper(), None)
    if not isinstance(numeric_level, int):
      raise ValueError("Invalid log level: %s" % args.log_level[0])

  log_file = None
  if args.log_file:
    log_file = args.log_file[0]

  # Run the test without enable-automatic-password-saving to check whether or
  # not the prompt is shown in the way we expected.
  RunTests(args.chrome_path[0],
           args.chromedriver_path[0],
           args.profile_path[0],
           passwords_path,
           False,
           numeric_level,
           args.log_screen,
           log_file,
           args.all,
           args.tests)

  # Run the test with enable-automatic-password-saving to check whether or
  # not the passwords are stored in the way we expected.
  RunTests(args.chrome_path[0],
           args.chromedriver_path[0],
           args.profile_path[0],
           passwords_path,
           True,
           numeric_level,
           args.log_screen,
           log_file,
           args.all,
           args.tests)
| bsd-3-clause |
weolar/miniblink49 | third_party/WebKit/Tools/Scripts/webkitpy/style/checkers/xcodeproj_unittest.py | 48 | 3070 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for xcodeproj.py."""
import unittest
import xcodeproj
class TestErrorHandler(object):
    """Stub style-error handler used by the XcodeProjectFileChecker tests.

    Wraps a callback: every reported error is forwarded to it, and the
    handler tells the checker the error was "handled" by returning True.
    """

    def __init__(self, handler):
        # Callback invoked as handler(self, line_number, category,
        # confidence, message) for every reported error.
        self.handler = handler

    def turn_off_line_filtering(self):
        """No-op: line filtering of the real handler is irrelevant here."""
        pass

    def __call__(self, line_number, category, confidence, message):
        self.handler(self, line_number, category, confidence, message)
        return True
class XcodeProjectFileCheckerTest(unittest.TestCase):
    """Tests XcodeProjectFileChecker class."""

    def assert_no_error(self, lines):
        """Fail the test if the checker reports any error for `lines`."""
        def handler(error_handler, line_number, category, confidence, message):
            self.fail('Unexpected error: %d %s %d %s' % (line_number, category, confidence, message))

        error_handler = TestErrorHandler(handler)
        checker = xcodeproj.XcodeProjectFileChecker('', error_handler)
        checker.check(lines)

    def assert_error(self, lines, expected_message):
        """Fail unless the checker reports an error with `expected_message`."""
        self.had_error = False

        def handler(error_handler, line_number, category, confidence, message):
            self.assertEqual(expected_message, message)
            self.had_error = True

        error_handler = TestErrorHandler(handler)
        checker = xcodeproj.XcodeProjectFileChecker('', error_handler)
        checker.check(lines)
        self.assertTrue(self.had_error, '%s should have error: %s.' % (lines, expected_message))

    def test_detect_development_region(self):
        # developmentRegion must be present and must be English.
        self.assert_no_error(['developmentRegion = English;'])
        self.assert_error([''], 'Missing "developmentRegion = English".')
        self.assert_error(['developmentRegion = Japanese;'],
                          'developmentRegion is not English.')
| apache-2.0 |
igordejanovic/textX | tests/functional/test_metamodel/test_model_params.py | 1 | 5362 | from __future__ import unicode_literals
from click.testing import CliRunner
import os.path
from pytest import raises
from textx import metamodel_from_str
from textx.cli import textx
from textx.exceptions import TextXError
from textx.generators import gen_file, get_output_filename
from textx import language, generator, register_language, register_generator
grammar = r"""
Model: 'MyModel' name=ID;
"""
text = r"""
MyModel test1
"""
def test_model_params():
    """Keyword args passed to model_from_str become model parameters whose
    usage is tracked via used_keys/all_used."""
    mm = metamodel_from_str(grammar)
    mm.model_param_defs.add(
        "parameter1", "an example param (1)"
    )
    mm.model_param_defs.add(
        "parameter2", "an example param (2)"
    )
    m = mm.model_from_str(text, parameter1='P1', parameter2='P2')

    assert m.name == 'test1'
    assert hasattr(m, '_tx_model_params')
    assert len(m._tx_model_params) == 2

    # Nothing has been read yet.
    assert len(m._tx_model_params.used_keys) == 0
    assert not m._tx_model_params.all_used

    # Reading a parameter marks it as used.
    assert m._tx_model_params['parameter1'] == 'P1'
    assert len(m._tx_model_params.used_keys) == 1
    assert 'parameter1' in m._tx_model_params.used_keys
    assert 'parameter2' not in m._tx_model_params.used_keys
    assert not m._tx_model_params.all_used

    assert m._tx_model_params['parameter2'] == 'P2'
    assert len(m._tx_model_params.used_keys) == 2
    assert 'parameter1' in m._tx_model_params.used_keys
    assert 'parameter2' in m._tx_model_params.used_keys
    assert m._tx_model_params.all_used

    # get() supports a default like dict.get().
    assert m._tx_model_params.get(
        'missing_params', default='default value') == 'default value'
    assert m._tx_model_params.get(
        'parameter1', default='default value') == 'P1'

    # Passing an undeclared parameter is an error.
    with raises(TextXError, match=".*unknown parameter myerror2.*"):
        mm.model_from_str(text, parameter1='P1', myerror2='P2')

    assert len(mm.model_param_defs) >= 2
    assert 'parameter1' in mm.model_param_defs
    # NOTE(review): duplicated assertion; the second check was probably
    # meant to verify 'parameter2'.
    assert 'parameter1' in mm.model_param_defs
    assert mm.model_param_defs[
        'parameter1'].description == "an example param (1)"
def test_model_params_empty():
    """A model created without extra kwargs has empty model parameters."""
    mm = metamodel_from_str(grammar)
    mm.model_param_defs.add(
        "parameter1", "an example param (1)"
    )
    mm.model_param_defs.add(
        "parameter2", "an example param (2)"
    )
    m = mm.model_from_str(text)

    assert m.name == 'test1'
    assert hasattr(m, '_tx_model_params')
    assert len(m._tx_model_params) == 0
    # With no parameters given, "all used" is vacuously true.
    assert m._tx_model_params.all_used
def test_model_params_file_based():
    """Model parameters are also supported by model_from_file()."""
    mm = metamodel_from_str(grammar)
    mm.model_param_defs.add(
        "parameter1", "an example param (1)"
    )
    mm.model_param_defs.add(
        "parameter2", "an example param (2)"
    )
    current_dir = os.path.dirname(__file__)
    m = mm.model_from_file(
        os.path.join(current_dir, 'test_model_params',
                     'model.txt'),
        parameter1='P1', parameter2='P2')

    assert m.name == 'file_based'
    assert hasattr(m, '_tx_model_params')
    assert len(m._tx_model_params) == 2
def test_model_params_generate_cli():
    """
    Test that model parameters are passed through generate cli command.
    """
    # register test language
    @language('testlang', '*.mpt')
    def model_param_test():
        def processor(model, metamodel):
            # Just to be sure that processor sees the model parameters
            model.model_params = model._tx_model_params
        mm = metamodel_from_str(grammar)
        mm.model_param_defs.add('meaning_of_life', 'The Meaning of Life')
        mm.register_model_processor(processor)
        return mm
    register_language(model_param_test)

    # register language generator
    @generator('testlang', 'testtarget')
    def mytarget_generator(metamodel, model, output_path, overwrite,
                           debug=False, **custom_args):
        # Dump custom args for testing
        txt = '\n'.join(["{}={}".format(arg_name, arg_value)
                         for arg_name, arg_value in custom_args.items()])
        # Dump model params processed by model processor for testing
        txt += '\nModel params:'
        txt += '\n'.join(["{}={}".format(param_name, param_value)
                          for param_name, param_value in model.model_params.items()])
        output_file = get_output_filename(model._tx_filename, None, 'testtarget')

        def gen_callback():
            with open(output_file, 'w') as f:
                f.write(txt)
        gen_file(model._tx_filename, output_file, gen_callback, overwrite)
    register_generator(mytarget_generator)

    # Run generator from CLI
    this_folder = os.path.abspath(os.path.dirname(__file__))
    runner = CliRunner()
    model_file = os.path.join(this_folder, 'model_param_generate_test.mpt')
    result = runner.invoke(textx, ['generate',
                                   '--language', 'testlang',
                                   '--target', 'testtarget',
                                   '--overwrite', model_file,
                                   '--meaning_of_life', '42',
                                   '--someparam', 'somevalue'])
    assert result.exit_code == 0

    # The generator writes 'Model params:' and the first model parameter on
    # the same line, which is exactly what the assertion below expects.
    output_file = os.path.join(this_folder, 'model_param_generate_test.testtarget')
    with open(output_file, 'r') as f:
        content = f.read()
    assert 'someparam=somevalue' in content
    assert 'Model params:meaning_of_life=42' in content
| mit |
Gitlab11/odoo | addons/procurement/__openerp__.py | 267 | 2661 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name' : 'Procurements',
'version' : '1.0',
'author' : 'OpenERP SA',
'website': 'https://www.odoo.com/page/manufacturing',
'category' : 'Hidden/Dependency',
'depends' : ['base', 'product'],
'description': """
This is the module for computing Procurements.
==============================================
This procurement module only depends on the product module and is not useful
on itself. Procurements represent needs that need to be solved by a procurement
rule. When a procurement is created, it is confirmed. When a rule is found,
it will be put in running state. After, it will check if what needed to be done
for the rule has been executed. Then it will go to the done state. A procurement
can also go into exception, for example when it can not find a rule and it can be cancelled.
The mechanism will be extended by several modules. The procurement rule of stock will
create a move and the procurement will be fulfilled when the move is done.
The procurement rule of sale_service will create a task. Those of purchase or
mrp will create a purchase order or a manufacturing order.
The scheduler will check if it can assign a rule to confirmed procurements and if
it can put running procurements to done.
Procurements in exception should be checked manually and can be re-run.
""",
'data': [
'security/ir.model.access.csv',
'security/procurement_security.xml',
'procurement_data.xml',
'wizard/schedulers_all_view.xml',
'procurement_view.xml',
'company_view.xml',
],
'demo': [],
'test': ['test/procurement.yml'],
'installable': True,
'auto_install': True,
}
| agpl-3.0 |
HeadCow/ARPS | ML_model/model/utils.py | 3 | 5695 | # -*- coding:utf-8 -*-
from __future__ import division
try:
import cPickle as pickle
except ImportError:
import pickle
import io
import os
import random
from collections import Counter, defaultdict
from model.base.document import Document
def save_to_disk(path_to_disk, obj, overwrite=False):
    """ Pickle an object to disk.

    :param path_to_disk: destination file path; its directory must exist
    :param obj: any picklable object
    :param overwrite: if False, refuse to clobber an existing file
    :raises ValueError: if the directory does not exist, or if the file
        already exists and ``overwrite`` is False
    """
    dirname = os.path.dirname(path_to_disk)
    if not os.path.exists(dirname):
        raise ValueError("Path " + dirname + " does not exist")
    if not overwrite and os.path.exists(path_to_disk):
        # Fixed: the original message was missing a space
        # ("<path>already exists").
        raise ValueError("File " + path_to_disk + " already exists")
    # Use a context manager so the file handle is closed (and data flushed)
    # even if pickling raises; the original leaked the open handle.
    with open(path_to_disk, 'wb') as pickle_file:
        pickle.dump(obj, pickle_file)
def load_from_disk(path_to_disk):
    """ Load a pickle from disk to memory.

    :param path_to_disk: path of an existing pickle file
    :return: the unpickled object
    :raises ValueError: if the file does not exist
    """
    if not os.path.exists(path_to_disk):
        raise ValueError("File " + path_to_disk + " does not exist")
    # Context manager closes the handle deterministically; the original
    # left it open until garbage collection.
    with open(path_to_disk, 'rb') as pickle_file:
        return pickle.load(pickle_file)
def get_documents(data_dir, as_generator=True, shuffle=False):
    """
    Build Document objects from the *.txt files in a directory.

    :param data_dir: path to the directory with .txt files
    :param as_generator: if True return a lazy generator, otherwise a list
    :param shuffle: if True return the documents in random order,
        otherwise in sorted order
    :return: generator or list of Document objects
    """
    # Base names of all files, ignoring macOS .DS_Store artifacts; the set
    # collapses doc.txt/doc.lab pairs into a single name.
    names = sorted({fname[:-4] for fname in os.listdir(data_dir)
                    if '.DS_S' not in fname})
    if shuffle:
        random.shuffle(names)
    docs = (Document(doc_id, os.path.join(data_dir, name + '.txt'))
            for doc_id, name in enumerate(names))
    if as_generator:
        return docs
    return list(docs)
def get_all_answers(data_dir, filtered_by=None):
    """
    Read ground-truth answers from every *.lab file in a directory.

    :param data_dir: path to the directory with .lab files
    :param filtered_by: optional label set used to filter the answers
    :return: dict mapping document id to its label set,
        e.g. {'101231': set('lab1', 'lab2')}
    """
    doc_names = {fname[:-4] for fname in os.listdir(data_dir)}
    return {name: get_answers_for_doc(name + '.txt',
                                      data_dir,
                                      filtered_by=filtered_by)
            for name in doc_names}
def get_answers_for_doc(doc_name, data_dir, filtered_by=None):
    """
    Read the ground-truth answers from the .lab file matching ``doc_name``.

    :param doc_name: name of the document, must end with .txt
    :param data_dir: directory holding the documents and answer files
    :param filtered_by: optional label set used to filter the answers
    :return: set of unicode labels for this document
    :raises ValueError: if the corresponding .lab file does not exist
    """
    lab_path = os.path.join(data_dir, doc_name[:-4] + '.lab')
    if not os.path.exists(lab_path):
        raise ValueError("Answer file " + lab_path + " does not exist")
    with io.open(lab_path, 'r') as lab_file:
        labels = {line.rstrip('\n') for line in lab_file}
    if filtered_by:
        labels = {label for label in labels if label in filtered_by}
    return labels
def calculate_label_distribution(data_dir, filtered_by=None):
    """
    Compute the distribution of labels over a directory of answer files.

    Useful for spotting the most frequent and never-used labels so the
    target vocabulary can be trimmed accordingly.

    :param data_dir: directory with the .lab files
    :param filtered_by: optional label set that defines the vocabulary
    :return: defaultdict(list) mapping a document count to the labels seen
        in exactly that many documents, e.g. {14: ['lab1', 'lab2'], ...}
    """
    label_counts = Counter()
    for label_set in get_all_answers(data_dir, filtered_by=filtered_by).values():
        label_counts.update(label_set)
    histogram = defaultdict(list)
    for label, doc_count in label_counts.items():
        histogram[doc_count].append(label)
    return histogram
def calculate_number_of_labels_distribution(data_dir, filtered_by=None):
    """ Histogram of label-set sizes: how many documents carry 3 labels,
    how many carry 4, etc. Returns Counter items as (size, count) pairs. """
    set_sizes = Counter(
        len(ans_set)
        for ans_set in get_all_answers(data_dir, filtered_by=filtered_by).values())
    return set_sizes.items()
def get_coverage_ratio_for_label_subset(no_of_labels, hist=None):
    """
    Compute the fraction of the document-label assignments we would still be
    able to predict if the vocabulary were reduced to (at least) the
    `no_of_labels` most frequent labels.

    :param no_of_labels: the number of labels that we limit the ontology to
    :param hist: histogram of the samples, as returned by
                 calculate_label_distribution()
    :return: (number of labels actually kept, coverage ratio), or -1 if the
             histogram contains fewer than no_of_labels labels in total
    """
    hist = hist or calculate_label_distribution()
    # Convert {count: [labels]} into a sorted list of
    # (documents_per_label, number_of_labels_with_that_count) pairs.
    hist = sorted([(k, len(v)) for k, v in hist.items()])
    total_shots = sum(papers * labs for papers, labs in hist)
    labels_collected = 0
    hits_collected = 0
    # Walk from the most frequent labels downwards, accumulating how many
    # document-label assignments ("hits") the kept labels would cover.
    for papers, labs in reversed(hist):
        hits_collected += papers * labs
        labels_collected += labs
        # BUG FIX: this previously read `no_of_keywords`, an undefined name,
        # so reaching this check always raised NameError.
        if labels_collected >= no_of_labels:
            return labels_collected, hits_collected / float(total_shots)
    return -1
def get_top_n_labels(n, hist=None):
    """
    Return the n most popular labels.

    :param n: number of labels to return
    :param hist: histogram, result of calculate_label_distribution()
    :return: list of label strings, most popular first
    """
    hist = hist or calculate_label_distribution()
    top = []
    # Iterate buckets from highest to lowest occurrence count.
    for _count, labels in sorted(hist.items(), reverse=True):
        top.extend(labels)
        if len(top) >= n:
            break
    return top[:n]
| mit |
joke2k/faker | faker/providers/internet/ru_RU/__init__.py | 2 | 1790 | from .. import Provider as InternetProvider
class Provider(InternetProvider):
    """Russian (ru_RU) internet provider: user names, e-mail domains, TLDs."""

    # Templates for generated user names. ``{{...}}`` tokens are expanded by
    # the faker generator; '#' and '?' placeholders are presumably expanded
    # to random digits/letters by the base provider — TODO confirm.
    # NOTE(review): the male last.first pattern appears twice, which doubles
    # its selection weight; looks intentional, verify upstream.
    user_name_formats = (
        '{{last_name_female}}.{{first_name_female}}',
        '{{last_name_male}}.{{first_name_male}}',
        '{{last_name_male}}.{{first_name_male}}',
        '{{first_name_male}}.{{last_name_male}}',
        '{{first_name}}##',
        '{{first_name}}_##',
        '?{{last_name}}',
        '{{first_name}}{{year}}',
        '{{first_name}}_{{year}}',
    )
    # E-mail address templates built from the pieces above.
    email_formats = (
        '{{user_name}}@{{free_email_domain}}',
        '{{user_name}}@{{domain_name}}')
    # Free mail providers popular in Russia plus international ones.
    free_email_domains = (
        'gmail.com',
        'yahoo.com',
        'hotmail.com',
        'mail.ru',
        'yandex.ru',
        'rambler.ru')
    tlds = ('ru', 'com', 'biz', 'info', 'net', 'org', 'edu')
    # Cyrillic -> Latin transliteration pairs applied when building
    # ASCII identifiers (user names, domains).
    # NOTE(review): uppercase 'Ю'/'Я' map to 'yu'/'ya' but lowercase
    # 'ю'/'я' map to 'ju'/'ja' — inconsistent case handling; confirm
    # whether this matches upstream intent before relying on it.
    replacements = (
        ('А', 'a'), ('Б', 'b'), ('В', 'v'), ('Г', 'g'), ('Д', 'd'), ('Е', 'e'),
        ('Ё', 'e'), ('Ж', 'zh'), ('З', 'z'), ('И', 'i'), ('Й', ''), ('К', 'k'),
        ('Л', 'l'), ('М', 'm'), ('Н', 'n'), ('О', 'o'), ('П', 'p'), ('Р', 'r'),
        ('С', 's'), ('Т', 't'), ('У', 'u'), ('Ф', 'f'), ('Х', 'h'), ('Ц', 'ts'),
        ('Ч', 'ch'), ('Ш', 'sh'), ('Щ', 'shch'), ('Ъ', ''), ('Ы', 'i'),
        ('Ь', ''), ('Э', 'e'), ('Ю', 'yu'), ('Я', 'ya'), ('а', 'a'), ('б', 'b'),
        ('в', 'v'), ('г', 'g'), ('д', 'd'), ('е', 'e'), ('ё', 'e'), ('ж', 'zh'),
        ('з', 'z'), ('и', 'i'), ('й', ''), ('к', 'k'), ('л', 'l'), ('м', 'm'),
        ('н', 'n'), ('о', 'o'), ('п', 'p'), ('р', 'r'), ('с', 's'), ('т', 't'),
        ('у', 'u'), ('ф', 'f'), ('х', 'h'), ('ц', 'ts'), ('ч', 'ch'),
        ('ш', 'sh'), ('щ', 'shch'), ('ъ', ''), ('ы', 'i'), ('ь', ''),
        ('э', 'e'), ('ю', 'ju'), ('я', 'ja'),
    )
| mit |
mulkieran/justbytes | src/justbytes/_errors.py | 1 | 3871 | # Copyright (C) 2015 - 2019 Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; If not, see <http://www.gnu.org/licenses/>.
#
# Red Hat Author(s): Anne Mulhern <amulhern@redhat.com>
""" Exception types used by the justbytes class. """
# isort: STDLIB
import abc
class RangeError(Exception, metaclass=abc.ABCMeta):
    """ Generic Range error; abstract root of this module's exception tree. """
class RangeValueError(RangeError):
    """
    Raised when a parameter has an unacceptable value (and, in some cases,
    when it has an unacceptable type).
    """

    _FMT_STR = "value '%s' for parameter %s is unacceptable"

    def __init__(self, value, param, msg=None):
        """
        Initializer.

        :param object value: the offending value
        :param str param: the name of the parameter
        :param str msg: an optional explanatory message
        """
        # pylint: disable=super-init-not-called
        self.value = value
        self.param = param
        self.msg = msg

    def __str__(self):
        base = self._FMT_STR % (self.value, self.param)
        if self.msg:
            return "%s: %s" % (base, self.msg)
        return base
class RangeUnsupportedOpError(RangeError, metaclass=abc.ABCMeta):
    """ Abstract error for executing an unsupported operation on a Range. """
class RangeNonsensicalOpError(RangeUnsupportedOpError, metaclass=abc.ABCMeta):
    """ Abstract error for requesting an operation that doesn't make sense. """
class RangeNonsensicalBinOpValueError(RangeNonsensicalOpError):
    """ Error when requesting a binary operation with a nonsense value. """

    # BUG FIX: the message previously contained a duplicated word
    # ("nonsensical value for for %s").
    _FMT_STR = "nonsensical value for %s: '%s'"

    def __init__(self, operator, other):
        """
        Initializer.

        :param str operator: the operator
        :param object other: the other argument
        """
        # pylint: disable=super-init-not-called
        self._operator = operator
        self._other = other

    def __str__(self):
        return self._FMT_STR % (self._operator, self._other)
class RangeNonsensicalBinOpError(RangeNonsensicalOpError):
    """ Error when requesting a binary operation that doesn't make sense. """

    _FMT_STR = "nonsensical operand types for %s: 'Range' and '%s'"

    def __init__(self, operator, other):
        """
        Initializer.

        :param str operator: the operator
        :param object other: the other operand
        """
        # pylint: disable=super-init-not-called
        self._operator = operator
        self._other = other

    def __str__(self):
        other_type_name = type(self._other).__name__
        return self._FMT_STR % (self._operator, other_type_name)
class RangeUnrepresentableResultError(RangeUnsupportedOpError, metaclass=abc.ABCMeta):
    """
    Error when requesting an operation that yields units that cannot
    be represented with Range, e.g., when multiplying a Range by a Range
    (which would yield bytes squared).
    """
class RangePowerResultError(RangeUnrepresentableResultError):
    """ Raised when an operation's result would carry a non-unit power of bytes. """

    def __str__(self):
        return "requested operation result requires non-unit power of bytes"
class RangeFractionalResultError(RangeUnrepresentableResultError):
    """ Raised when an operation's result would be a fractional number of bytes. """

    def __str__(self):
        return "requested operation result has a fractional quantity of bytes"
| gpl-2.0 |
rebost/django | tests/modeltests/known_related_objects/tests.py | 16 | 3669 | from __future__ import absolute_import
from django.test import TestCase
from .models import Tournament, Pool, PoolStyle
class ExistingRelatedInstancesTests(TestCase):
    """
    Check that the ORM hands back the *same* in-memory instance (assertIs,
    not assertEqual) when traversing a relation back to an object that is
    already known, instead of issuing an extra query to refetch it.

    The assertNumQueries counts are part of the contract being tested: the
    second (reverse) access must not add a query.
    """
    fixtures = ['tournament.json']

    # --- forward ForeignKey: pool.tournament reuses the fetched tournament ---

    def test_foreign_key(self):
        with self.assertNumQueries(2):
            tournament = Tournament.objects.get(pk=1)
            pool = tournament.pool_set.all()[0]
            self.assertIs(tournament, pool.tournament)

    def test_foreign_key_prefetch_related(self):
        with self.assertNumQueries(2):
            tournament = (Tournament.objects.prefetch_related('pool_set').get(pk=1))
            pool = tournament.pool_set.all()[0]
            self.assertIs(tournament, pool.tournament)

    def test_foreign_key_multiple_prefetch(self):
        with self.assertNumQueries(2):
            tournaments = list(Tournament.objects.prefetch_related('pool_set'))
            pool1 = tournaments[0].pool_set.all()[0]
            self.assertIs(tournaments[0], pool1.tournament)
            pool2 = tournaments[1].pool_set.all()[0]
            self.assertIs(tournaments[1], pool2.tournament)

    # --- forward OneToOne: style.pool.poolstyle reuses the style ---

    def test_one_to_one(self):
        with self.assertNumQueries(2):
            style = PoolStyle.objects.get(pk=1)
            pool = style.pool
            self.assertIs(style, pool.poolstyle)

    def test_one_to_one_select_related(self):
        with self.assertNumQueries(1):
            style = PoolStyle.objects.select_related('pool').get(pk=1)
            pool = style.pool
            self.assertIs(style, pool.poolstyle)

    def test_one_to_one_multi_select_related(self):
        with self.assertNumQueries(1):
            poolstyles = list(PoolStyle.objects.select_related('pool'))
            self.assertIs(poolstyles[0], poolstyles[0].pool.poolstyle)
            self.assertIs(poolstyles[1], poolstyles[1].pool.poolstyle)

    def test_one_to_one_prefetch_related(self):
        with self.assertNumQueries(2):
            style = PoolStyle.objects.prefetch_related('pool').get(pk=1)
            pool = style.pool
            self.assertIs(style, pool.poolstyle)

    def test_one_to_one_multi_prefetch_related(self):
        with self.assertNumQueries(2):
            poolstyles = list(PoolStyle.objects.prefetch_related('pool'))
            self.assertIs(poolstyles[0], poolstyles[0].pool.poolstyle)
            self.assertIs(poolstyles[1], poolstyles[1].pool.poolstyle)

    # --- reverse OneToOne: pool.poolstyle.pool reuses the pool ---

    def test_reverse_one_to_one(self):
        with self.assertNumQueries(2):
            pool = Pool.objects.get(pk=2)
            style = pool.poolstyle
            self.assertIs(pool, style.pool)

    def test_reverse_one_to_one_select_related(self):
        with self.assertNumQueries(1):
            pool = Pool.objects.select_related('poolstyle').get(pk=2)
            style = pool.poolstyle
            self.assertIs(pool, style.pool)

    def test_reverse_one_to_one_prefetch_related(self):
        with self.assertNumQueries(2):
            pool = Pool.objects.prefetch_related('poolstyle').get(pk=2)
            style = pool.poolstyle
            self.assertIs(pool, style.pool)

    def test_reverse_one_to_one_multi_select_related(self):
        with self.assertNumQueries(1):
            pools = list(Pool.objects.select_related('poolstyle'))
            self.assertIs(pools[1], pools[1].poolstyle.pool)
            self.assertIs(pools[2], pools[2].poolstyle.pool)

    def test_reverse_one_to_one_multi_prefetch_related(self):
        with self.assertNumQueries(2):
            pools = list(Pool.objects.prefetch_related('poolstyle'))
            self.assertIs(pools[1], pools[1].poolstyle.pool)
            self.assertIs(pools[2], pools[2].poolstyle.pool)
| bsd-3-clause |
jumpojoy/neutron | neutron/db/metering/metering_rpc.py | 46 | 2075 | # Copyright (C) 2014 eNovance SAS <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import oslo_messaging
from neutron.common import constants as consts
from neutron.common import utils
from neutron.i18n import _LE
from neutron import manager
from neutron.plugins.common import constants as service_constants
LOG = logging.getLogger(__name__)
class MeteringRpcCallbacks(object):
    """Server-side RPC endpoint used by metering agents to pull the
    metering configuration for the routers they are responsible for."""

    target = oslo_messaging.Target(version='1.0')

    def __init__(self, meter_plugin):
        # meter_plugin: metering service plugin implementing
        # get_sync_data_metering().
        self.meter_plugin = meter_plugin

    def get_sync_data_metering(self, context, **kwargs):
        """Return metering sync data, scoped to the caller's host when possible.

        Returns None when no L3 plugin is loaded, when no L3 agent is found
        for the given host, or when that agent hosts no routers. Returns
        unscoped data for all routers when the L3 plugin does not support
        agent scheduling or no 'host' kwarg was supplied.
        """
        l3_plugin = manager.NeutronManager.get_service_plugins().get(
            service_constants.L3_ROUTER_NAT)
        if not l3_plugin:
            return

        host = kwargs.get('host')
        if not utils.is_extension_supported(
                l3_plugin, consts.L3_AGENT_SCHEDULER_EXT_ALIAS) or not host:
            # No per-agent scheduling available: return everything.
            return self.meter_plugin.get_sync_data_metering(context)
        else:
            agents = l3_plugin.get_l3_agents(context, filters={'host': [host]})
            if not agents:
                LOG.error(_LE('Unable to find agent %s.'), host)
                return

            # Limit the result to the routers hosted by this host's agent.
            routers = l3_plugin.list_routers_on_l3_agent(context, agents[0].id)
            router_ids = [router['id'] for router in routers['routers']]
            if not router_ids:
                return

            return self.meter_plugin.get_sync_data_metering(context,
                                                            router_ids=router_ids)
| apache-2.0 |
hejuna/bite-project | deps/gdata-python-client/samples/apps/marketplace_sample/gdata/apps/audit/service.py | 93 | 9510 | # Copyright (C) 2008 Google, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Allow Google Apps domain administrators to audit user data.
AuditService: Set auditing."""
__author__ = 'jlee@pbu.edu'
from base64 import b64encode
import gdata.apps
import gdata.apps.service
import gdata.service
class AuditService(gdata.apps.service.PropertyService):
  """Client for the Google Apps Audit service.

  BUG FIX: all error paths previously raised the bare name
  `AppsForYourDomainException`, which is never imported into this module
  and therefore raised NameError instead of the intended exception. They
  now raise gdata.apps.service.AppsForYourDomainException. The py2-only
  `except X, e` syntax was also replaced with `except X as e` (valid on
  Python 2.6+).
  """

  def _serviceUrl(self, setting_id, domain=None, user=None):
    """Build the audit feed URI for a setting, optionally scoped to a user."""
    if domain is None:
      domain = self.domain

    if user is None:
      return '/a/feeds/compliance/audit/%s/%s' % (setting_id, domain)
    else:
      return '/a/feeds/compliance/audit/%s/%s/%s' % (setting_id, domain, user)

  def updatePGPKey(self, pgpkey):
    """Updates the public PGP key Google uses to encrypt audit data.

    Args:
      pgpkey: string, ASCII text of PGP Public Key to be used

    Returns:
      A dict containing the result of the POST operation."""
    uri = self._serviceUrl('publickey')
    # The API expects the key base64-encoded.
    b64pgpkey = b64encode(pgpkey)
    properties = {}
    properties['publicKey'] = b64pgpkey
    return self._PostProperties(uri, properties)

  def createEmailMonitor(self, source_user, destination_user, end_date,
                         begin_date=None, incoming_headers_only=False,
                         outgoing_headers_only=False, drafts=False,
                         drafts_headers_only=False, chats=False,
                         chats_headers_only=False):
    """Creates an email monitor, forwarding the source_user's emails/chats.

    Args:
      source_user: string, the user whose email will be audited
      destination_user: string, the user to receive the audited email
      end_date: string, the date the audit will end in
                "yyyy-MM-dd HH:mm" format, required
      begin_date: string, the date the audit will start in
                  "yyyy-MM-dd HH:mm" format, leave blank to use current time
      incoming_headers_only: boolean, whether to audit only the headers of
                             mail delivered to source user
      outgoing_headers_only: boolean, whether to audit only the headers of
                             mail sent from the source user
      drafts: boolean, whether to audit draft messages of the source user
      drafts_headers_only: boolean, whether to audit only the headers of
                           mail drafts saved by the user
      chats: boolean, whether to audit archived chats of the source user
      chats_headers_only: boolean, whether to audit only the headers of
                          archived chats of the source user

    Returns:
      A dict containing the result of the POST operation."""
    uri = self._serviceUrl('mail/monitor', user=source_user)
    properties = {}
    properties['destUserName'] = destination_user
    if begin_date is not None:
      properties['beginDate'] = begin_date
    properties['endDate'] = end_date
    if incoming_headers_only:
      properties['incomingEmailMonitorLevel'] = 'HEADER_ONLY'
    else:
      properties['incomingEmailMonitorLevel'] = 'FULL_MESSAGE'
    if outgoing_headers_only:
      properties['outgoingEmailMonitorLevel'] = 'HEADER_ONLY'
    else:
      properties['outgoingEmailMonitorLevel'] = 'FULL_MESSAGE'
    # Draft/chat levels are only sent when auditing of them is requested.
    if drafts:
      if drafts_headers_only:
        properties['draftMonitorLevel'] = 'HEADER_ONLY'
      else:
        properties['draftMonitorLevel'] = 'FULL_MESSAGE'
    if chats:
      if chats_headers_only:
        properties['chatMonitorLevel'] = 'HEADER_ONLY'
      else:
        properties['chatMonitorLevel'] = 'FULL_MESSAGE'
    return self._PostProperties(uri, properties)

  def getEmailMonitors(self, user):
    """Gets the email monitors for the given user.

    Args:
      user: string, the user to retrieve email monitors for

    Returns:
      list results of the GET operation
    """
    uri = self._serviceUrl('mail/monitor', user=user)
    return self._GetPropertiesList(uri)

  def deleteEmailMonitor(self, source_user, destination_user):
    """Deletes the email monitor for the given user.

    Args:
      source_user: string, the user who is being monitored
      destination_user: string, the user who receives the monitored emails

    Returns:
      Nothing
    """
    uri = self._serviceUrl('mail/monitor', user=source_user+'/'+destination_user)
    try:
      return self._DeleteProperties(uri)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def createAccountInformationRequest(self, user):
    """Creates a request for account auditing details.

    Args:
      user: string, the user to request account information for

    Returns:
      A dict containing the result of the post operation."""
    uri = self._serviceUrl('account', user=user)
    properties = {}
    # XML body is left empty.
    try:
      return self._PostProperties(uri, properties)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def getAccountInformationRequestStatus(self, user, request_id):
    """Gets the status of an account auditing request.

    Args:
      user: string, the user whose account auditing details were requested
      request_id: string, the request_id

    Returns:
      A dict containing the result of the get operation."""
    uri = self._serviceUrl('account', user=user+'/'+request_id)
    try:
      return self._GetProperties(uri)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def getAllAccountInformationRequestsStatus(self):
    """Gets the status of all account auditing requests for the domain.

    Returns:
      list results of the GET operation
    """
    uri = self._serviceUrl('account')
    return self._GetPropertiesList(uri)

  def deleteAccountInformationRequest(self, user, request_id):
    """Deletes the request for account auditing information.

    Args:
      user: string, the user whose account auditing details were requested
      request_id: string, the request_id

    Returns:
      Nothing
    """
    uri = self._serviceUrl('account', user=user+'/'+request_id)
    try:
      return self._DeleteProperties(uri)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def createMailboxExportRequest(self, user, begin_date=None, end_date=None,
                                 include_deleted=False, search_query=None,
                                 headers_only=False):
    """Creates a mailbox export request.

    Args:
      user: string, the user whose mailbox export is being requested
      begin_date: string, date of earliest emails to export, optional,
                  defaults to date of account creation;
                  format is 'yyyy-MM-dd HH:mm'
      end_date: string, date of latest emails to export, optional,
                defaults to current date; format is 'yyyy-MM-dd HH:mm'
      include_deleted: boolean, whether to include deleted emails in export,
                       mutually exclusive with search_query
      search_query: string, gmail style search query, matched emails will be
                    exported, mutually exclusive with include_deleted
      headers_only: boolean, whether to export only message headers

    Returns:
      A dict containing the result of the post operation."""
    uri = self._serviceUrl('mail/export', user=user)
    properties = {}
    if begin_date is not None:
      properties['beginDate'] = begin_date
    if end_date is not None:
      properties['endDate'] = end_date
    if include_deleted is not None:
      properties['includeDeleted'] = gdata.apps.service._bool2str(include_deleted)
    if search_query is not None:
      properties['searchQuery'] = search_query
    if headers_only is True:
      properties['packageContent'] = 'HEADER_ONLY'
    else:
      properties['packageContent'] = 'FULL_MESSAGE'
    return self._PostProperties(uri, properties)

  def getMailboxExportRequestStatus(self, user, request_id):
    """Gets the status of a mailbox export request.

    Args:
      user: string, the user whose mailbox was requested
      request_id: string, the request_id

    Returns:
      A dict containing the result of the get operation."""
    uri = self._serviceUrl('mail/export', user=user+'/'+request_id)
    try:
      return self._GetProperties(uri)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])

  def getAllMailboxExportRequestsStatus(self):
    """Gets the status of all mailbox export requests for the domain.

    Returns:
      list results of the GET operation
    """
    uri = self._serviceUrl('mail/export')
    return self._GetPropertiesList(uri)

  def deleteMailboxExportRequest(self, user, request_id):
    """Deletes the request for mailbox export.

    Args:
      user: string, the user whose mailbox was requested
      request_id: string, the request_id

    Returns:
      Nothing
    """
    uri = self._serviceUrl('mail/export', user=user+'/'+request_id)
    try:
      return self._DeleteProperties(uri)
    except gdata.service.RequestError as e:
      raise gdata.apps.service.AppsForYourDomainException(e.args[0])
| apache-2.0 |
WillGuan105/django | django/utils/functional.py | 234 | 13622 | import copy
import operator
from functools import total_ordering, wraps
from django.utils import six
from django.utils.six.moves import copyreg
# You can't trivially replace this with `functools.partial` because this binds
# to classes and returns bound instances, whereas functools.partial (on
# CPython) is a type and its instances don't bind.
def curry(_curried_func, *args, **kwargs):
    """Pre-bind *args* and **kwargs** to ``_curried_func``."""
    def _curried(*moreargs, **morekwargs):
        # Later keyword arguments override the pre-bound ones.
        merged_kwargs = dict(kwargs, **morekwargs)
        return _curried_func(*(args + moreargs), **merged_kwargs)
    return _curried
class cached_property(object):
    """
    Decorator turning a method with a single ``self`` argument into a
    property whose value is computed once and then cached on the instance.

    The optional ``name`` argument allows exposing other methods as cached
    properties, e.g. ``url = cached_property(get_absolute_url, name='url')``.
    """
    def __init__(self, func, name=None):
        self.func = func
        self.__doc__ = getattr(func, '__doc__')
        self.name = name or func.__name__

    def __get__(self, instance, type=None):
        if instance is None:
            # Accessed on the class itself: expose the descriptor.
            return self
        value = self.func(instance)
        # Storing the value in the instance dict shadows this (non-data)
        # descriptor, so subsequent accesses never reach __get__ again.
        instance.__dict__[self.name] = value
        return value
class Promise(object):
    """
    Base class for the proxy class created in the closure of the lazy()
    function. Carries no behavior of its own; it exists so that lazily
    evaluated values ("promises") can be recognized with isinstance checks.
    """
    pass
def lazy(func, *resultclasses):
    """
    Turns any callable into a lazy evaluated callable. You need to give result
    classes or types -- at least one is needed so that the automatic forcing of
    the lazy evaluation code is triggered. Results are not memoized; the
    function is evaluated on every access.
    """
    @total_ordering
    class __proxy__(Promise):
        """
        Encapsulate a function call and act as a proxy for methods that are
        called on the result of that function. The function is not evaluated
        until one of the methods on the result is called.
        """
        # Class-level preparation (copying the result classes' methods onto
        # this proxy) only needs to happen once, on first instantiation.
        __prepared = False

        def __init__(self, args, kw):
            self.__args = args
            self.__kw = kw
            if not self.__prepared:
                self.__prepare_class__()
            self.__prepared = True

        def __reduce__(self):
            # Pickle the ingredients of the call, not the closure-scoped
            # class itself; _lazy_proxy_unpickle() rebuilds the proxy.
            return (
                _lazy_proxy_unpickle,
                (func, self.__args, self.__kw) + resultclasses
            )

        @classmethod
        def __prepare_class__(cls):
            # Copy a lazy wrapper for every method of every result class
            # (walking the MRO) onto the proxy class, unless the proxy
            # already defines that name.
            for resultclass in resultclasses:
                for type_ in resultclass.mro():
                    for method_name in type_.__dict__.keys():
                        # All __promise__ return the same wrapper method, they
                        # look up the correct implementation when called.
                        if hasattr(cls, method_name):
                            continue
                        meth = cls.__promise__(method_name)
                        setattr(cls, method_name, meth)
            cls._delegate_bytes = bytes in resultclasses
            cls._delegate_text = six.text_type in resultclasses
            assert not (cls._delegate_bytes and cls._delegate_text), (
                "Cannot call lazy() with both bytes and text return types.")
            # Wire up the appropriate string-conversion dunders for the
            # declared result type, differing between Python 2 and 3.
            if cls._delegate_text:
                if six.PY3:
                    cls.__str__ = cls.__text_cast
                else:
                    cls.__unicode__ = cls.__text_cast
                    cls.__str__ = cls.__bytes_cast_encoded
            elif cls._delegate_bytes:
                if six.PY3:
                    cls.__bytes__ = cls.__bytes_cast
                else:
                    cls.__str__ = cls.__bytes_cast

        @classmethod
        def __promise__(cls, method_name):
            # Builds a wrapper around some magic method
            def __wrapper__(self, *args, **kw):
                # Automatically triggers the evaluation of a lazy value and
                # applies the given magic method of the result type.
                res = func(*self.__args, **self.__kw)
                return getattr(res, method_name)(*args, **kw)
            return __wrapper__

        def __text_cast(self):
            return func(*self.__args, **self.__kw)

        def __bytes_cast(self):
            return bytes(func(*self.__args, **self.__kw))

        def __bytes_cast_encoded(self):
            return func(*self.__args, **self.__kw).encode('utf-8')

        def __cast(self):
            # Force evaluation, honoring the declared result type.
            if self._delegate_bytes:
                return self.__bytes_cast()
            elif self._delegate_text:
                return self.__text_cast()
            else:
                return func(*self.__args, **self.__kw)

        def __str__(self):
            # object defines __str__(), so __prepare_class__() won't overload
            # a __str__() method from the proxied class.
            return str(self.__cast())

        def __ne__(self, other):
            if isinstance(other, Promise):
                other = other.__cast()
            return self.__cast() != other

        def __eq__(self, other):
            if isinstance(other, Promise):
                other = other.__cast()
            return self.__cast() == other

        def __lt__(self, other):
            # total_ordering derives the remaining comparisons from this
            # and __eq__.
            if isinstance(other, Promise):
                other = other.__cast()
            return self.__cast() < other

        def __hash__(self):
            return hash(self.__cast())

        def __mod__(self, rhs):
            if self._delegate_bytes and six.PY2:
                return bytes(self) % rhs
            elif self._delegate_text:
                return six.text_type(self) % rhs
            return self.__cast() % rhs

        def __deepcopy__(self, memo):
            # Instances of this class are effectively immutable. It's just a
            # collection of functions. So we don't need to do anything
            # complicated for copying.
            memo[id(self)] = self
            return self

    @wraps(func)
    def __wrapper__(*args, **kw):
        # Creates the proxy object, instead of the actual value.
        return __proxy__(args, kw)

    return __wrapper__
def _lazy_proxy_unpickle(func, args, kwargs, *resultclasses):
    # Pickle helper: rebuild a lazy proxy from the pieces stored by
    # __proxy__.__reduce__() inside lazy().
    return lazy(func, *resultclasses)(*args, **kwargs)
def allow_lazy(func, *resultclasses):
    """
    A decorator that allows a function to be called with one or more lazy
    arguments. If none of the arguments is a lazy Promise the function is
    evaluated immediately; otherwise a __proxy__ is returned that evaluates
    the function when needed.
    """
    lazy_func = lazy(func, *resultclasses)

    @wraps(func)
    def wrapper(*args, **kwargs):
        if any(isinstance(arg, Promise)
               for arg in list(args) + list(kwargs.values())):
            return lazy_func(*args, **kwargs)
        return func(*args, **kwargs)
    return wrapper
# Sentinel marking a LazyObject whose wrapped instance has not been created
# yet (distinct from None, which is a perfectly valid wrapped value).
empty = object()
def new_method_proxy(func):
    # Return a method that forwards *func* to the lazily instantiated
    # wrapped object, triggering _setup() on first use.
    def inner(self, *args):
        if self._wrapped is empty:
            self._setup()
        return func(self._wrapped, *args)
    return inner
class LazyObject(object):
    """
    A wrapper for another class that can be used to delay instantiation of the
    wrapped class.

    By subclassing, you have the opportunity to intercept and alter the
    instantiation. If you don't need to do that, use SimpleLazyObject.
    """

    # Avoid infinite recursion when tracing __init__ (#19456).
    _wrapped = None

    def __init__(self):
        self._wrapped = empty

    # Any attribute access forces instantiation and is forwarded.
    __getattr__ = new_method_proxy(getattr)

    def __setattr__(self, name, value):
        if name == "_wrapped":
            # Assign to __dict__ to avoid infinite __setattr__ loops.
            self.__dict__["_wrapped"] = value
        else:
            if self._wrapped is empty:
                self._setup()
            setattr(self._wrapped, name, value)

    def __delattr__(self, name):
        if name == "_wrapped":
            raise TypeError("can't delete _wrapped.")
        if self._wrapped is empty:
            self._setup()
        delattr(self._wrapped, name)

    def _setup(self):
        """
        Must be implemented by subclasses to initialize the wrapped object.
        """
        raise NotImplementedError('subclasses of LazyObject must provide a _setup() method')

    # Because we have messed with __class__ below, we confuse pickle as to what
    # class we are pickling. It also appears to stop __reduce__ from being
    # called. So, we define __getstate__ in a way that cooperates with the way
    # that pickle interprets this class. This fails when the wrapped class is
    # a builtin, but it is better than nothing.
    def __getstate__(self):
        if self._wrapped is empty:
            self._setup()
        return self._wrapped.__dict__

    # Python 3 will call __reduce__ when pickling; this method is needed
    # to serialize and deserialize correctly.
    @classmethod
    def __newobj__(cls, *args):
        return cls.__new__(cls, *args)

    def __reduce_ex__(self, proto):
        if proto >= 2:
            # On Py3, since the default protocol is 3, pickle uses the
            # ``__newobj__`` method (& more efficient opcodes) for writing.
            return (self.__newobj__, (self.__class__,), self.__getstate__())
        else:
            # On Py2, the default protocol is 0 (for back-compat) & the above
            # code fails miserably (see regression test). Instead, we return
            # exactly what's returned if there's no ``__reduce__`` method at
            # all.
            return (copyreg._reconstructor, (self.__class__, object, None), self.__getstate__())

    def __deepcopy__(self, memo):
        if self._wrapped is empty:
            # We have to use type(self), not self.__class__, because the
            # latter is proxied.
            result = type(self)()
            memo[id(self)] = result
            return result
        return copy.deepcopy(self._wrapped, memo)

    if six.PY3:
        __bytes__ = new_method_proxy(bytes)
        __str__ = new_method_proxy(str)
        __bool__ = new_method_proxy(bool)
    else:
        __str__ = new_method_proxy(str)
        __unicode__ = new_method_proxy(unicode)  # NOQA: unicode undefined on PY3
        __nonzero__ = new_method_proxy(bool)

    # Introspection support
    __dir__ = new_method_proxy(dir)

    # Need to pretend to be the wrapped class, for the sake of objects that
    # care about this (especially in equality tests)
    __class__ = property(new_method_proxy(operator.attrgetter("__class__")))
    __eq__ = new_method_proxy(operator.eq)
    __ne__ = new_method_proxy(operator.ne)
    __hash__ = new_method_proxy(hash)

    # List/Tuple/Dictionary methods support
    __getitem__ = new_method_proxy(operator.getitem)
    __setitem__ = new_method_proxy(operator.setitem)
    __delitem__ = new_method_proxy(operator.delitem)
    __iter__ = new_method_proxy(iter)
    __len__ = new_method_proxy(len)
    __contains__ = new_method_proxy(operator.contains)
# Workaround for http://bugs.python.org/issue12370
# NOTE(review): keeps a module-level alias to the builtin so lookups of
# ``super`` are not affected by the proxied __class__ — confirm against the
# referenced bug report.
_super = super
class SimpleLazyObject(LazyObject):
    """
    A lazy object initialized from any function.

    Designed for compound objects of unknown type. For builtins or objects of
    known type, use django.utils.functional.lazy.
    """
    def __init__(self, func):
        """
        Pass in a callable that returns the object to be wrapped.

        If copies are made of the resulting SimpleLazyObject, which can happen
        in various circumstances within Django, then you must ensure that the
        callable can be safely run more than once and will return the same
        value.
        """
        # Stored via __dict__ directly so LazyObject.__setattr__ does not
        # try to forward the assignment to the (not yet created) wrapped
        # object.
        self.__dict__['_setupfunc'] = func
        _super(SimpleLazyObject, self).__init__()

    def _setup(self):
        self._wrapped = self._setupfunc()

    # Return a meaningful representation of the lazy object for debugging
    # without evaluating the wrapped object.
    def __repr__(self):
        if self._wrapped is empty:
            repr_attr = self._setupfunc
        else:
            repr_attr = self._wrapped
        return '<%s: %r>' % (type(self).__name__, repr_attr)

    def __deepcopy__(self, memo):
        if self._wrapped is empty:
            # We have to use SimpleLazyObject, not self.__class__, because the
            # latter is proxied.
            result = SimpleLazyObject(self._setupfunc)
            memo[id(self)] = result
            return result
        return copy.deepcopy(self._wrapped, memo)
class lazy_property(property):
    """
    A property that works with subclasses by wrapping the decorated
    functions of the base class.

    Each accessor is replaced by a thin wrapper that looks the method up on
    the *instance* at call time (by name), so a subclass override of the
    method is honored even though the property was defined on the base.
    """
    def __new__(cls, fget=None, fset=None, fdel=None, doc=None):
        if fget is not None:
            @wraps(fget)
            def fget(instance, instance_type=None, name=fget.__name__):
                # Dynamic dispatch: call the (possibly overridden) method.
                return getattr(instance, name)()
        if fset is not None:
            @wraps(fset)
            def fset(instance, value, name=fset.__name__):
                return getattr(instance, name)(value)
        if fdel is not None:
            @wraps(fdel)
            def fdel(instance, name=fdel.__name__):
                return getattr(instance, name)()
        # Returns a plain property, not a lazy_property instance.
        return property(fget, fset, fdel, doc)
def partition(predicate, values):
    """
    Split the values into two lists based on the return value of the
    predicate (True/False). e.g.:

        >>> partition(lambda x: x > 3, range(5))
        [0, 1, 2, 3], [4]
    """
    # The boolean result indexes directly into the pair: False -> 0, True -> 1.
    buckets = ([], [])
    for value in values:
        buckets[predicate(value)].append(value)
    return buckets
| bsd-3-clause |
MisterTea/HyperNEAT | boost_1_57_0/tools/build/src/util/order.py | 9 | 4322 | # Copyright (C) 2003 Vladimir Prus
# Use, modification, and distribution is subject to the Boost Software
# License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy
# at http://www.boost.org/LICENSE_1_0.txt)
class Order:
    """Orders arbitrary objects with regard to an arbitrary binary relation.

    The primary use case is the gcc toolset, which is sensitive to library
    order: if library 'a' uses symbols from library 'b', then 'a' must come
    before 'b' on the linker's command line. A dependency graph cannot be
    used for this, because what is needed is the "uses symbols of"
    relationship, not "needs to be updated". For example::

        lib a : a.cpp b ;
        lib b ;

    For static linking 'a' need not depend on 'b', yet it still must precede
    'b' on the command line.
    """

    def __init__(self):
        self.constraints_ = []

    def add_pair(self, first, second):
        """Record the constraint that 'first' should precede 'second'."""
        self.constraints_.append((first, second))

    def order(self, objects):
        """Reorder 'objects' so the constraints from add_pair() hold.

        Standard transitive-closure-style topological sort: repeatedly pick
        the first object (in current order) that nothing still has to
        precede, emit it, and drop the constraints it satisfies. Objects
        without constraints keep their relative order.
        """
        if not objects:
            return []

        remaining = list(objects)
        # Keep only constraints whose both endpoints are being ordered
        # (the subgraph induced by the ordered vertices).
        active = [c for c in self.constraints_
                  if c[0] in remaining and c[1] in remaining]

        result = []
        while remaining:
            free = None
            for candidate in remaining:
                # 'candidate' is emittable if no active constraint requires
                # something else to come before it.
                if not any(second == candidate for _first, second in active):
                    free = candidate
                    break
            if free is None:
                raise BaseException("Circular order dependencies")
            remaining.remove(free)
            result.append(free)
            # Constraints starting at 'free' are now satisfied.
            active = [c for c in active if c[0] != free]

        return result
| bsd-3-clause |
eneldoserrata/marcos_openerp | addons/fetchmail/fetchmail.py | 13 | 15320 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import time
from imaplib import IMAP4
from imaplib import IMAP4_SSL
from poplib import POP3
from poplib import POP3_SSL
try:
import cStringIO as StringIO
except ImportError:
import StringIO
import zipfile
import base64
from openerp import addons
from openerp import netsvc
from openerp.osv import fields, osv
from openerp import tools
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class fetchmail_server(osv.osv):
"""Incoming POP/IMAP mail server account"""
_name = 'fetchmail.server'
_description = "POP/IMAP Server"
_order = 'priority'
_columns = {
'name':fields.char('Name', size=256, required=True, readonly=False),
'active':fields.boolean('Active', required=False),
'state':fields.selection([
('draft', 'Not Confirmed'),
('done', 'Confirmed'),
], 'Status', select=True, readonly=True),
'server' : fields.char('Server Name', size=256, readonly=True, help="Hostname or IP of the mail server", states={'draft':[('readonly', False)]}),
'port' : fields.integer('Port', readonly=True, states={'draft':[('readonly', False)]}),
'type':fields.selection([
('pop', 'POP Server'),
('imap', 'IMAP Server'),
('local', 'Local Server'),
], 'Server Type', select=True, required=True, readonly=False),
'is_ssl':fields.boolean('SSL/TLS', help="Connections are encrypted with SSL/TLS through a dedicated port (default: IMAPS=993, POP3S=995)"),
'attach':fields.boolean('Keep Attachments', help="Whether attachments should be downloaded. "
"If not enabled, incoming emails will be stripped of any attachments before being processed"),
'original':fields.boolean('Keep Original', help="Whether a full original copy of each email should be kept for reference"
"and attached to each processed message. This will usually double the size of your message database."),
'date': fields.datetime('Last Fetch Date', readonly=True),
'user' : fields.char('Username', size=256, readonly=True, states={'draft':[('readonly', False)]}),
'password' : fields.char('Password', size=1024, readonly=True, states={'draft':[('readonly', False)]}),
'action_id':fields.many2one('ir.actions.server', 'Server Action', help="Optional custom server action to trigger for each incoming mail, "
"on the record that was created or updated by this mail"),
'object_id': fields.many2one('ir.model', "Create a New Record", help="Process each incoming mail as part of a conversation "
"corresponding to this document type. This will create "
"new documents for new conversations, or attach follow-up "
"emails to the existing conversations (documents)."),
'priority': fields.integer('Server Priority', readonly=True, states={'draft':[('readonly', False)]}, help="Defines the order of processing, "
"lower values mean higher priority"),
'message_ids': fields.one2many('mail.mail', 'fetchmail_server_id', 'Messages', readonly=True),
'configuration' : fields.text('Configuration', readonly=True),
'script' : fields.char('Script', readonly=True, size=64),
}
_defaults = {
'state': "draft",
'type': "pop",
'active': True,
'priority': 5,
'attach': True,
'script': '/mail/static/scripts/openerp_mailgate.py',
}
def onchange_server_type(self, cr, uid, ids, server_type=False, ssl=False, object_id=False):
port = 0
values = {}
if server_type == 'pop':
port = ssl and 995 or 110
elif server_type == 'imap':
port = ssl and 993 or 143
else:
values['server'] = ''
values['port'] = port
conf = {
'dbname' : cr.dbname,
'uid' : uid,
'model' : 'MODELNAME',
}
if object_id:
m = self.pool.get('ir.model')
r = m.read(cr,uid,[object_id],['model'])
conf['model']=r[0]['model']
values['configuration'] = """Use the below script with the following command line options with your Mail Transport Agent (MTA)
openerp_mailgate.py --host=HOSTNAME --port=PORT -u %(uid)d -p PASSWORD -d %(dbname)s
Example configuration for the postfix mta running locally:
/etc/postfix/virtual_aliases:
@youdomain openerp_mailgate@localhost
/etc/aliases:
openerp_mailgate: "|/path/to/openerp-mailgate.py --host=localhost -u %(uid)d -p PASSWORD -d %(dbname)s"
""" % conf
return {'value':values}
def set_draft(self, cr, uid, ids, context=None):
self.write(cr, uid, ids , {'state':'draft'})
return True
def connect(self, cr, uid, server_id, context=None):
if isinstance(server_id, (list,tuple)):
server_id = server_id[0]
server = self.browse(cr, uid, server_id, context)
if server.type == 'imap':
if server.is_ssl:
connection = IMAP4_SSL(server.server, int(server.port))
else:
connection = IMAP4(server.server, int(server.port))
connection.login(server.user, server.password)
elif server.type == 'pop':
if server.is_ssl:
connection = POP3_SSL(server.server, int(server.port))
else:
connection = POP3(server.server, int(server.port))
#TODO: use this to remove only unread messages
#connection.user("recent:"+server.user)
connection.user(server.user)
connection.pass_(server.password)
return connection
def button_confirm_login(self, cr, uid, ids, context=None):
if context is None:
context = {}
for server in self.browse(cr, uid, ids, context=context):
try:
connection = server.connect()
server.write({'state':'done'})
except Exception, e:
_logger.exception("Failed to connect to %s server %s.", server.type, server.name)
raise osv.except_osv(_("Connection test failed!"), _("Here is what we got instead:\n %s.") % tools.ustr(e))
finally:
try:
if connection:
if server.type == 'imap':
connection.close()
elif server.type == 'pop':
connection.quit()
except Exception:
# ignored, just a consequence of the previous exception
pass
return True
def _fetch_mails(self, cr, uid, ids=False, context=None):
if not ids:
ids = self.search(cr, uid, [('state','=','done'),('type','in',['pop','imap'])])
return self.fetch_mail(cr, uid, ids, context=context)
def fetch_mail(self, cr, uid, ids, context=None):
"""WARNING: meant for cron usage only - will commit() after each email!"""
if context is None:
context = {}
context['fetchmail_cron_running'] = True
mail_thread = self.pool.get('mail.thread')
action_pool = self.pool.get('ir.actions.server')
for server in self.browse(cr, uid, ids, context=context):
_logger.info('start checking for new emails on %s server %s', server.type, server.name)
context.update({'fetchmail_server_id': server.id, 'server_type': server.type})
count, failed = 0, 0
imap_server = False
pop_server = False
if server.type == 'imap':
try:
imap_server = server.connect()
imap_server.select()
result, data = imap_server.search(None, '(UNSEEN)')
for num in data[0].split():
res_id = None
result, data = imap_server.fetch(num, '(RFC822)')
imap_server.store(num, '-FLAGS', '\\Seen')
try:
res_id = mail_thread.message_process(cr, uid, server.object_id.model,
data[0][1],
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
except Exception:
_logger.exception('Failed to process mail from %s server %s.', server.type, server.name)
failed += 1
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
imap_server.store(num, '+FLAGS', '\\Seen')
cr.commit()
count += 1
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", count, server.type, server.name, (count - failed), failed)
except Exception:
_logger.exception("General failure when trying to fetch mail from %s server %s.", server.type, server.name)
finally:
if imap_server:
imap_server.close()
imap_server.logout()
elif server.type == 'pop':
try:
pop_server = server.connect()
(numMsgs, totalSize) = pop_server.stat()
pop_server.list()
for num in range(1, numMsgs + 1):
(header, msges, octets) = pop_server.retr(num)
msg = '\n'.join(msges)
res_id = None
try:
res_id = mail_thread.message_process(cr, uid, server.object_id.model,
msg,
save_original=server.original,
strip_attachments=(not server.attach),
context=context)
except Exception:
_logger.exception('Failed to process mail from %s server %s.', server.type, server.name)
failed += 1
if res_id and server.action_id:
action_pool.run(cr, uid, [server.action_id.id], {'active_id': res_id, 'active_ids': [res_id], 'active_model': context.get("thread_model", server.object_id.model)})
pop_server.dele(num)
cr.commit()
_logger.info("Fetched %d email(s) on %s server %s; %d succeeded, %d failed.", numMsgs, server.type, server.name, (numMsgs - failed), failed)
except Exception:
_logger.exception("General failure when trying to fetch mail from %s server %s.", server.type, server.name)
finally:
if pop_server:
pop_server.quit()
server.write({'date': time.strftime(tools.DEFAULT_SERVER_DATETIME_FORMAT)})
return True
def cron_update(self, cr, uid, context=None):
if context is None:
context = {}
if not context.get('fetchmail_cron_running'):
# Enabled/Disable cron based on the number of 'done' server of type pop or imap
ids = self.search(cr, uid, [('state','=','done'),('type','in',['pop','imap'])])
try:
cron_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'fetchmail', 'ir_cron_mail_gateway_action')[1]
self.pool.get('ir.cron').write(cr, 1, [cron_id], {'active': bool(ids)})
except ValueError:
# Nevermind if default cron cannot be found
pass
def create(self, cr, uid, values, context=None):
res = super(fetchmail_server, self).create(cr, uid, values, context=context)
self.cron_update(cr, uid, context=context)
return res
def write(self, cr, uid, ids, values, context=None):
res = super(fetchmail_server, self).write(cr, uid, ids, values, context=context)
self.cron_update(cr, uid, context=context)
return res
class mail_mail(osv.osv):
    """Extend mail.mail with a link back to the inbound server a message
    was fetched from."""
    _inherit = "mail.mail"

    _columns = {
        'fetchmail_server_id': fields.many2one('fetchmail.server', "Inbound Mail Server",
                                               readonly=True,
                                               select=True,
                                               oldname='server_id'),
    }

    def _stamp_fetchmail_server(self, values, context):
        # When the fetchmail cron put its server id into the context,
        # record it on the message being created/updated.
        server_id = context.get('fetchmail_server_id')
        if server_id:
            values['fetchmail_server_id'] = server_id

    def create(self, cr, uid, values, context=None):
        """Create a mail, tagging it with the originating fetchmail server."""
        if context is None:
            context = {}
        self._stamp_fetchmail_server(values, context)
        return super(mail_mail, self).create(cr, uid, values, context=context)

    def write(self, cr, uid, ids, values, context=None):
        """Update mails, tagging them with the originating fetchmail server."""
        if context is None:
            context = {}
        self._stamp_fetchmail_server(values, context)
        return super(mail_mail, self).write(cr, uid, ids, values, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.