repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
Juniper/nova | nova/tests/unit/virt/libvirt/volume/test_vrtshyperscale.py | 2 | 3191 | # Copyright (c) 2017 Veritas Technologies LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from os_brick.initiator import connector
from nova.tests.unit.virt.libvirt.volume import test_volume
from nova.virt.libvirt.volume import vrtshyperscale
# Fixture identifiers for a fake HyperScale volume used across all tests below.
DEVICE_NAME = '{8ee71c33-dcd0-4267-8f2b-e0742ecabe9f}'
DEVICE_PATH = '/dev/8ee71c33-dcd0-4267-8f2b-e0742ec'
class LibvirtHyperScaleVolumeDriverTestCase(
        test_volume.LibvirtVolumeBaseTestCase):
    """Exercise the Veritas HyperScale libvirt volume driver."""

    def test_driver_init(self):
        # The driver must wire itself to the os-brick HyperScale connector.
        driver = vrtshyperscale.LibvirtHyperScaleVolumeDriver(self.fake_host)
        self.assertIsInstance(driver.connector, connector.HyperScaleConnector)

    def test_get_config(self):
        driver = vrtshyperscale.LibvirtHyperScaleVolumeDriver(self.fake_host)
        device_info = {
            'name': DEVICE_NAME,
            'type': None,
            'dev': None,
            'bus': None,
            'device_path': DEVICE_PATH,
        }
        # A valid config exposes the volume as a block device at the
        # path reported by the connector.
        config = driver.get_config({'data': device_info}, device_info)
        self.assertEqual('block', config.source_type)
        self.assertEqual(DEVICE_PATH, config.source_path)

    @mock.patch('os_brick.initiator.connectors.vrtshyperscale'
                '.HyperScaleConnector.connect_volume')
    def test_connect_volume(self, mock_brick_connect_volume):
        mock_brick_connect_volume.return_value = {'path': DEVICE_PATH}
        driver = vrtshyperscale.LibvirtHyperScaleVolumeDriver(self.fake_host)
        # Dummy arguments are passed straight through to the mocked connector.
        device_info = {'name': DEVICE_NAME}
        connection_info = {'data': device_info}
        driver.connect_volume(connection_info, device_info,
                              mock.sentinel.instance)
        # connect_volume must record the resolved device path in-place.
        self.assertEqual(DEVICE_PATH,
                         connection_info['data']['device_path'])

    @mock.patch('os_brick.initiator.connectors.vrtshyperscale'
                '.HyperScaleConnector.disconnect_volume')
    def test_disconnect_volume(self, mock_brick_disconnect_volume):
        mock_brick_disconnect_volume.return_value = None
        driver = vrtshyperscale.LibvirtHyperScaleVolumeDriver(self.fake_host)
        # Dummy arguments are passed straight through to the mocked connector.
        device_info = {'name': DEVICE_NAME}
        connection_info = {'data': device_info}
        driver.disconnect_volume(connection_info, device_info,
                                 mock.sentinel.instance)
        driver.connector.disconnect_volume.assert_called_once_with(
            connection_info['data'], None)
| apache-2.0 |
smasala/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/style/checkers/test_expectations.py | 126 | 4421 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Checks WebKit style for test_expectations files."""
import logging
import optparse
import os
import re
import sys
from common import TabChecker
from webkitpy.common.host import Host
from webkitpy.layout_tests.models.test_expectations import TestExpectationParser
_log = logging.getLogger(__name__)
class TestExpectationsChecker(object):
    """Processes TestExpectations lines for validating the syntax."""

    categories = {'test/expectations'}

    def _determine_port_from_expectations_path(self, host, expectations_path):
        """Return the port whose expectations file matches the given path,
        or None when no port owns it."""
        # Pass a fixed configuration to avoid calling default_configuration()
        # when initializing the port (takes 0.5 seconds on a Mac Pro!).
        wk1_options = optparse.Values(
            {'configuration': 'Release', 'webkit_test_runner': False})
        wk2_options = optparse.Values(
            {'configuration': 'Release', 'webkit_test_runner': True})
        for port_name in host.port_factory.all_port_names():
            # Check both the WebKit1 and WebKit2 flavour of each port.
            ports = [host.port_factory.get(port_name, options=opts)
                     for opts in (wk1_options, wk2_options)]
            for port in ports:
                prefix = port.path_from_webkit_base() + host.filesystem.sep
                for expectations_file in port.expectations_files():
                    if expectations_file.replace(prefix, '') == expectations_path:
                        return port
        return None

    def __init__(self, file_path, handle_style_error, host=None):
        self._file_path = file_path
        self._handle_style_error = handle_style_error
        self._tab_checker = TabChecker(file_path, handle_style_error)
        # FIXME: host should be a required parameter, not an optional one.
        host = host or Host()
        host.initialize_scm()
        self._port_obj = self._determine_port_from_expectations_path(
            host, file_path)
        # The test_expectations module logs its own errors; silence them here
        # because the same problems are surfaced later as style errors.
        expectations_log = logging.getLogger(
            "webkitpy.layout_tests.layout_package.test_expectations")
        expectations_log.setLevel(logging.CRITICAL)

    def _handle_error_message(self, lineno, message, confidence):
        # Errors funnelled through this callback are deliberately dropped.
        pass

    def check_test_expectations(self, expectations_str, tests=None):
        """Parse the expectations text and report each warning as a style
        error in the 'test/expectations' category."""
        parser = TestExpectationParser(self._port_obj, tests,
                                       allow_rebaseline_modifier=False)
        level = 5
        for expectation_line in parser.parse('expectations', expectations_str):
            for warning in expectation_line.warnings:
                self._handle_style_error(expectation_line.line_number,
                                         'test/expectations', level, warning)

    def check_tabs(self, lines):
        self._tab_checker.check(lines)

    def check(self, lines):
        """Run all checks over the file's lines."""
        if self._port_obj:
            self.check_test_expectations(
                expectations_str='\n'.join(lines), tests=None)
        # Tabs are flagged even when no owning port could be determined.
        self.check_tabs(lines)
| bsd-3-clause |
maxplanck-ie/HiCExplorer | hicexplorer/test/long_run/test_hicAggregateContacts_trivial_runs_two.py | 1 | 3482 | import warnings
# Silence noisy warnings before importing the heavy scientific stack.
warnings.simplefilter(action="ignore", category=RuntimeWarning)
warnings.simplefilter(action="ignore", category=PendingDeprecationWarning)
import pytest
from tempfile import NamedTemporaryFile
import os
from psutil import virtual_memory
import hicexplorer.hicAggregateContacts

# Available system memory in GB; decides which test cases may run.
mem = virtual_memory()
memory = mem.total / 2**30

# memory in GB the test computer needs to have to run the test case
LOW_MEMORY = 2
MID_MEMORY = 7
HIGH_MEMORY = 200

# NOTE(review): REMOVE_OUTPUT is defined but never consulted in this module.
REMOVE_OUTPUT = True

# Some definitions needed for tests
ROOT = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "test_data/")

# test_AggregateContacts input fixtures
matrix = ROOT + 'Li_et_al_2015.h5'
# matrix = ROOT + 'R1_R2_1000.h5'
BED = ROOT + 'hicAggregateContacts/test_regions.bed'
BED2 = ROOT + 'hicAggregateContacts/test_regions.bed'

# Output files are created up front; delete=False keeps them on disk until
# the test removes them itself.
outfile_aggregate_plots = NamedTemporaryFile(suffix='.png', prefix='hicaggregate_test_', delete=False)
diagnosticHeatmapFile = NamedTemporaryFile(suffix='.png', prefix='hicaggregate_heatmap_', delete=False)
@pytest.mark.skipif(MID_MEMORY > memory,
                    reason="Travis has too less memory to run it.")
@pytest.mark.parametrize("matrix", [matrix])  # required
@pytest.mark.parametrize("outFileName", [outfile_aggregate_plots])  # required
@pytest.mark.parametrize("BED", [BED])  # required
@pytest.mark.parametrize("ran", ['50000:900000'])  # required
@pytest.mark.parametrize("BED2", [BED2])
@pytest.mark.parametrize("numberOfBins", [30])
@pytest.mark.parametrize("transform", ['total-counts', 'z-score', 'obs/exp', 'none'])
@pytest.mark.parametrize("avgType", ['mean', 'median'])
@pytest.mark.parametrize("outFilePrefixMatrix", ['outFilePrefix'])
@pytest.mark.parametrize("outFileContactPairs", ['outFileContactPairs'])
@pytest.mark.parametrize("diagnosticHeatmapFile", [diagnosticHeatmapFile])
@pytest.mark.parametrize("kmeans", [4])
@pytest.mark.parametrize("hclust", [4])
@pytest.mark.parametrize("howToCluster", ['full', 'center', 'diagonal'])
@pytest.mark.parametrize("chromosomes", ['X'])
@pytest.mark.parametrize("colorMap", ['RdYlBu_r'])
@pytest.mark.parametrize("plotType", ['2d', '3d'])
@pytest.mark.parametrize("vMin", [0.01])
@pytest.mark.parametrize("vMax", [1.0])
def test_aggregate_contacts_two(capsys, matrix, outFileName, BED, ran, BED2, numberOfBins,
                                transform, avgType, outFilePrefixMatrix,
                                outFileContactPairs, diagnosticHeatmapFile, kmeans,
                                hclust, howToCluster, chromosomes, colorMap, plotType,
                                vMin, vMax):
    # Smoke test: runs hicAggregateContacts once per parameter combination and
    # only checks that main() completes without raising.
    # NOTE(review): outFilePrefixMatrix and diagnosticHeatmapFile are
    # parametrized but never passed on the command line below — confirm
    # whether they were meant to be part of the args string.
    # test outFileContactPairs^
    args = "--matrix {} --outFileName {} --BED {} --range {} --BED2 {} " \
           "--numberOfBins {} --transform {} --avgType {} --outFileContactPairs {} " \
           "--kmeans {} --hclust {} " \
           "--howToCluster {} --chromosomes {} --colorMap {} --plotType {} --vMin {} " \
           "--vMax {} --disable_bbox_tight".format(matrix, outFileName.name, BED, ran,
                                                   BED2, numberOfBins, transform, avgType,
                                                   outFileContactPairs,
                                                   kmeans, hclust,
                                                   howToCluster, chromosomes, colorMap,
                                                   plotType, vMin, vMax).split()
    hicexplorer.hicAggregateContacts.main(args)
    # Clean up the plot produced for this combination.
    os.remove(outFileName.name)
| gpl-2.0 |
sivaprakashniet/push_pull | p2p/lib/python2.7/site-packages/nose/commands.py | 68 | 6310 | """
nosetests setuptools command
----------------------------
The easiest way to run tests with nose is to use the `nosetests` setuptools
command::
python setup.py nosetests
This command has one *major* benefit over the standard `test` command: *all
nose plugins are supported*.
To configure the `nosetests` command, add a [nosetests] section to your
setup.cfg. The [nosetests] section can contain any command line arguments that
nosetests supports. The differences between issuing an option on the command
line and adding it to setup.cfg are:
* In setup.cfg, the -- prefix must be excluded
* In setup.cfg, command line flags that take no arguments must be given an
argument flag (1, T or TRUE for active, 0, F or FALSE for inactive)
Here's an example [nosetests] setup.cfg section::
[nosetests]
verbosity=1
detailed-errors=1
with-coverage=1
cover-package=nose
debug=nose.loader
pdb=1
pdb-failures=1
If you commonly run nosetests with a large number of options, using
the nosetests setuptools command and configuring with setup.cfg can
make running your tests much less tedious. (Note that the same options
and format supported in setup.cfg are supported in all other config
files, and the nosetests script will also load config files.)
Another reason to run tests with the command is that the command will
install packages listed in your `tests_require`, as well as doing a
complete build of your package before running tests. For packages with
dependencies or that build C extensions, using the setuptools command
can be more convenient than building by hand and running the nosetests
script.
Bootstrapping
-------------
If you are distributing your project and want users to be able to run tests
without having to install nose themselves, add nose to the setup_requires
section of your setup()::
setup(
# ...
setup_requires=['nose>=1.0']
)
This will direct setuptools to download and activate nose during the setup
process, making the ``nosetests`` command available.
"""
try:
    from setuptools import Command
except ImportError:
    # Without setuptools the command cannot exist; expose placeholders so
    # "from nose.commands import nosetests" still imports cleanly.
    Command = nosetests = None
else:
    from nose.config import Config, option_blacklist, user_config_files, \
        flag, _bool
    from nose.core import TestProgram
    from nose.plugins import DefaultPluginManager

    def get_user_options(parser):
        """convert an optparse option list into a distutils option tuple list"""
        opt_list = []
        for opt in parser.option_list:
            # Skip options distutils must not see (e.g. --version, --help).
            if opt._long_opts[0][2:] in option_blacklist:
                continue
            long_name = opt._long_opts[0][2:]
            # distutils marks value-taking options with a trailing '='.
            if opt.action not in ('store_true', 'store_false'):
                long_name = long_name + "="
            short_name = None
            if opt._short_opts:
                short_name = opt._short_opts[0][1:]
            opt_list.append((long_name, short_name, opt.help or ""))
        return opt_list

    class nosetests(Command):
        """setuptools command that runs the test suite through nose."""
        description = "Run unit tests using nosetests"
        # Class-level config so user_options can be derived at import time.
        __config = Config(files=user_config_files(),
                          plugins=DefaultPluginManager())
        __parser = __config.getParser()
        user_options = get_user_options(__parser)

        def initialize_options(self):
            """create the member variables, but change hyphens to
            underscores
            """
            self.option_to_cmds = {}
            for opt in self.__parser.option_list:
                cmd_name = opt._long_opts[0][2:]
                option_name = cmd_name.replace('-', '_')
                self.option_to_cmds[option_name] = cmd_name
                setattr(self, option_name, None)
            self.attr = None

        def finalize_options(self):
            """nothing to do here"""
            pass

        def run(self):
            """ensure tests are capable of being run, then
            run nose.main with a reconstructed argument list"""
            if getattr(self.distribution, 'use_2to3', False):
                # If we run 2to3 we can not do this inplace:
                # Ensure metadata is up-to-date
                build_py = self.get_finalized_command('build_py')
                build_py.inplace = 0
                build_py.run()
                bpy_cmd = self.get_finalized_command("build_py")
                build_path = bpy_cmd.build_lib
                # Build extensions
                egg_info = self.get_finalized_command('egg_info')
                egg_info.egg_base = build_path
                egg_info.run()
                build_ext = self.get_finalized_command('build_ext')
                build_ext.inplace = 0
                build_ext.run()
            else:
                self.run_command('egg_info')
                # Build extensions in-place
                build_ext = self.get_finalized_command('build_ext')
                build_ext.inplace = 1
                build_ext.run()
            # Install runtime and test-only dependencies before testing.
            if self.distribution.install_requires:
                self.distribution.fetch_build_eggs(
                    self.distribution.install_requires)
            if self.distribution.tests_require:
                self.distribution.fetch_build_eggs(
                    self.distribution.tests_require)
            # Rebuild a nosetests argv from every option set on this command.
            ei_cmd = self.get_finalized_command("egg_info")
            argv = ['nosetests', '--where', ei_cmd.egg_base]
            for (option_name, cmd_name) in self.option_to_cmds.items():
                if option_name in option_blacklist:
                    continue
                value = getattr(self, option_name)
                if value is not None:
                    argv.extend(
                        self.cfgToArg(option_name.replace('_', '-'), value))
            TestProgram(argv=argv, config=self.__config)

        def cfgToArg(self, optname, value):
            """Translate one setup.cfg option into command-line argv parts."""
            argv = []
            long_optname = '--' + optname
            opt = self.__parser.get_option(long_optname)
            if opt.action in ('store_true', 'store_false'):
                # Flags in setup.cfg carry a truthy/falsy value (1/0, T/F...).
                if not flag(value):
                    raise ValueError("Invalid value '%s' for '%s'" % (
                        value, optname))
                if _bool(value):
                    argv.append(long_optname)
            else:
                argv.extend([long_optname, value])
            return argv
| bsd-3-clause |
skozilla/betfair.py | betfair/models.py | 2 | 14047 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from schematics.types import IntType
from schematics.types import LongType
from schematics.types import FloatType
from schematics.types import StringType
from schematics.types import BooleanType
from schematics.types.compound import DictType
from schematics.types.compound import ListType
from schematics.types.compound import ModelType
from betfair.meta.types import EnumType
from betfair.meta.types import DateTimeType
from betfair.meta.models import BetfairModel
from betfair import constants
class Event(BetfairModel):
    """A sporting event (match, race meeting, ...)."""
    id = StringType()
    name = StringType()
    country_code = StringType()
    timezone = StringType()
    venue = StringType()
    open_date = DateTimeType()


class MarketDescription(BetfairModel):
    """Static description of a market's rules and settlement details."""
    persistence_enabled = BooleanType(required=True)
    bsp_market = BooleanType(required=True)
    market_time = DateTimeType(required=True)
    suspend_time = DateTimeType(required=True)
    settle_time = DateTimeType()
    betting_type = EnumType(constants.MarketBettingType, required=True)
    turn_in_play_enabled = BooleanType(required=True)
    market_type = StringType(required=True)
    regulator = StringType(required=True)
    market_base_rate = FloatType(required=True)
    discount_allowed = BooleanType(required=True)
    wallet = StringType()
    rules = StringType()
    rules_has_date = BooleanType()
    clarifications = StringType()
    each_way_divisor = FloatType()


class RunnerCatalog(BetfairModel):
    """Static (catalogue) information about one runner in a market."""
    selection_id = IntType(required=True)
    runner_name = StringType(required=True)
    handicap = FloatType(required=True)
    sort_priority = IntType(required=True)
    metadata = DictType(StringType)


class EventType(BetfairModel):
    """A category of events (e.g. Soccer, Horse Racing)."""
    id = StringType()
    name = StringType()


class Competition(BetfairModel):
    """A competition (league, cup, ...) grouping events."""
    id = StringType()
    name = StringType()


class MarketCatalogue(BetfairModel):
    """Catalogue entry describing one market and its related entities."""
    market_id = StringType()
    market_name = StringType()
    market_start_time = DateTimeType()
    description = ModelType(MarketDescription)
    total_matched = FloatType()
    runners = ListType(ModelType(RunnerCatalog))
    event_type = ModelType(EventType)
    competition = ModelType(Competition)
    event = ModelType(Event)
class TimeRange(BetfairModel):
    """A from/to datetime window; 'from' is a Python keyword, hence from_."""
    from_ = DateTimeType(deserialize_from='from', serialized_name='from')
    to = DateTimeType()


class MarketFilter(BetfairModel):
    """Filter describing which markets to return from listing operations."""
    text_query = StringType()
    # NOTE(review): scalar StringType while the other *_ids filters are
    # lists — confirm against the Betfair listMarketCatalogue filter schema.
    exchange_ids = StringType()
    event_type_ids = ListType(StringType)
    event_ids = ListType(StringType)
    competition_ids = ListType(StringType)
    market_ids = ListType(StringType)
    venues = ListType(StringType)
    bsp_only = BooleanType()
    turn_in_play_enabled = BooleanType()
    in_play_only = BooleanType()
    market_betting_types = ListType(EnumType(constants.MarketBettingType))
    market_countries = ListType(StringType)
    market_type_codes = ListType(StringType)
    market_start_time = ModelType(TimeRange)
    with_orders = ListType(EnumType(constants.OrderStatus))
class PriceSize(BetfairModel):
    """A price level and the stake available/traded at it."""
    price = FloatType(required=True)
    size = FloatType(required=True)


class StartingPrices(BetfairModel):
    """Betfair Starting Price (BSP) information for a runner."""
    near_price = FloatType()
    far_price = FloatType()
    back_stake_taken = ListType(ModelType(PriceSize))
    lay_liability_taken = ListType(ModelType(PriceSize))
    # NOTE(review): mixed-case name is inconsistent with the snake_case
    # fields; renaming would change the serialized field, so left as-is.
    actual_SP = FloatType()


class ExchangePrices(BetfairModel):
    """Available back/lay prices and traded volume for a runner."""
    available_to_back = ListType(ModelType(PriceSize))
    available_to_lay = ListType(ModelType(PriceSize))
    traded_volume = ListType(ModelType(PriceSize))


class Order(BetfairModel):
    """An order placed on a runner, with its matched/remaining breakdown."""
    bet_id = StringType(required=True)
    order_type = EnumType(constants.OrderType, required=True)
    status = EnumType(constants.OrderStatus, required=True)
    persistence_type = EnumType(constants.PersistenceType, required=True)
    side = EnumType(constants.Side, required=True)
    price = FloatType(required=True)
    size = FloatType(required=True)
    bsp_liability = BooleanType(required=True)
    placed_date = DateTimeType(required=True)
    avg_price_matched = FloatType()
    size_matched = FloatType()
    size_remaining = FloatType()
    size_lapsed = FloatType()
    size_cancelled = FloatType()
    size_voided = FloatType()


class Match(BetfairModel):
    """An individual matched portion of an order."""
    bet_id = StringType()
    match_id = StringType()
    side = EnumType(constants.Side, required=True)
    price = FloatType(required=True)
    size = FloatType(required=True)
    match_date = DateTimeType()
class Runner(BetfairModel):
    """Dynamic (book) state of one runner within a MarketBook."""
    selection_id = IntType(required=True)
    handicap = FloatType(required=True)
    status = EnumType(constants.RunnerStatus, required=True)
    adjustment_factor = FloatType()
    last_price_traded = FloatType()
    total_matched = FloatType()
    removal_date = DateTimeType()
    sp = ModelType(StartingPrices)
    ex = ModelType(ExchangePrices)
    orders = ListType(ModelType(Order))
    matches = ListType(ModelType(Match))


class MarketBook(BetfairModel):
    """Dynamic state of a market: status, totals, and per-runner books."""
    market_id = StringType(required=True)
    is_market_data_delayed = BooleanType(required=True)
    status = EnumType(constants.MarketStatus)
    bet_delay = IntType()
    bsp_reconciled = BooleanType()
    complete = BooleanType()
    inplay = BooleanType()
    number_of_winners = IntType()
    number_of_runners = IntType()
    number_of_active_runners = IntType()
    last_match_time = DateTimeType()
    total_matched = FloatType()
    total_available = FloatType()
    cross_matching = BooleanType()
    runners_voidable = BooleanType()
    version = FloatType()
    runners = ListType(ModelType(Runner))
class RunnerProfitAndLoss(BetfairModel):
    """Profit or loss on one runner if it wins or loses."""
    selection_id = IntType()
    if_win = FloatType()
    if_lose = FloatType()


class MarketProfitAndLoss(BetfairModel):
    """Profit and loss across all runners of a market."""
    market_id = StringType()
    commission_applied = FloatType()
    profit_and_losses = ListType(ModelType(RunnerProfitAndLoss))


class ExBestOffersOverrides(BetfairModel):
    """Options controlling depth/rollup of EX_BEST_OFFERS price data."""
    best_prices_depth = IntType()
    rollup_model = EnumType(constants.RollupModel)
    rollup_limit = IntType()
    rollup_liability_threshold = FloatType()
    rollup_liability_factor = IntType()


class PriceProjection(BetfairModel):
    """Selects which price data to return with a MarketBook request."""
    price_data = ListType(EnumType(constants.PriceData))
    ex_best_offers_overrides = ModelType(ExBestOffersOverrides)
    virtualise = BooleanType()
    rollover_stakes = BooleanType()


class LimitOrder(BetfairModel):
    """A simple exchange limit order (size at a price)."""
    size = FloatType(required=True)
    price = FloatType(required=True)
    persistence_type = EnumType(constants.PersistenceType, required=True)


class LimitOnCloseOrder(BetfairModel):
    """A limit-at-close (BSP) order capped by liability."""
    liability = FloatType(required=True)
    price = FloatType(required=True)


class MarketOnCloseOrder(BetfairModel):
    """A market-on-close (BSP) order expressed as a liability."""
    liability = FloatType(required=True)
# Results: thin wrappers pairing an entity with its market count, as
# returned by the list* catalogue operations.
class CompetitionResult(BetfairModel):
    competition = ModelType(Competition)
    market_count = IntType()
    competition_region = StringType()


class CountryCodeResult(BetfairModel):
    country_code = StringType()
    market_count = IntType()


class EventResult(BetfairModel):
    event = ModelType(Event)
    market_count = IntType()


class EventTypeResult(BetfairModel):
    event_type = ModelType(EventType)
    market_count = IntType()


class MarketTypeResult(BetfairModel):
    market_type = StringType()
    market_count = IntType()


class TimeRangeResult(BetfairModel):
    time_range = ModelType(TimeRange)
    market_count = IntType()


class VenueResult(BetfairModel):
    venue = StringType()
    market_count = IntType()
# Instructions: request payloads for placing/cancelling/replacing/updating
# bets. Exactly one of the *_order fields applies per PlaceInstruction,
# depending on order_type.
class PlaceInstruction(BetfairModel):
    order_type = EnumType(constants.OrderType, required=True)
    selection_id = IntType(required=True)
    handicap = FloatType()
    side = EnumType(constants.Side, required=True)
    limit_order = ModelType(LimitOrder)
    limit_on_close_order = ModelType(LimitOnCloseOrder)
    market_on_close_order = ModelType(MarketOnCloseOrder)


class CancelInstruction(BetfairModel):
    # Omitting size_reduction cancels the full remaining size.
    bet_id = StringType(required=True)
    size_reduction = FloatType()


class ReplaceInstruction(BetfairModel):
    bet_id = StringType(required=True)
    new_price = FloatType(required=True)


class UpdateInstruction(BetfairModel):
    bet_id = StringType(required=True)
    new_persistence_type = EnumType(constants.PersistenceType, required=True)
# Summary reports: responses of listCurrentOrders / listClearedOrders.
class CurrentOrderSummary(BetfairModel):
    """One outstanding (current) order."""
    bet_id = StringType(required=True)
    market_id = StringType(required=True)
    selection_id = IntType(required=True)
    handicap = FloatType(required=True)
    price_size = ModelType(PriceSize, required=True)
    bsp_liability = FloatType(required=True)
    side = EnumType(constants.Side, required=True)
    status = EnumType(constants.OrderStatus, required=True)
    persistence_type = EnumType(constants.PersistenceType, required=True)
    order_type = EnumType(constants.OrderType, required=True)
    placed_date = DateTimeType(required=True)
    matched_date = DateTimeType()
    average_price_matched = FloatType()
    size_matched = FloatType()
    size_remaining = FloatType()
    size_lapsed = FloatType()
    size_cancelled = FloatType()
    size_voided = FloatType()
    regulator_auth_code = StringType()
    regulator_code = StringType()


class CurrentOrderSummaryReport(BetfairModel):
    """Paged list of current orders; more_available signals further pages."""
    current_orders = ListType(ModelType(CurrentOrderSummary), required=True)
    more_available = BooleanType(required=True)


class ItemDescription(BetfairModel):
    """Human-readable context for a cleared-order item."""
    event_type_desc = StringType()
    event_desc = StringType()
    market_desc = StringType()
    # NOTE(review): mixed-case name is inconsistent with the snake_case
    # fields; renaming would change the serialized field, so left as-is.
    market_start_Time = DateTimeType()
    runner_desc = StringType()
    number_of_winners = IntType()


class ClearedOrderSummary(BetfairModel):
    """One settled (cleared) order."""
    event_type_id = StringType()
    event_id = StringType()
    market_id = StringType()
    selection_id = IntType()
    handicap = FloatType()
    bet_id = StringType()
    placed_date = DateTimeType()
    persistence_type = EnumType(constants.PersistenceType)
    order_type = EnumType(constants.OrderType)
    side = EnumType(constants.Side)
    item_description = ModelType(ItemDescription)
    price_requested = FloatType()
    settled_date = DateTimeType()
    bet_count = IntType()
    commission = FloatType()
    price_matched = FloatType()
    price_reduced = BooleanType()
    size_settled = FloatType()
    profit = FloatType()
    size_cancelled = FloatType()


class ClearedOrderSummaryReport(BetfairModel):
    """Paged list of cleared orders; more_available signals further pages."""
    cleared_orders = ListType(ModelType(ClearedOrderSummary), required=True)
    more_available = BooleanType(required=True)
# Instruction reports: per-instruction outcome of a betting operation.
class BaseInstructionReport(BetfairModel):
    status = EnumType(constants.InstructionReportStatus, required=True)
    error_code = EnumType(constants.InstructionReportErrorCode)


class PlaceInstructionReport(BaseInstructionReport):
    instruction = ModelType(PlaceInstruction, required=True)
    bet_id = StringType()
    placed_date = DateTimeType()
    average_price_matched = FloatType()
    size_matched = FloatType()


class CancelInstructionReport(BaseInstructionReport):
    instruction = ModelType(CancelInstruction)
    size_cancelled = FloatType(required=True)
    cancelled_date = DateTimeType()


class ReplaceInstructionReport(BaseInstructionReport):
    # A replace is a cancel followed by a place; both sub-reports included.
    cancel_instruction_report = ModelType(CancelInstructionReport)
    place_instruction_report = ModelType(PlaceInstructionReport)


class UpdateInstructionReport(BaseInstructionReport):
    instruction = ModelType(UpdateInstruction, required=True)


# Execution reports: overall outcome of a betting operation, wrapping the
# per-instruction reports above.
class BaseExecutionReport(BetfairModel):
    customer_ref = StringType()
    status = EnumType(constants.ExecutionReportStatus, required=True)
    error_code = EnumType(constants.ExecutionReportErrorCode)
    market_id = StringType()


class PlaceExecutionReport(BaseExecutionReport):
    instruction_reports = ListType(ModelType(PlaceInstructionReport))


class CancelExecutionReport(BaseExecutionReport):
    instruction_reports = ListType(ModelType(CancelInstructionReport))


class ReplaceExecutionReport(BaseExecutionReport):
    instruction_reports = ListType(ModelType(ReplaceInstructionReport))


class UpdateExecutionReport(BaseExecutionReport):
    instruction_reports = ListType(ModelType(UpdateInstructionReport))
# Accounts: models for the Betfair Accounts API responses.
class AccountFundsResponse(BetfairModel):
    """Balance and exposure of one wallet."""
    available_to_bet_balance = FloatType()
    exposure = FloatType()
    retained_commission = FloatType()
    exposure_limit = FloatType()
    discount_rate = FloatType()
    points_balance = IntType()
    wallet = EnumType(constants.Wallet)


class StatementLegacyData(BetfairModel):
    """Legacy per-bet detail attached to an account statement item."""
    avg_price = FloatType()
    bet_size = FloatType()
    bet_type = StringType()
    bet_category_type = StringType()
    commission_rate = StringType()
    event_id = LongType()
    event_type_id = LongType()
    full_market_name = StringType()
    gross_bet_amount = FloatType()
    market_name = StringType()
    market_type = StringType()
    placed_date = DateTimeType()
    selection_id = LongType()
    selection_name = StringType()
    start_date = DateTimeType()
    transaction_type = StringType()
    transaction_id = LongType()
    win_lose = StringType()


class StatementItem(BetfairModel):
    """One line of the account statement."""
    ref_id = StringType()
    item_date = DateTimeType()
    amount = FloatType()
    balance = FloatType()
    item_class = EnumType(constants.ItemClass)
    item_class_data = DictType(StringType)
    legacy_data = ModelType(StatementLegacyData)


class AccountDetailsResponse(BetfairModel):
    """Profile details of the authenticated account."""
    currency_code = StringType()
    first_name = StringType()
    last_name = StringType()
    locale_code = StringType()
    region = StringType()
    timezone = StringType()
    discount_rate = FloatType()
    points_balance = IntType()
    country_code = StringType()


class AccountStatementReport(BetfairModel):
    """Paged account statement; more_available signals further pages."""
    account_statement = ListType(ModelType(StatementItem))
    more_available = BooleanType()


class CurrencyRate(BetfairModel):
    """Exchange rate for one currency."""
    currency_code = StringType()
    rate = FloatType()


class TransferResponse(BetfairModel):
    """Result of a wallet-to-wallet funds transfer."""
    transaction_id = StringType()
| mit |
jeffery9/mixprint_addons | account_payment/wizard/account_payment_pay.py | 54 | 2479 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
#TODO:REMOVE this wizard is not used
class account_payment_make_payment(osv.osv_memory):
    """Transient wizard that confirms a payment order."""
    _name = "account.payment.make.payment"
    _description = "Account make payment"

    def launch_wizard(self, cr, uid, ids, context=None):
        """
        Search for a wizard to launch according to the type.
        If type is manual. just confirm the order.
        """
        # Type-specific wizard dispatch was removed long ago; the active
        # payment order is now simply marked as done.
        payment_order_obj = self.pool.get('payment.order')
        if context is None:
            context = {}
        payment_order_obj.set_done(cr, uid, [context['active_id']], context)
        return {'type': 'ir.actions.act_window_close'}


account_payment_make_payment()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
nicolargo/intellij-community | python/lib/Lib/site-packages/django/contrib/admin/media/js/compress.py | 784 | 1896 | #!/usr/bin/env python
import os
import optparse
import subprocess
import sys
# Directory containing this script and the admin JavaScript sources.
here = os.path.dirname(__file__)


def main():
    """Minify the given JavaScript files (or the admin defaults) with the
    Google Closure Compiler."""
    usage = "usage: %prog [file1..fileN]"
    description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
    parser = optparse.OptionParser(usage, description=description)
    parser.add_option("-c", dest="compiler", default="~/bin/compiler.jar",
                      help="path to Closure Compiler jar file")
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose")
    parser.add_option("-q", "--quiet", action="store_false", dest="verbose")
    options, args = parser.parse_args()

    compiler_jar = os.path.expanduser(options.compiler)
    if not os.path.exists(compiler_jar):
        sys.exit("Google Closure compiler jar file %s not found. Please use the -c option to specify the path." % compiler_jar)

    if not args:
        # Fall back to the admin app's own jQuery-based scripts.
        if options.verbose:
            sys.stdout.write("No filenames given; defaulting to admin scripts\n")
        args = [os.path.join(here, f) for f in [
            "actions.js", "collapse.js", "inlines.js", "prepopulate.js"]]

    for arg in args:
        if not arg.endswith(".js"):
            arg = arg + ".js"
        source_path = os.path.expanduser(arg)
        if not os.path.exists(source_path):
            sys.stdout.write("File %s not found. Sure it exists?\n" % source_path)
            continue
        # foo.js -> foo.min.js
        minified_path = "%s.min.js" % "".join(arg.rsplit(".js"))
        cmd = "java -jar %s --js %s --js_output_file %s" % (compiler_jar, source_path, minified_path)
        if options.verbose:
            sys.stdout.write("Running: %s\n" % cmd)
        subprocess.call(cmd.split())


if __name__ == '__main__':
    main()
| apache-2.0 |
Qihoo360/luajit-jsonnet | libjsonnet/setup.py | 6 | 1969 | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from setuptools import setup
from setuptools import Extension
from setuptools.command.build_ext import build_ext as BuildExt
from subprocess import Popen
# Directory containing this setup.py (used to locate the Makefile outputs).
DIR = os.path.abspath(os.path.dirname(__file__))
# Prebuilt C++ object files produced by `make` (see BuildJsonnetExt below).
LIB_OBJECTS = ['libjsonnet.o', 'lexer.o', 'parser.o', 'static_analysis.o', 'vm.o']
# C source for the CPython binding module.
MODULE_SOURCES = ['_jsonnet.c']
def get_version():
    """
    Parse the version string out of libjsonnet.h.

    Scans the header for the LIB_JSONNET_VERSION #define and returns its
    value stripped of surrounding quotes/whitespace (None if not found).
    """
    header_path = os.path.join(DIR, 'libjsonnet.h')
    with open(header_path) as header:
        for raw_line in header:
            if '#define' in raw_line and 'LIB_JSONNET_VERSION' in raw_line:
                return raw_line.partition('LIB_JSONNET_VERSION')[2].strip('\n "')
class BuildJsonnetExt(BuildExt):
    """build_ext command that first builds the C++ objects with make."""

    def run(self):
        # Invoke the project Makefile to produce the .o files the
        # extension links against, then fall back to the normal build.
        p = Popen(['make'] + LIB_OBJECTS, cwd=DIR)
        p.wait()
        if p.returncode != 0:
            raise Exception('Could not build %s' % (', '.join(LIB_OBJECTS)))
        BuildExt.run(self)
# CPython extension module: C binding source linked with the prebuilt
# C++ objects (built by BuildJsonnetExt via make).
jsonnet_ext = Extension(
    '_jsonnet',
    sources=MODULE_SOURCES,
    extra_objects=LIB_OBJECTS,
    language='c++'
)

setup(name='jsonnet',
      url='https://google.github.io/jsonnet/doc/',
      description='Python bindings for Jsonnet - The data templating language ',
      author='David Cunningham',
      author_email='dcunnin@google.com',
      version=get_version(),
      cmdclass={
          # Replace the stock build_ext so the make step runs first.
          'build_ext': BuildJsonnetExt,
      },
      ext_modules=[jsonnet_ext],
      )
| apache-2.0 |
grimreaper/parallel-ssh | psshlib/cli.py | 58 | 4364 | # Copyright (c) 2009-2012, Andrew McNabb
# Copyright (c) 2003-2008, Brent N. Chun
import optparse
import os
import shlex
import sys
import textwrap
from psshlib import version
_DEFAULT_PARALLELISM = 32  # max concurrent connections unless -p overrides
_DEFAULT_TIMEOUT = 0 # "infinity" by default
def common_parser():
    """
    Create a basic OptionParser with arguments common to all pssh programs.

    Returns the parser; callers add their tool-specific options on top.
    """
    # The "resolve" conflict handler avoids errors from the hosts option
    # conflicting with the help option.
    parser = optparse.OptionParser(conflict_handler='resolve',
            version=version.VERSION)
    # Ensure that options appearing after the command are sent to ssh.
    parser.disable_interspersed_args()
    parser.epilog = "Example: pssh -h nodes.txt -l irb2 -o /tmp/foo uptime"

    # Host selection: -h reads files, -H takes inline entries; both append.
    parser.add_option('-h', '--hosts', dest='host_files', action='append',
            metavar='HOST_FILE',
            help='hosts file (each line "[user@]host[:port]")')
    parser.add_option('-H', '--host', dest='host_strings', action='append',
            metavar='HOST_STRING',
            help='additional host entries ("[user@]host[:port]")')
    parser.add_option('-l', '--user', dest='user',
            help='username (OPTIONAL)')
    parser.add_option('-p', '--par', dest='par', type='int',
            help='max number of parallel threads (OPTIONAL)')
    parser.add_option('-o', '--outdir', dest='outdir',
            help='output directory for stdout files (OPTIONAL)')
    parser.add_option('-e', '--errdir', dest='errdir',
            help='output directory for stderr files (OPTIONAL)')
    parser.add_option('-t', '--timeout', dest='timeout', type='int',
            help='timeout (secs) (0 = no timeout) per host (OPTIONAL)')
    parser.add_option('-O', '--option', dest='options', action='append',
            metavar='OPTION', help='SSH option (OPTIONAL)')
    parser.add_option('-v', '--verbose', dest='verbose', action='store_true',
            help='turn on warning and diagnostic messages (OPTIONAL)')
    parser.add_option('-A', '--askpass', dest='askpass', action='store_true',
            help='Ask for a password (OPTIONAL)')
    # -x tokenizes its argument with shlex (see shlex_append); -X appends raw.
    parser.add_option('-x', '--extra-args', action='callback', type='string',
            metavar='ARGS', callback=shlex_append, dest='extra',
            help='Extra command-line arguments, with processing for '
            'spaces, quotes, and backslashes')
    parser.add_option('-X', '--extra-arg', dest='extra', action='append',
            metavar='ARG', help='Extra command-line argument')

    return parser
def common_defaults(**kwargs):
    """Return the defaults dict shared by the pssh command-line tools.

    Starts from the built-in parallelism/timeout defaults, layers on any
    keyword overrides, then lets PSSH_* environment variables override
    individual options.  PSSH_OPTIONS and PSSH_HOSTS receive special
    list handling; PSSH_HOSTS also triggers a deprecation warning.
    """
    defaults = {'par': _DEFAULT_PARALLELISM, 'timeout': _DEFAULT_TIMEOUT}
    defaults.update(**kwargs)

    # Scalar options that a PSSH_* environment variable may override.
    env_overrides = (
        ('user', 'PSSH_USER'),
        ('par', 'PSSH_PAR'),
        ('outdir', 'PSSH_OUTDIR'),
        ('errdir', 'PSSH_ERRDIR'),
        ('timeout', 'PSSH_TIMEOUT'),
        ('verbose', 'PSSH_VERBOSE'),
        ('print_out', 'PSSH_PRINT'),
        ('askpass', 'PSSH_ASKPASS'),
        ('inline', 'PSSH_INLINE'),
        ('recursive', 'PSSH_RECURSIVE'),
        ('archive', 'PSSH_ARCHIVE'),
        ('compress', 'PSSH_COMPRESS'),
        ('localdir', 'PSSH_LOCALDIR'),
    )
    for opt_name, env_name in env_overrides:
        env_value = os.getenv(env_name)
        if env_value:
            # Note: environment values arrive as strings, even for
            # options that are normally ints or booleans.
            defaults[opt_name] = env_value

    opts_value = os.getenv('PSSH_OPTIONS')
    if opts_value:
        defaults['options'] = [opts_value]

    hosts_value = os.getenv('PSSH_HOSTS')
    if hosts_value:
        message1 = ('Warning: the PSSH_HOSTS environment variable is '
            'deprecated. Please use the "-h" option instead, and consider '
            'creating aliases for convenience. For example:')
        message2 = " alias pssh_abc='pssh -h /path/to/hosts_abc'"
        sys.stderr.write(textwrap.fill(message1))
        sys.stderr.write('\n')
        sys.stderr.write(message2)
        sys.stderr.write('\n')
        defaults['host_files'] = [hosts_value]

    return defaults
def shlex_append(option, opt_str, value, parser):
    """optparse callback that shlex-splits the value and appends the pieces.

    Behaves like the built-in "append" action, except the raw argument is
    first tokenized with shlex (honoring spaces, quotes, and backslashes)
    and every resulting token is added to the option's dest list.
    """
    tokens = shlex.split(value)
    dest_list = getattr(parser.values, option.dest)
    if dest_list is None:
        # First use of the option: create the list and attach it.
        dest_list = []
        setattr(parser.values, option.dest, dest_list)
    dest_list.extend(tokens)
| bsd-3-clause |
mohierf/bottle-webui | alignak_webui/objects/element_state.py | 1 | 13027 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Many functions need to use protected members of a base class
# pylint: disable=protected-access
# Attributes need to be defined in constructor before initialization
# pylint: disable=attribute-defined-outside-init
# Copyright (c) 2015-2017:
# Frederic Mohier, frederic.mohier@alignak.net
#
# This file is part of (WebUI).
#
# (WebUI) is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# (WebUI) is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with (WebUI). If not, see <http://www.gnu.org/licenses/>.
"""
This module contains the base class used to manage the application objects configuration:
- representation,
- date
-...
"""
from __future__ import print_function
from logging import getLogger, INFO
from dateutil import tz
from alignak_webui import get_app_config
# Set logger level to INFO, this to allow global application DEBUG logs without being spammed... ;)
# pylint: disable=invalid-name
logger = getLogger(__name__)
logger.setLevel(INFO)
# pylint: disable=too-few-public-methods
class ElementState(object):
    """
    Singleton design pattern ...

    The real implementation lives in the private inner class; __new__
    always hands back the one shared __ElementState instance.
    """

    class __ElementState(object):
        """
        Base class for all objects state management (displayed icon, ...)
        """

        def __init__(self):
            # states[object_type][state] -> {'text','icon','class'} plus a
            # 'state_view' entry holding the HTML templates.
            self.states = {}

            # Get global configuration
            app_config = get_app_config()
            if not app_config:  # pragma: no cover, should not happen
                print("No application configuration!")
                assert False

            # Scan configuration keys of the form items.<type>.<state>.<prop>
            # to discover object types, their states, and the generic
            # 'items.item.*' default states.
            self.object_types_states = {}
            self.default_states = {}
            for s in app_config:
                s = s.split('.')
                if s[0] not in ['items']:
                    continue
                logger.debug("ElementState, item configuration element: %s", s)
                if s[1] == 'item':
                    # Generic defaults applied to any type lacking its own.
                    if s[2] not in self.default_states:
                        self.default_states[s[2]] = []
                    continue
                if s[1] not in ['content', 'back', 'front', 'badge']:
                    if s[1] not in self.object_types_states:
                        self.object_types_states[s[1]] = []
                    if s[2] and s[2] not in self.object_types_states[s[1]]:
                        self.object_types_states[s[1]].append(s[2])
            logger.debug("ElementState, object types and states: %s", self.object_types_states)
            logger.debug("ElementState, default states: %s", self.default_states)

            # Application locales, timezone, ...
            # Set timezones
            self.tz_from = tz.gettz('UTC')
            logger.debug(
                "Set default time zone: %s",
                app_config.get("timezone", 'Europe/Paris')
            )
            self.tz_to = tz.gettz(app_config.get("timezone", 'Europe/Paris'))

            # Set class date format
            logger.debug(
                "Set default time format string: %s",
                app_config.get("timeformat", '%Y-%m-%d %H:%M:%S')
            )
            self.date_format = app_config.get("timeformat", '%Y-%m-%d %H:%M:%S')

            # For each defined object type and object type state ...
            for object_type in self.object_types_states:
                self.states[object_type] = {}
                for state in self.object_types_states[object_type]:
                    self.states[object_type][state] = {}
                    for prop in ['text', 'icon', 'class']:
                        search = "items.%s.%s.%s" % (object_type, state, prop)
                        if "items.%s.%s.%s" % (object_type, state, prop) in app_config:
                            self.states[object_type][state][prop] = app_config.get(search)
                        else:  # pragma: no cover, should not happen
                            self.states[object_type][state][prop] = \
                                app_config.get("items.%s.%s" % (state, prop), '')

                # If no states is defined for element type, define default states ...
                # if not self.states:
                for state in self.default_states:
                    if state not in self.states[object_type]:
                        self.states[object_type][state] = {}
                        for prop in ['text', 'icon', 'class']:
                            self.states[object_type][state][prop] = \
                                app_config.get("items.item.%s.%s" % (state, prop), '')

                # Build a self state view with content, back and front templates
                self.states[object_type]['state_view'] = {}
                for prop in ['content', 'back', 'front', 'badge']:
                    search = "items.%s.%s" % (object_type, prop)
                    if "items.%s.%s" % (object_type, prop) in app_config:  # pragma: no cover
                        self.states[object_type]['state_view'][prop] = \
                            app_config.get(search)
                    else:
                        self.states[object_type]['state_view'][prop] = \
                            app_config.get("items.%s" % prop)

                logger.debug(
                    " --- class configuration: %s: %s",
                    object_type, self.states[object_type]
                )

        def get_objects_types(self):
            """
            Return all the configured objects types

            All other object type will use the default 'item' configuration
            """
            return [s for s in self.states]

        def get_icon_states(self, object_type=None):
            """ Return all the configured states for an object type """
            if not object_type:
                return self.states
            if object_type in self.states:
                return self.states[object_type]
            return []

        def get_default_states(self):
            """ Return all the configured states for a generic item """
            return [s for s in self.default_states]

        def get_icon_state(self, object_type, status):
            """ Return the configured state for an object type """
            if not object_type or not status:
                return None

            status = status.lower()
            if status not in self.get_icon_states(object_type):
                return None

            for s in self.get_icon_states(object_type):
                if status == s:
                    return self.get_icon_states(object_type)[s]

            return None

        def get_html_state(self, object_type, object_item, extra='', icon=True, text='',
                           title='', disabled=False, size='', use_status=None):
            # pylint: disable=too-many-arguments
            # Yes, but it is needed ;)
            # pylint: disable=too-many-locals, too-many-return-statements
            # Yes, but else it will be quite difficult :/
            """
            Returns an item status as HTML text and icon if needed

            If parameters are not valid, returns 'n/a'

            If disabled is True, the class does not depend upon object status and is always
            text-muted

            If a title is specified, it will be used instead of the default built-in text.

            If object status contains '.' characters they are replaced with '_'

            Text and icon are defined in the application configuration file.

            :param size:
            :param disabled:
            :param title:
            :param object_type: element type
            :type object_type: string
            :param object_item: element
            :param extra: extra string replacing ##extra##, and set opacity to 0.5
            :type extra: string
            :param text: include text in the response
            :type text: string
            :param icon: include icon in the response
            :type icon: boolean
            :return: formatted status HTML string
            :rtype: string
            """
            if not object_type:  # pragma: no cover, should not happen
                return 'n/a - element'

            if not object_item:  # pragma: no cover, should not happen
                return 'n/a - object'

            if not icon and not text:
                return 'n/a - icon/text'

            # use_status overrides the item's own status attribute.
            status = object_item.status
            if use_status:
                status = use_status
            status = status.replace('.', '_').lower()
            if object_type in self.get_objects_types():
                if status not in self.get_icon_states(object_type):
                    return 'n/a - status: ' + status
            else:
                if status not in self.get_default_states():  # pragma: no cover, should not happen
                    return 'n/a - default status: ' + status

            # Unknown object types fall back to the "user" configuration.
            cfg_state = self.get_icon_state(object_type, status)
            if object_type not in self.get_objects_types() and status in self.get_default_states():
                cfg_state = self.get_icon_state("user", status)
            logger.debug("get_html_state, states: %s", cfg_state)

            cfg_state_view = self.get_icon_state(object_type, 'state_view')
            if object_type not in self.get_objects_types():
                cfg_state_view = self.get_icon_state("user", 'state_view')
            if not cfg_state_view:  # pragma: no cover, should not happen
                return 'n/a - cfg_state_view'
            # logger.debug("get_html_state, states view: %s", cfg_state_view)

            # Text
            res_icon_state = cfg_state['icon']
            res_icon_text = cfg_state['text']
            res_icon_class = 'item_' + cfg_state['class']
            res_text = res_icon_text

            if not icon:
                if text == '':
                    return res_text
                else:
                    return text

            # Icon
            res_icon_global = cfg_state_view['content']
            res_icon_back = cfg_state_view['back']
            res_icon_front = cfg_state_view['front']

            res_extra = ""
            if extra:
                res_extra = extra
            res_opacity = ""
            if extra:
                res_opacity = 'style="opacity: 0.5"'

            # Assembling ... substitute the ##placeholders## of the template.
            item_id = object_item.id
            res_icon = res_icon_global
            res_icon = res_icon.replace("##type##", object_type)
            res_icon = res_icon.replace("##id##", item_id)
            res_icon = res_icon.replace("##name##", object_item.alias)
            res_icon = res_icon.replace("##state##", object_item.get_state())
            res_icon = res_icon.replace("##back##", res_icon_back)
            res_icon = res_icon.replace("##front##", res_icon_front)
            res_icon = res_icon.replace("##status##", status.lower())
            res_icon = res_icon.replace("##size##", size)
            if not disabled:
                res_icon = res_icon.replace("##class##", res_icon_class)
            else:
                res_icon = res_icon.replace("##class##", "text-muted")

            res_icon = res_icon.replace("##icon##", res_icon_state)
            res_icon = res_icon.replace("##extra##", res_extra)
            res_icon = res_icon.replace("##opacity##", res_opacity)
            if not title:
                title = res_text
            if text is None:
                res_text = ''
            elif text != '':
                res_text = text
            if extra:
                res_text += extra
            res_icon = res_icon.replace("##title##", title)
            res_icon = res_icon.replace("##text##", res_text)
            logger.debug("get_html_state, res_icon: %s", res_icon)

            # Strip newlines so the HTML fragment is a single line.
            res_icon = res_icon.replace("\n", "")
            res_icon = res_icon.replace("\r", "")
            return res_icon

    # Shared singleton instance (an __ElementState, created on first call).
    instance = None

    def __new__(cls):
        if not ElementState.instance:
            ElementState.instance = ElementState.__ElementState()
        return ElementState.instance

    def get_html_state(self, object_type, object_item, extra='', icon=True, text='',
                       title='', disabled=False, size='', use_status=None):
        # pylint: disable=too-many-arguments
        """
        Base function used by Item objects

        Delegates to the singleton __ElementState implementation.
        """
        return self.instance.get_html_state(object_type, object_item,
                                            extra, icon, text, title, disabled, size,
                                            use_status)
| agpl-3.0 |
spacy-io/thinc | thinc/neural/_classes/rnn.py | 1 | 8999 | # coding: utf8
from __future__ import unicode_literals
from .model import Model
from ... import describe
from ...describe import Dimension, Synapses, Biases, Gradient
from ...api import wrap, layerize
from .._lsuv import svd_orthonormal
from ..util import copy_array
def BiLSTM(nO, nI):
    """Create a bidirectional LSTM layer. Args: number out, number in

    Each direction receives nO // 2 of the output width, so nO should be
    even for the concatenated output to actually have nO columns.
    """
    return Bidirectional(LSTM(nO // 2, nI), LSTM(nO // 2, nI))
def LSTM(nO, nI):
    """Create an LSTM layer. Args: number out, number in"""
    # Parameters (W, b, forget bias, initial state) live in LSTM_weights;
    # LSTM_gates applies the gate computation; Recurrent steps over time.
    weights = LSTM_weights(nO, nI)
    gates = LSTM_gates(weights.ops)
    return Recurrent(RNN_step(weights, gates))
def Bidirectional(l2r, r2l):
    """Stitch two RNN models into a bidirectional layer.

    The l2r model sees each sequence as-is; the r2l model sees it
    reversed.  Outputs are concatenated column-wise (l2r first).
    """
    nO = l2r.nO  # per-direction output width

    def birnn_fwd(Xs, drop=0.0):
        l2r_Zs, bp_l2r_Zs = l2r.begin_update(Xs, drop=drop)
        # NOTE(review): drop is not forwarded to the right-to-left pass —
        # confirm whether this asymmetry is intentional.
        r2l_Zs, bp_r2l_Zs = r2l.begin_update(
            [l2r.ops.xp.ascontiguousarray(X[::-1]) for X in Xs]
        )

        def birnn_bwd(dZs, sgd=None):
            d_l2r_Zs = []
            d_r2l_Zs = []
            for dZ in dZs:
                # Split the gradient into the two directional halves;
                # the r2l half must be re-reversed to match its inputs.
                l2r_fwd = dZ[:, :nO]
                r2l_fwd = dZ[:, nO:]
                d_l2r_Zs.append(l2r.ops.xp.ascontiguousarray(l2r_fwd))
                d_r2l_Zs.append(l2r.ops.xp.ascontiguousarray(r2l_fwd[::-1]))
            dXs_l2r = bp_l2r_Zs(d_l2r_Zs, sgd=sgd)
            dXs_r2l = bp_r2l_Zs(d_r2l_Zs, sgd=sgd)
            # Sum the two directions' input gradients, re-aligning r2l.
            dXs = [dXf + dXb[::-1] for dXf, dXb in zip(dXs_l2r, dXs_r2l)]
            return dXs

        # Concatenate per-timestep outputs, restoring r2l time order.
        Zs = [l2r.ops.xp.hstack((Zf, Zb[::-1])) for Zf, Zb in zip(l2r_Zs, r2l_Zs)]
        return Zs, birnn_bwd

    return wrap(birnn_fwd, l2r, r2l)
def Recurrent(step_model):
    """Apply a stepwise model over a sequence, maintaining state. For RNNs"""
    ops = step_model.ops

    def recurrent_fwd(seqs, drop=0.0):
        lengths = [len(X) for X in seqs]
        # Pack the ragged batch into a (time, batch, feature) block; at
        # step t only the first size_at_t[t] sequences are still active.
        X, size_at_t, unpad = ops.square_sequences(seqs)
        Y = ops.allocate((X.shape[0], X.shape[1], step_model.nO))
        # NOTE(review): dropout masks are requested with rate 0.0 here,
        # so `drop` appears unused — confirm against upstream thinc.
        cell_drop = ops.get_dropout_mask((len(seqs), step_model.nO), 0.0)
        hidden_drop = ops.get_dropout_mask((len(seqs), step_model.nO), 0.0)
        out_drop = ops.get_dropout_mask((len(seqs), step_model.nO), 0.0)
        backprops = [None] * max(lengths)
        state = step_model.weights.get_initial_state(len(seqs))
        for t in range(max(lengths)):
            state = list(state)
            size = size_at_t[t]
            Xt = X[t, :size]
            # Truncate the carried state to the still-active sequences.
            state[0] = state[0][:size]
            state[1] = state[1][:size]
            if cell_drop is not None:
                state[0] *= cell_drop
            if hidden_drop is not None:
                state[1] *= hidden_drop
            inputs = (state, Xt)
            (state, Y[t, :size]), backprops[t] = step_model.begin_update(inputs)
            if out_drop is not None:
                Y[t, :size] *= out_drop
        outputs = unpad(Y)

        def recurrent_bwd(d_outputs, sgd=None):
            dY, size_at_t, unpad = step_model.ops.square_sequences(d_outputs)
            # Gradient of the recurrent (cell, hidden) state, carried
            # backwards through time.
            d_state = [
                step_model.ops.allocate((dY.shape[1], step_model.nO)),
                step_model.ops.allocate((dY.shape[1], step_model.nO)),
            ]
            updates = {}

            def gather_updates(weights, gradient, key=None):
                # Collect (weights, gradient) pairs so the optimizer is
                # applied once after the whole backward pass.
                updates[key] = (weights, gradient)

            dX = step_model.ops.allocate(
                (dY.shape[0], dY.shape[1], step_model.weights.nI)
            )
            for t in range(max(lengths) - 1, -1, -1):
                if out_drop is not None:
                    dY[t] *= out_drop
                d_state_t, dXt = backprops[t]((d_state, dY[t]), sgd=gather_updates)
                d_state[0][: d_state_t[0].shape[0]] = d_state_t[0]
                d_state[1][: d_state_t[1].shape[0]] = d_state_t[1]
                dX[t, : dXt.shape[0]] = dXt
                if cell_drop is not None:
                    d_state[0] *= cell_drop
                if hidden_drop is not None:
                    d_state[1] *= hidden_drop
            # Remaining state gradient flows into the learned initial state.
            d_cell, d_hidden = d_state
            step_model.weights.d_initial_cells += d_cell.sum(axis=0)
            step_model.weights.d_initial_hiddens += d_hidden.sum(axis=0)
            if sgd is not None:
                for key, (weights, gradient) in updates.items():
                    sgd(weights, gradient, key=key)
            return unpad(dX)

        return outputs, recurrent_bwd

    model = wrap(recurrent_fwd, step_model)
    model.nO = step_model.nO
    return model
def RNN_step(weights, gates):
    """Create a step model for an RNN, given weights and gates functions."""

    def rnn_step_fwd(prevstate_inputs, drop=0.0):
        # One timestep: weights map (input, previous hidden) to gate
        # activations; gates combine them with the previous cell state.
        prevstate, inputs = prevstate_inputs
        cell_tm1, hidden_tm1 = prevstate

        acts, bp_acts = weights.begin_update((inputs, hidden_tm1), drop=drop)
        (cells, hiddens), bp_gates = gates.begin_update((acts, cell_tm1), drop=drop)

        def rnn_step_bwd(d_state_d_hiddens, sgd=None):
            # NOTE(review): d_hiddens is bound twice in this unpacking —
            # the value taken from the state tuple is immediately
            # overwritten by the outer element, so the recurrent hidden
            # gradient appears to be discarded.  Confirm against
            # upstream thinc before changing.
            (d_cells, d_hiddens), d_hiddens = d_state_d_hiddens
            d_acts, d_cell_tm1 = bp_gates((d_cells, d_hiddens), sgd=sgd)
            d_inputs, d_hidden_tm1 = bp_acts(d_acts, sgd=sgd)
            return (d_cell_tm1, d_hidden_tm1), d_inputs

        # Returns ((new state), output); for an LSTM the output is hiddens.
        return ((cells, hiddens), hiddens), rnn_step_bwd

    model = wrap(rnn_step_fwd, weights, gates)
    model.nO = weights.nO
    model.nI = weights.nI
    model.weights = weights
    model.gates = gates
    return model
def LSTM_gates(ops):
    """Build the layer that applies the LSTM gate math via ops.lstm()."""

    def lstm_gates_fwd(acts_prev_cells, drop=0.0):
        acts, prev_cells = acts_prev_cells
        new_cells = ops.allocate(prev_cells.shape)
        new_hiddens = ops.allocate(prev_cells.shape)
        # ops.lstm fills new_hiddens/new_cells in place from the gate
        # activations and the previous cell state.
        ops.lstm(new_hiddens, new_cells, acts, prev_cells)
        state = (new_cells, new_hiddens)
        size = new_cells.shape[0]

        def lstm_gates_bwd(d_state, sgd=None):
            d_cells, d_hiddens = d_state
            # Clip to the batch size seen in the forward pass.
            d_cells = d_cells[:size]
            d_hiddens = d_hiddens[:size]
            d_acts = [ops.allocate(act.shape) for act in acts]
            d_prev_cells = ops.allocate(prev_cells.shape)
            ops.backprop_lstm(
                d_cells, d_prev_cells, d_acts, d_hiddens, acts, new_cells, prev_cells
            )
            return d_acts, d_prev_cells

        return state, lstm_gates_bwd

    return layerize(lstm_gates_fwd)
def _uniform_init(lo, hi):
    """Return an initializer filling a parameter with Uniform(lo, hi) draws."""
    def initializer(W, ops):
        # Sample from the backend's numpy-like RNG and copy into place.
        copy_array(W, ops.xp.random.uniform(lo, hi, W.shape))
    return initializer
@describe.attributes(
    nO=Dimension("Output size"),
    nI=Dimension("Input size"),
    # Combined weights for all four gates: (4*nO, nI+nO), applied to the
    # concatenation of input and previous hidden state.
    W=Synapses(
        "Weights matrix",
        lambda obj: (obj.nO * 4, obj.nI + obj.nO),
        lambda W, ops: copy_array(W, svd_orthonormal(W.shape)),
    ),
    b=Biases("Bias vector", lambda obj: (obj.nO * 4,)),
    # Extra forget-gate bias, initialized to ones.
    forget_bias=Biases(
        "Bias for forget gates",
        lambda obj: (obj.nO,),
        lambda b, ops: copy_array(b, ops.xp.ones(b.shape, dtype=b.dtype)),
    ),
    d_W=Gradient("W"),
    d_b=Gradient("b"),
    d_forget_bias=Gradient("forget_bias"),
    # Learned initial state, drawn from Uniform(-0.1, 0.1).
    initial_hiddens=Biases(
        "Initial hiddens", lambda obj: (obj.nO,), _uniform_init(-0.1, 0.1)
    ),
    initial_cells=Biases(
        "Initial cells", lambda obj: (obj.nO,), _uniform_init(-0.1, 0.1)
    ),
    d_initial_hiddens=Gradient("initial_hiddens"),
    d_initial_cells=Gradient("initial_cells"),
)
class LSTM_weights(Model):
    """Parameter store / affine layer producing the four LSTM gate inputs."""

    def __init__(self, nO, nI):
        Model.__init__(self)
        self.nO = nO
        self.nI = nI

    def begin_update(self, inputs_hidden, drop=0.0):
        """Compute gate pre-activations for (inputs, previous hidden)."""
        inputs, hidden = inputs_hidden
        assert inputs.dtype == "float32"
        X = self.ops.xp.hstack([inputs, hidden])
        acts = self.ops.gemm(X, self.W, trans2=True) + self.b
        acts = self._split_activations(acts)
        # The forget gate gets its dedicated extra bias.
        acts[0] += self.forget_bias

        def bwd_lstm_weights(d_acts, sgd=None):
            self.d_forget_bias += d_acts[0].sum(axis=0)
            d_acts = self._merge_activations(d_acts)
            dX = self.ops.gemm(d_acts, self.W)
            self.d_W += self.ops.gemm(d_acts, X, trans1=True)
            self.d_b += d_acts.sum(axis=0)
            # Split the input gradient back into input vs. hidden parts.
            d_input = dX[:, : self.nI]
            d_hidden = dX[:, self.nI :]
            if sgd is not None:
                sgd(self._mem.weights, self._mem.gradient, key=self.id)
            return d_input, d_hidden

        return acts, bwd_lstm_weights

    def get_initial_state(self, n):
        """Broadcast the learned initial (cells, hiddens) to batch size n."""
        initial_cells = self.ops.allocate((n, self.nO))
        initial_hiddens = self.ops.allocate((n, self.nO))
        initial_cells += self.initial_cells
        initial_hiddens += self.initial_hiddens
        return (initial_cells, initial_hiddens)

    def _split_activations(self, acts):
        # (batch, 4*nO) -> list of four (batch, nO) gate blocks.
        acts = acts.reshape((acts.shape[0], 4, self.nO))
        acts = self.ops.xp.ascontiguousarray(acts.transpose((1, 0, 2)))
        return [acts[0], acts[1], acts[2], acts[3]]

    def _merge_activations(self, act_pieces):
        # Inverse of _split_activations: back to (batch, 4*nO).
        return self.ops.xp.hstack(act_pieces)
| mit |
elgambitero/FreeCAD_sf_master | src/Mod/Plot/plotSeries/__init__.py | 280 | 1806 | #***************************************************************************
#* *
#* Copyright (c) 2011, 2012 *
#* Jose Luis Cercos Pita <jlcercos@gmail.com> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
import TaskPanel
def load():
    """Load the tool by delegating to TaskPanel.createTask()."""
    TaskPanel.createTask()
| lgpl-2.1 |
berquist/PyQuante | PyQuante/IO/FormatHandlers/XYZ.py | 2 | 1987 | from PyQuante.IO.Data import Data
class Handler(object):
    """Reader/writer for the XYZ molecular geometry file format.

    Python 2 code (uses xrange and list-returning map).
    """
    key = "xyz"
    description = "XYZ File Format"
    ext = ".xyz"

    def read(self, string):
        """
        Parse one or more XYZ geometries from a string.

        Arguments:
        - string: String to parse

        Return:
        - data: Data object, with a molecule and a molecules
          attribute.
        """
        from PyQuante.Element import sym2no
        from PyQuante.Molecule import Molecule
        geometries = []
        igeo = 1
        lines = string.splitlines()
        # Each entry is: atom count line, title line, then one line per atom.
        while 1:
            try:
                line = lines.pop(0)
            except IndexError:
                break
            # A blank line ends the parse (note: also stops on blank
            # separators between entries).
            if not line: break
            nat = int(line.split()[0])
            title = lines.pop(0)
            atoms = []
            for i in xrange(nat):
                line = lines.pop(0)
                words = line.split()
                atno = sym2no[words[0]]
                x, y, z = map(float, words[1:])
                atoms.append((atno, (x, y, z)))
            atoms = Molecule("XYZ geometry #%d" % igeo, atoms)
            igeo += 1
            geometries.append(atoms)
        data = Data()
        data.molecule = geometries[0]  # First geometry
        data.molecules = geometries
        return data

    def write(self, data):
        """Serialize every molecule in *data* to XYZ text."""
        ret = ''
        if data.molecules:
            for molecule in data.molecules:
                ret += self.generate_entry(molecule)
        elif data.molecule:
            ret += self.generate_entry(data.molecule)
        else:
            raise ValueError("Nothing to generate")
        return ret

    def generate_entry(self, molecule):
        """Return one XYZ entry (count, title, atom lines) for a molecule."""
        from PyQuante.Element import symbol
        ret = ''
        ret += "%d\n%s\n" % (len(molecule.atoms), "XYZ File generated by PyQuante")
        for atom in molecule.atoms:
            atno, (x, y, z) = atom.atuple()
            ret += ("%4s %10.4f %10.4f %10.4f\n"
                    % (symbol[atno], x, y, z))
        return ret
| bsd-3-clause |
mollstam/UnrealPy | UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/boto-2.38.0/boto/cognito/sync/exceptions.py | 135 | 1648 | # Copyright (c) 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.exception import BotoServerError
class LimitExceededException(BotoServerError):
    """Cognito Sync service fault: ``LimitExceededException``."""
    pass
class ResourceConflictException(BotoServerError):
    """Cognito Sync service fault: ``ResourceConflictException``."""
    pass
class InvalidConfigurationException(BotoServerError):
    """Cognito Sync service fault: ``InvalidConfigurationException``."""
    pass
class TooManyRequestsException(BotoServerError):
    """Cognito Sync service fault: ``TooManyRequestsException``."""
    pass
class InvalidParameterException(BotoServerError):
    """Cognito Sync service fault: ``InvalidParameterException``."""
    pass
class ResourceNotFoundException(BotoServerError):
    """Cognito Sync service fault: ``ResourceNotFoundException``."""
    pass
class InternalErrorException(BotoServerError):
    """Cognito Sync service fault: ``InternalErrorException``."""
    pass
class NotAuthorizedException(BotoServerError):
    """Cognito Sync service fault: ``NotAuthorizedException``."""
    pass
| mit |
krafczyk/spack | var/spack/repos/builtin/packages/dyninst/package.py | 2 | 4619 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Dyninst(Package):
    """API for dynamic binary instrumentation. Modify programs while they
    are executing without recompiling, re-linking, or re-executing."""

    homepage = "https://paradyn.org"
    git = "https://github.com/dyninst/dyninst.git"

    version('develop', branch='master')
    version('9.3.2', tag='v9.3.2')
    version('9.3.0', tag='v9.3.0')
    version('9.2.0', tag='v9.2.0')
    version('9.1.0', tag='v9.1.0')
    version('8.2.1', tag='v8.2.1')
    version('8.1.2', tag='v8.1.2')
    version('8.1.1', tag='v8.1.1')

    variant('stat_dysect', default=False,
            description="patch for STAT's DySectAPI")

    # Dyninst depends on libelf and libdwarf prior to @9.3.0
    # Dyninst depends on elfutils and libdwarf from @9.3.0 to but
    # not including @develop
    # Dyninst depends on elfutils and elfutils libdw from @develop forward
    # elf@0 is an abstaction for libelf
    # elf@1 is an abstaction for elfutils
    depends_on("elf@0", type='link', when='@:9.2.99')
    # The sorting algorithm puts numbered releases as newer than alphabetic
    # releases, but spack has special logic in place to ensure that
    # develop is considered newer than all other releases.
    # So, develop is included in the elf@1 line below.
    depends_on("elf@1", type='link', when='@9.3.0:')
    depends_on("libdwarf", when='@:9')
    depends_on("boost@1.42:")
    depends_on('cmake', type='build')

    patch('stat_dysect.patch', when='+stat_dysect')
    patch('stackanalysis_h.patch', when='@9.2.0')

    # new version uses cmake
    def install(self, spec, prefix):
        # NOTE(review): @:8.1 is handled both by this early return and by
        # the @when('@:8.1') multimethod below — confirm which one Spack
        # actually dispatches to before removing either.
        if spec.satisfies('@:8.1'):
            configure("--prefix=" + prefix)
            make()
            make("install")
            return

        libelf = spec['elf'].prefix

        if spec.satisfies('@:9'):
            libdwarf = spec['libdwarf'].prefix

        with working_dir('spack-build', create=True):
            # Point CMake at the Boost and libelf installations explicitly.
            args = ['..',
                    '-DBoost_INCLUDE_DIR=%s' % spec['boost'].prefix.include,
                    '-DBoost_LIBRARY_DIR=%s' % spec['boost'].prefix.lib,
                    '-DBoost_NO_SYSTEM_PATHS=TRUE',
                    '-DLIBELF_INCLUDE_DIR=%s' % join_path(
                        libelf.include, 'libelf'),
                    '-DLIBELF_LIBRARIES=%s' % join_path(
                        libelf.lib, "libelf." + dso_suffix)]

            if spec.satisfies('@:9'):
                args.append('-DLIBDWARF_INCLUDE_DIR=%s' % libdwarf.include)
                args.append('-DLIBDWARF_LIBRARIES=%s' % join_path(
                    libdwarf.lib, "libdwarf." + dso_suffix))

            # For @develop + use elfutils libdw, libelf is an abstraction
            # we are really using elfutils here
            if spec.satisfies('@develop'):
                args.append('-DLIBDWARF_INCLUDE_DIR=%s' % libelf.include)
                args.append('-DLIBDWARF_LIBRARIES=%s' % join_path(
                    libelf.lib, "libdw." + dso_suffix))

            if spec.satisfies('arch=linux-redhat7-ppc64le'):
                args.append('-Darch_ppc64_little_endian=1')

            args += std_cmake_args
            cmake(*args)
            make()
            make("install")

    # Old (pre-cmake) releases use a plain configure/make build.
    @when('@:8.1')
    def install(self, spec, prefix):
        configure("--prefix=" + prefix)
        make()
        make("install")
| lgpl-2.1 |
eugeniashurko/ReGraph | docs/source/conf.py | 1 | 5928 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# ReGraph documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 4 16:51:07 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.githubpages',
'sphinx.ext.autosummary',
'numpydoc',
'sphinx.ext.mathjax'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'ReGraph'
copyright = '2017, Eugenia Oshurko, Yves-Stan Le Cornec'
author = 'Eugenia Oshurko, Yves-Stan Le Cornec'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': [
'about.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html',
'donate.html',
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'ReGraphdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'ReGraph.tex', 'ReGraph Documentation',
'Eugenia Oshurko, Yves-Stan Le Cornec', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'regraph', 'ReGraph Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'ReGraph', 'ReGraph Documentation',
author, 'ReGraph', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
| mit |
saeki-masaki/glance | glance/async/flows/convert.py | 5 | 3953 | # Copyright 2015 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from oslo_concurrency import processutils as putils
from oslo_config import cfg
from taskflow.patterns import linear_flow as lf
from taskflow import task
from glance import i18n
_ = i18n._
_LI = i18n._LI
_LE = i18n._LE
_LW = i18n._LW
LOG = logging.getLogger(__name__)
convert_task_opts = [
cfg.StrOpt('conversion_format',
default=None,
choices=('qcow2', 'raw', 'vmdk'),
help=_("The format to which images will be automatically "
"converted.")),
]
CONF = cfg.CONF
# NOTE(flaper87): Registering under the taskflow_executor section
# for now. It seems a waste to have a whole section dedicated to a
# single task with a single option.
CONF.register_opts(convert_task_opts, group='taskflow_executor')
class _Convert(task.Task):
    """Taskflow task that converts an imported image to the configured format.

    The conversion is done in place with ``qemu-img convert``: on success the
    converted file replaces the original download at ``file_path``.
    """

    # Class-level flag so the "conversion_format is None" warning is logged
    # only once per process, not once per task execution.
    conversion_missing_warned = False

    def __init__(self, task_id, task_type, image_repo):
        # :param task_id: id of the task this flow belongs to
        # :param task_type: type of the task (used to build the task name)
        # :param image_repo: image repository used by the flow
        self.task_id = task_id
        self.task_type = task_type
        self.image_repo = image_repo
        super(_Convert, self).__init__(
            name='%s-Convert-%s' % (task_type, task_id))

    def execute(self, image_id, file_path):
        """Convert the image at ``file_path`` to the configured format.

        :param image_id: id of the image being imported
        :param file_path: ``file://`` URI of the downloaded image data
        :returns: ``file_path`` unchanged, or ``None`` when no conversion
                  format is configured (the task is then a no-op)
        :raises RuntimeError: if ``qemu-img`` wrote anything to stderr
        """
        # NOTE(flaper87): A format must be explicitly
        # specified. There's no "sane" default for this
        # because the dest format may work differently depending
        # on the environment OpenStack is running in.
        conversion_format = CONF.taskflow_executor.conversion_format
        if conversion_format is None:
            if not _Convert.conversion_missing_warned:
                msg = (_LW('The conversion format is None, please add a value '
                           'for it in the config file for this task to '
                           'work: %s') %
                       self.task_id)
                LOG.warn(msg)
                _Convert.conversion_missing_warned = True
            return

        # TODO(flaper87): Check whether the image is in the desired
        # format already. Probably using `qemu-img` just like the
        # `Introspection` task.
        dest_path = os.path.join(CONF.task.work_dir, "%s.converted" % image_id)
        stdout, stderr = putils.trycmd('qemu-img', 'convert', '-O',
                                       conversion_format, file_path, dest_path,
                                       log_errors=putils.LOG_ALL_ERRORS)

        if stderr:
            raise RuntimeError(stderr)

        # Replace the original download with the converted file; strip the
        # "file://" scheme to get the filesystem path for os.rename.
        os.rename(dest_path, file_path.split("file://")[-1])
        return file_path

    def revert(self, image_id, result=None, **kwargs):
        """Remove the converted file when the flow is rolled back."""
        # NOTE(flaper87): If result is None, it probably
        # means this task failed. Otherwise, we would have
        # a result from its execution.
        if result is None:
            return

        fs_path = result.split("file://")[-1]
        if os.path.exists(fs_path):
            # Bug fix: the original called os.path.remove(), which does not
            # exist (AttributeError); files are deleted with os.remove().
            os.remove(fs_path)
def get_flow(**kwargs):
    """Build the linear task flow that converts images to other formats.

    Recognized keyword arguments:

    :param task_id: Task ID.
    :param task_type: Type of the task.
    :param image_repo: Image repository used.
    """
    convert_step = _Convert(kwargs.get('task_id'),
                            kwargs.get('task_type'),
                            kwargs.get('image_repo'))
    flow = lf.Flow(kwargs.get('task_type'))
    flow.add(convert_step)
    return flow
| apache-2.0 |
roisagiv/webrtc-ios | third_party/jsoncpp/scons-tools/targz.py | 264 | 3055 | """tarball
Tool-specific initialization for tarball.
"""
## Commands to tackle a command based implementation:
##to unpack on the fly...
##gunzip < FILE.tar.gz | tar xvf -
##to pack on the fly...
##tar cvf - FILE-LIST | gzip -c > FILE.tar.gz
import os.path
import SCons.Builder
import SCons.Node.FS
import SCons.Util
try:
import gzip
import tarfile
internal_targz = 1
except ImportError:
internal_targz = 0
TARGZ_DEFAULT_COMPRESSION_LEVEL = 9
if internal_targz:
def targz(target, source, env):
def archive_name( path ):
path = os.path.normpath( os.path.abspath( path ) )
common_path = os.path.commonprefix( (base_dir, path) )
archive_name = path[len(common_path):]
return archive_name
def visit(tar, dirname, names):
for name in names:
path = os.path.join(dirname, name)
if os.path.isfile(path):
tar.add(path, archive_name(path) )
compression = env.get('TARGZ_COMPRESSION_LEVEL',TARGZ_DEFAULT_COMPRESSION_LEVEL)
base_dir = os.path.normpath( env.get('TARGZ_BASEDIR', env.Dir('.')).abspath )
target_path = str(target[0])
fileobj = gzip.GzipFile( target_path, 'wb', compression )
tar = tarfile.TarFile(os.path.splitext(target_path)[0], 'w', fileobj)
for source in source:
source_path = str(source)
if source.isdir():
os.path.walk(source_path, visit, tar)
else:
tar.add(source_path, archive_name(source_path) ) # filename, arcname
tar.close()
targzAction = SCons.Action.Action(targz, varlist=['TARGZ_COMPRESSION_LEVEL','TARGZ_BASEDIR'])
def makeBuilder( emitter = None ):
return SCons.Builder.Builder(action = SCons.Action.Action('$TARGZ_COM', '$TARGZ_COMSTR'),
source_factory = SCons.Node.FS.Entry,
source_scanner = SCons.Defaults.DirScanner,
suffix = '$TARGZ_SUFFIX',
multi = 1)
TarGzBuilder = makeBuilder()
def generate(env):
"""Add Builders and construction variables for zip to an Environment.
The following environnement variables may be set:
TARGZ_COMPRESSION_LEVEL: integer, [0-9]. 0: no compression, 9: best compression (same as gzip compression level).
TARGZ_BASEDIR: base-directory used to determine archive name (this allow archive name to be relative
to something other than top-dir).
"""
env['BUILDERS']['TarGz'] = TarGzBuilder
env['TARGZ_COM'] = targzAction
env['TARGZ_COMPRESSION_LEVEL'] = TARGZ_DEFAULT_COMPRESSION_LEVEL # range 0-9
env['TARGZ_SUFFIX'] = '.tar.gz'
env['TARGZ_BASEDIR'] = env.Dir('.') # Sources archive name are made relative to that directory.
else:
def generate(env):
pass
def exists(env):
return internal_targz
| bsd-3-clause |
glatard/nipype | nipype/interfaces/camino/tests/test_auto_DT2NIfTI.py | 9 | 1184 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.camino.convert import DT2NIfTI
def test_DT2NIfTI_inputs():
    """Check that DT2NIfTI declares the expected input trait metadata."""
    expected = {
        'args': {'argstr': '%s'},
        'environ': {'nohash': True, 'usedefault': True},
        'header_file': {'argstr': '-header %s', 'mandatory': True,
                        'position': 3},
        'ignore_exception': {'nohash': True, 'usedefault': True},
        'in_file': {'argstr': '-inputfile %s', 'mandatory': True,
                    'position': 1},
        'output_root': {'argstr': '-outputroot %s', 'genfile': True,
                        'position': 2},
        'terminal_output': {'nohash': True},
    }
    inputs = DT2NIfTI.input_spec()

    # Generator-style test: one assert_equal per (trait, metadata) pair.
    for trait_name, meta in expected.items():
        trait = inputs.traits()[trait_name]
        for attr, expected_value in meta.items():
            yield assert_equal, getattr(trait, attr), expected_value
def test_DT2NIfTI_outputs():
    """Check that DT2NIfTI declares the expected output traits."""
    expected = {'dt': {}, 'exitcode': {}, 'lns0': {}}
    outputs = DT2NIfTI.output_spec()

    # Generator-style test: one assert_equal per (trait, metadata) pair.
    for trait_name, meta in expected.items():
        trait = outputs.traits()[trait_name]
        for attr, expected_value in meta.items():
            yield assert_equal, getattr(trait, attr), expected_value
| bsd-3-clause |
llasram/zmforth | zmerge.py | 2 | 2162 | #! /usr/bin/python
# Copyright (c) 2009 Marshall Vandegrift
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import sys
import os
from chunk import Chunk
import struct
from itertools import izip
class ZMergeError(Exception):
    """Raised when the input is not a valid IFF/QUETZAL save file."""
    pass
def main(argv=sys.argv):
    """Merge a QUETZAL save file back into a Z-machine story file.

    Usage: zmerge.py <story-in> <save.qzl> <story-out>

    Extracts the dynamic-memory chunk (CMem or UMem) from the save file,
    XORs it against the corresponding bytes of the input story file, and
    writes the result (original header + patched memory + untouched high
    memory) to the output story file.  Python 2 code: chunk data is read
    as byte strings (str).
    """
    zinpath, savpath, zoutpath = argv[1:]
    data = ''
    with open(savpath, 'rb') as savf:
        # A QUETZAL save is an IFF 'FORM' container of type 'IFZS'.
        form = Chunk(savf)
        if form.getname() != 'FORM':
            raise ZMergeError('not a valid IFF file')
        if form.read(4) != 'IFZS':
            raise ZMergeError('not a valid QUETZAL save file')
        # Skip sub-chunks until the memory chunk (compressed or not).
        mem = Chunk(form)
        while mem.getname() not in ('CMem', 'UMem'):
            mem.skip()
            mem = Chunk(form)
        if mem.getname() == 'UMem':
            # Uncompressed memory image: take the bytes as-is.
            data = mem.read()
        else:
            # CMem is run-length encoded: a 0x00 byte is followed by a
            # one-byte count of additional zero bytes.
            data = []
            byte = mem.read(1)
            while byte:
                data.append(byte)
                if byte == '\x00':
                    count = struct.unpack('B', mem.read(1))[0]
                    data.append(byte * count)
                byte = mem.read(1)
            data = ''.join(data)
    # Drop the first 0x40 bytes of the saved image; the header written to
    # the output is taken verbatim from the input story file instead.
    data = data[0x40:]
    with open(zinpath, 'rb') as zinf:
        header = zinf.read(0x40)
        statmem = zinf.read(len(data))
        himem = zinf.read()
    # Byte-wise XOR of saved memory with the original story bytes
    # (per the QUETZAL CMem convention, this reconstructs the saved
    # dynamic memory -- NOTE(review): confirm against the QUETZAL spec).
    statmem = ''.join([chr(ord(x) ^ ord(y)) for x, y in izip(data, statmem)])
    with open(zoutpath, 'wb') as zoutf:
        zoutf.write(header)
        zoutf.write(statmem)
        zoutf.write(himem)
    return 0
if __name__ == '__main__':
sys.exit(main())
| gpl-3.0 |
jbaginski/androguard | elsim/elsim/similarity/simhash.py | 44 | 2143 | """
Implementation of Charikar similarity hashes in Python.
Most useful for creating 'fingerprints' of documents or metadata
so you can quickly find duplicates or cluster items.
Part of python-hashes by sangelone. See README and LICENSE.
"""
from hashtype import hashtype
class simhash(hashtype):
    """Charikar similarity hash ("simhash") over a stream of tokens.

    The configurable bit length (``self.hashbits``) and the hamming-distance
    helper come from the ``hashtype`` base class.
    """

    def create_hash(self, tokens):
        """Calculates a Charikar simhash with appropriate bitlength.

        Input can be any iterable, but for strings it will automatically
        break it into words first, assuming you don't want to iterate
        over the individual characters.  Stores the fingerprint on
        ``self.hash`` and returns nothing.

        Reference used: http://dsrg.mff.cuni.cz/~holub/sw/shash
        """
        if type(tokens) == str:
            tokens = tokens.split()
        v = [0]*self.hashbits

        # Each token hash casts a +1/-1 vote on every bit position of the
        # fingerprint.  (The original code also had a dead "bitmask = 0"
        # store here, immediately overwritten in the inner loop; removed.)
        for t in [self._string_hash(x) for x in tokens]:
            for i in xrange(self.hashbits):
                bitmask = 1 << i
                if t & bitmask:
                    v[i] += 1
                else:
                    v[i] -= 1

        # Positions with a non-negative vote total become 1 bits.
        fingerprint = 0
        for i in xrange(self.hashbits):
            if v[i] >= 0:
                fingerprint += 1 << i
        self.hash = fingerprint

    def _string_hash(self, v):
        """A variable-length version of Python's builtin hash, masked to
        ``self.hashbits`` bits."""
        if v == "":
            return 0
        else:
            x = ord(v[0])<<7
            m = 1000003
            mask = 2**self.hashbits-1
            for c in v:
                x = ((x*m)^ord(c)) & mask
            x ^= len(v)
            # -1 is reserved as an error value by CPython's hash protocol.
            if x == -1:
                x = -2
            return x

    def similarity(self, other_hash):
        """Calculate how different this hash is from another simhash.
        Returns a float from 0.0 to 1.0 (inclusive)
        """
        if type(other_hash) != simhash:
            raise Exception('Hashes must be of same type to find similarity')
        b = self.hashbits
        if b != other_hash.hashbits:
            raise Exception('Hashes must be of equal size to find similarity')
        return float(b - self.hamming_distance(other_hash)) / b
| apache-2.0 |
CuonDeveloper/cuon | cuon_client/cuon/bin/Adressenconv.py | 5 | 2725 | # -*- coding: utf-8 -*-
import sys,os
notconvert=['Autoelektrik','Brauereibedarf', 'Bildhauer', 'Bauelemente', 'Feuerwehren']
format = sys.argv[1]
CalcFiles = os.listdir('./')
print CalcFiles
for onefile in CalcFiles:
if onefile[len(onefile)-4:] == '.xls':
os.system('unoconvCSV.py ' + onefile)
CalcFiles = os.listdir('./')
print CalcFiles
for onefile in CalcFiles:
print "file in bearbeitung", onefile
sPath = os.path.abspath(os.getcwd())
fname = onefile.split('.')[0]
Branche = fname
if onefile[len(onefile)-4:] == '.csv':
print 'found csv file'
if Branche in notconvert:
pass
else:
Branche = Branche.replace("ae", "ä")
Branche = Branche.replace("ue", "ü")
Branche = Branche.replace("oe", "ö")
print "replaces :------------------------>>>>>" , Branche
Branche = Branche.replace("_", " ")
infile = open(onefile)
outfile = open(fname + "typ", "a")
s1 = infile.readline()
while s1:
s1 = s1[:len(s1)-1] + ";"+ '"' + Branche +'"\n'
s1 = s1.replace("+49 (", "0")
s1 = s1.replace(") ", "/")
outfile.write(s1)
s1 = infile.readline()
infile.close
outfile.close()
os.system('mv ' + fname + "typ" + " " + onefile)
fname + ".ctrl"
ctrlfile = open( fname + ".ctrl", "a")
ctrlfile.write('filename = '+ sPath +"/" + onefile + "\n")
ctrlfile.write('table = address' + "\n")
ctrlfile.write('header = yes' + "\n")
ctrlfile.write('splitvalue = ;' + "\n")
ctrlfile.write('from_changed_value = "' + "\n")
ctrlfile.write("decode_data = 'utf-8'" + "\n")
ctrlfile.write('check_update_field = phone' + "\n")
ctrlfile.write('column = address,string' + "\n")
ctrlfile.write('column = firstname,string' + "\n")
ctrlfile.write('column = lastname,string' + "\n")
ctrlfile.write('column = street,string' + "\n")
ctrlfile.write('column = city,string' + "\n")
ctrlfile.write('column = zip,string' + "\n")
ctrlfile.write('column = phone,string' + "\n")
ctrlfile.write('column = fax,string' + "\n")
ctrlfile.write('column = phone_handy,string' + "\n")
ctrlfile.write('column = email,string' + "\n")
ctrlfile.write('column = homepage_url,string' + "\n")
ctrlfile.write('column = lastname2,string' + "\n")
ctrlfile.close()
#except Exception, params:
# print Exception, params
| gpl-3.0 |
DarrenRainey/gpicsync | gpicsync-GUI.py | 19 | 71580 | #!/usr/bin/python
# -*- coding: utf-8 -*-
###############################################################################
#
# Developer: francois.schnell francois.schnell@gmail.com
# http://francois.schnell.free.fr
#
# Contributors, see: http://code.google.com/p/gpicsync/wiki/Contributions
#
# This application is released under the GPL license version 2
#
# More informations and help can be found here: http://code.google.com/p/gpicsync/
#
################################################################################
"""
GUI Part of GPicSync, a Free Software tool to geolocalize informations from:
- a GPS (.gpx file)
- pictures from a camera
The resulting pictures have latitude/longitude informations in their EXIF
meta-data and can be used with software or webservice which can read them
(like Flickr or Google Earth)
More informations at this URL:
http://code.google.com/p/gpicsync/
"""
# trying wxpython 2.9.3.1 for unicode geonames problems
from unidecode import unidecode # unicode to ascii (see issue 117 regarding Python 2.x and command line to exiftool containing unicode)
if 0:
import wxversion
wxversion.select("2.9")
import wx,wx.lib.colourdb
print wx.VERSION
import time,decimal,gettext,shutil,ConfigParser
import os,sys,fnmatch,zipfile,subprocess
import traceback
if sys.platform == 'win32':
import win32com.client
from thread import start_new_thread
from PIL import Image
from PIL import JpegImagePlugin
from PIL import GifImagePlugin
from geoexif import *
from gpx import *
from gpicsync import *
from kmlGen import *
from geonames import *
import locale
codeset = locale.getdefaultlocale()[1]
if 1: # checking wx version installed (for unicde dev)
import wxversion
print "wxversion", wxversion.getInstalled()
print "Python verion is",(sys.version)
try:
import pytz
except ImportError:
print "couldn't import pytz"
timezones = []
else:
timezones = pytz.common_timezones
if 0: # tests
import pytz
timezones = pytz.common_timezones
print timezones
fzones=open("zones.txt","w")
fzones.write(str(timezones))
fzones.close()
class GUI(wx.Frame):
"""Main Frame of GPicSync"""
def __init__(self,parent, title):
"""Initialize the main frame"""
global bkg
wx.Frame.__init__(self, parent, wx.ID_ANY, title="GPicSync",size=(1000,600))
favicon = wx.Icon('gpicsync.ico', wx.BITMAP_TYPE_ICO, 16, 16)
wx.Frame.SetIcon(self, favicon)
self.tcam_l="00:00:00"
self.tgps_l="00:00:00"
self.log=False
self.stop=False
self.interpolation=False
self.picDir=""
self.timezone=None
self.utcOffset="0"
self.backup=True
self.picDirDefault=""
self.GMaps=False
self.urlGMaps=""
self.geonamesTags=False
self.geoname_nearbyplace=True
self.geoname_region=True
self.geoname_country=True
self.geoname_summary=True
self.geoname_caption=True
self.datesMustMatch=True
self.geoname_userdefine=""
self.maxTimeDifference="300"
self.language="English"
self.timeStamp=False
self.defaultLat="0.000000"
self.defaultLon="0.000000"
self.geoname_IPTCsummary=""
# Search for an eventual gpicsync.conf file
configFile=False
if sys.platform=="win32":
confPath=os.environ["ALLUSERSPROFILE"]+"/gpicsync.conf"
print "Searching configuration file "+confPath
if os.path.isfile(confPath):
configFile=True
fconf=open(os.environ["ALLUSERSPROFILE"]+"/gpicsync.conf","r+")
else: configFile= False
if sys.platform==("linux2" or "darwin"):
confPath=os.path.expanduser("~/.gpicsync.conf")
print "Searching configuration file ~/.gpicsync.conf"
if os.path.isfile(confPath):
configFile=True
fconf=open(os.path.expanduser("~/.gpicsync.conf"),"r+")
else: configFile=False
if configFile==False:
print "Couldn't find the configuration file."
dialog=wx.MessageDialog(self,message="Couldn't find the configuration file",
style=wx.OK|wx.CANCEL|wx.ICON_INFORMATION)
dialog.ShowModal()
wx.CallAfter(self.consolePrint,"\n"+"Couldn't find the configuration file."+"\n")
print "Attempting to read the configuration file..."
#try:
if configFile!=False:
conf= ConfigParser.ConfigParser()
conf.readfp(fconf) #parse the config file
if conf.has_option("gpicsync","timezone") == True:
self.timezone=conf.get("gpicsync","timezone")
if self.timezone=="": self.timezone=None
print "Timezone is :"+str(self.timezone)
if conf.has_option("gpicsync","UTCOffset") == True:
self.utcOffset=conf.get("gpicsync","utcoffset")
if conf.has_option("gpicsync","backup") == True:
self.backup=eval(conf.get("gpicsync","backup"))
if conf.has_option("gpicsync","urlGMaps") == True:
self.urlGMaps=conf.get("gpicsync","urlGMaps")
if conf.has_option("gpicsync","geonamesTags") == True:
self.geonamesTags=eval(conf.get("gpicsync","geonamesTags"))
if conf.has_option("gpicsync","interpolation") == True:
self.interpolation=eval(conf.get("gpicsync","interpolation"))
if conf.has_option("gpicsync","datesMustMatch") == True:
self.datesMustMatch=eval(conf.get("gpicsync","datesMustMatch"))
if conf.has_option("gpicsync","log") == True:
self.log=eval(conf.get("gpicsync","log"))
if conf.has_option("gpicsync","GMaps") == True:
self.GMaps=eval(conf.get("gpicsync","GMaps"))
if conf.has_option("gpicsync","UTCOffset") == True:
self.utcOffset=conf.get("gpicsync","UTCOffset")
if conf.has_option("gpicsync","maxTimeDifference") == True:
self.maxTimeDifference=conf.get("gpicsync","maxTimeDifference")
if conf.has_option("gpicsync","language") == True:
self.language=conf.get("gpicsync","language")
if conf.has_option("gpicsync","geoname_nearbyplace") == True:
self.geoname_nearbyplace=eval(conf.get("gpicsync","geoname_nearbyplace"))
if conf.has_option("gpicsync","geoname_region") == True:
self.geoname_region=eval(conf.get("gpicsync","geoname_region"))
if conf.has_option("gpicsync","geoname_country") == True:
self.geoname_country=eval(conf.get("gpicsync","geoname_country"))
if conf.has_option("gpicsync","geoname_summary") == True:
self.geoname_summary=eval(conf.get("gpicsync","geoname_summary"))
if conf.has_option("gpicsync","geoname_userdefine") == True:
self.geoname_userdefine=conf.get("gpicsync","geoname_userdefine")
if conf.has_option("gpicsync","geoname_caption") == True:
self.geoname_caption=eval(conf.get("gpicsync","geoname_caption"))
if conf.has_option("gpicsync","geoname_IPTCsummary") == True:
self.geoname_IPTCsummary=conf.get("gpicsync","geoname_IPTCsummary")
if conf.has_option("gpicsync","defaultdirectory") == True:
self.picDir=conf.get("gpicsync","defaultdirectory")
if conf.has_option("gpicsync","getimestamp") == True:
self.timeStamp=eval(conf.get("gpicsync","getimestamp"))
fconf.close()
#except:
if 0:
wx.CallAfter(self.consolePrint,"\n"
+"An error happened while reading the configuration file."+"\n")
try:
#print self.language
locale_dir="locale"
if self.language=="system":
lang = gettext.translation('gpicsync-GUI', locale_dir, codeset=codeset)
lang.install()
elif self.language=="French":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['fr'], codeset=codeset)
lang.install()
elif self.language=="Italian":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['it'], codeset=codeset)
lang.install()
elif self.language=="German":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['de'], codeset=codeset)
lang.install()
elif self.language=="S.Chinese":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['zh_CN'], codeset=codeset)
lang.install()
elif self.language=="T.Chinese":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['zh_TW'], codeset=codeset)
lang.install()
elif self.language=="Catalan":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['ca'], codeset=codeset)
lang.install()
elif self.language=="Spanish":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['es'], codeset=codeset)
lang.install()
elif self.language=="Polish":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['pl'], codeset=codeset)
lang.install()
elif self.language=="Dutch":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['nl'], codeset=codeset)
lang.install()
elif self.language=="Portuguese":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['pt'], codeset=codeset)
lang.install()
elif self.language=="Czech":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['cs'], codeset=codeset)
lang.install()
elif self.language=="Russian":
lang = gettext.translation('gpicsync-GUI', locale_dir, languages=['ru'], codeset=codeset)
lang.install()
else:
gettext.install('gpicsync-GUI', "None")
except:
print "Couldn't load translation."
del locale_dir
##### Menus #####
bkg=wx.Panel(self)
#bkg.SetBackgroundColour((244,180,56))
menuBar=wx.MenuBar()
menu1=wx.Menu()
timeShift=menu1.Append(wx.NewId(),_("Local time correction"))
languageChoice=menu1.Append(wx.NewId(),_("Language"))
self.Bind(wx.EVT_MENU,self.languageApp,languageChoice)
if sys.platform == 'win32':
configFile=menu1.Append(wx.NewId(),_("Configuration file"))
self.Bind(wx.EVT_MENU,self.showConfig,configFile)
menuBar.Append(menu1,_("&Options"))
menu2=wx.Menu()
about=menu2.Append(wx.NewId(),_("About..."))
menuTools=wx.Menu()
menuBar.Append(menuTools,_("&Tools"))
exifReader=menuTools.Append(wx.NewId(),_("EXIF reader"))
exifGeoWriter=menuTools.Append(wx.NewId(),_("EXIF writer"))
renameToolMenu=menuTools.Append(wx.NewId(),_("Geo-Rename pictures"))
gpxInspectorMenu=menuTools.Append(wx.NewId(),_("GPX Inspector"))
kmzGeneratorMenu=menuTools.Append(wx.NewId(),_("KMZ Generator"))
menuBar.Append(menu2,_("&Help"))
statusBar=self.CreateStatusBar()
self.Bind(wx.EVT_MENU,self.localtimeFrame,timeShift)
self.Bind(wx.EVT_MENU,self.aboutApp,about)
self.Bind(wx.EVT_MENU,self.exifFrame,exifReader)
self.Bind(wx.EVT_MENU,self.geoWriterFrame,exifGeoWriter)
self.Bind(wx.EVT_MENU,self.renameFrame,renameToolMenu)
self.Bind(wx.EVT_MENU,self.gpxInspectorFrame,gpxInspectorMenu)
self.Bind(wx.EVT_MENU,self.kmzGeneratorFrame,kmzGeneratorMenu)
##### Mains panel widgets definitions #####
# Pictures dir and Gpx search buttons
dirButton=wx.Button(bkg,size=(150,-1),label=_("Pictures folder"))
gpxButton=wx.Button(bkg,size=(150,-1),label=_("GPS file"))
self.dirEntry=wx.TextCtrl(bkg)
self.gpxEntry=wx.TextCtrl(bkg)
self.Bind(wx.EVT_BUTTON, self.findPictures, dirButton)
self.Bind(wx.EVT_BUTTON, self.findGpx, gpxButton)
# Commands buttons (sync,quit,stop,etc)
syncButton=wx.Button(bkg,size=(250,-1),label=_(" Synchronise ! "))
quitButton=wx.Button(bkg,label=_("Quit"),size=(-1,-1))
quitAndSaveButton=wx.Button(bkg,label=_("Quit and save settings"),size=(-1,-1))
stopButton=wx.Button(bkg,label=_("Stop"),size=(-1,-1))
clearButton=wx.Button(bkg,label=_("Clear"),size=(-1,-1))
viewInGEButton=wx.Button(bkg,label=_("View in Google Earth"),size=(-1,-1))
self.Bind(wx.EVT_BUTTON, self.syncPictures, syncButton)
self.Bind(wx.EVT_BUTTON, self.exitApp,quitButton)
self.Bind(wx.EVT_BUTTON, self.exitAppSave,quitAndSaveButton)
self.Bind(wx.EVT_BUTTON, self.stopApp,stopButton)
self.Bind(wx.EVT_BUTTON, self.clearConsole,clearButton)
self.Bind(wx.EVT_BUTTON, self.viewInGE,viewInGEButton)
# Main Options box
optionPrebox=wx.StaticBox(bkg, -1, _("Options:"))
optionbox=wx.StaticBoxSizer(optionPrebox, wx.VERTICAL)
# Elevation options
eleLabel=wx.StaticText(bkg, -1," "+_("Elevation")+":")
eleList=[_("Clamp to the ground"),
_("absolute value (for flights)"),_("absolute value + extrude (for flights)")]
self.elevationChoice=wx.Choice(bkg, -1, (-1,-1), choices = eleList)
self.elevationChoice.SetSelection(0)
# Google Earth Icons choice
iconsLabel=wx.StaticText(bkg, -1," "+_("Icons")+":")
iconsList=[_("picture thumb"),
_("camera icon")]
self.iconsChoice=wx.Choice(bkg, -1, (-1,-1), choices = iconsList)
self.iconsChoice.SetSelection(0)
# Geonames options
tmp1=_("Geonames in specific IPTC fields")
tmp2=_("Geonames in XMP format")
gnOptList=[_("Geonames in IPTC + HTML Summary in IPTC caption"),_("Geonames in IPTC"),
_("Geonames/geotagged in EXIF keywords + HTML summary in IPTC caption"),_("Geonames/geotagged in EXIF keywords")]
self.gnOptChoice=wx.Choice(bkg, -1, (-1,-1), choices = gnOptList)
self.gnOptChoice.SetSelection(0)
# UTC value and timezone
self.utcLabel = wx.StaticText(bkg, -1,_("UTC Offset="))
self.utcEntry=wx.TextCtrl(bkg,size=(40,-1))
self.utcEntry.SetValue(self.utcOffset)
if timezones:
#if 1:
tzLabel = wx.StaticText(bkg, -1,_("Select time zone:"))
self.tzButton = wx.Button(bkg, -1, _("Manual UTC offset"), size=(150,-1), style=wx.BU_LEFT)
if self.timezone:
self.tzButton.SetLabel(self.timezone)
self.utcLabel.Disable()
self.utcEntry.Disable()
self.tzMenu = wx.Menu()
manualTZmenu = self.tzMenu.Append(wx.NewId(), _("Manual UTC offset"))
self.Bind(wx.EVT_MENU, self.manualTZ, manualTZmenu)
tz_regions = {}
for i,item in enumerate(timezones):
items = item.split('/')
reg = ""
menu = self.tzMenu
for r in items[:-1]:
reg += '/' + r
if reg not in tz_regions:
newmenu = wx.Menu()
menu.AppendMenu(-1, r, newmenu)
menu = newmenu
tz_regions[reg] = menu
else:
menu = tz_regions[reg]
z = items[-1]
menu.Append(3000+i, z)
self.Bind(wx.EVT_MENU, self.selectTZ, id=3000+i)
self.Bind(wx.EVT_BUTTON, self.tzMenuPopup, self.tzButton)
# Timerange
timerangeLabel=wx.StaticText(bkg, -1,_("Geocode picture only if time difference to nearest track point is below (seconds)="))
self.timerangeEntry=wx.TextCtrl(bkg,size=(40,-1))
self.timerangeEntry.SetValue(self.maxTimeDifference)
# Log file, dateCheck (deprecated)
self.logFile=wx.CheckBox(bkg,-1,_("Create a log file in picture folder"))
self.logFile.SetValue(self.log)
self.dateCheck=wx.CheckBox(bkg,-1,_("Dates must match"))
self.dateCheck.SetValue(self.datesMustMatch)
self.dateCheck.Hide()
# Google Earth and Google Maps
self.geCheck=wx.CheckBox(bkg,-1,_("Create a Google Earth file")+": ")
self.geCheck.SetValue(True)
self.geCheck.Hide()
geInfoLabel=wx.StaticText(bkg, -1," "+"[Google Earth]->")
self.geTStamps=wx.CheckBox(bkg,-1,_("with TimeStamp"))
self.geTStamps.SetValue(self.timeStamp)
self.gmCheck=wx.CheckBox(bkg,-1,_("Google Maps export, folder URL="))
self.gmCheck.SetValue(self.GMaps)
self.urlEntry=wx.TextCtrl(bkg,size=(500,-1))
self.urlEntry.SetValue(self.urlGMaps)
# backup, interpolations mod and geonames
self.backupCheck=wx.CheckBox(bkg,-1,_("backup pictures"))
self.backupCheck.SetValue(self.backup)
self.interpolationCheck=wx.CheckBox(bkg,-1,_("interpolation"))
self.interpolationCheck.SetValue(self.interpolation)
self.geonamesCheck=wx.CheckBox(bkg,-1,_("add geonames and geotagged"))
self.geonamesCheck.SetValue(self.geonamesTags)
# Main output text console
self.consoleEntry=wx.TextCtrl(bkg,style=wx.TE_MULTILINE | wx.HSCROLL)
##### GUI LAYOUT / SIZERS #####
# directory and GPX choices sizer
dirChoiceBox=wx.BoxSizer()
dirChoiceBox.Add(dirButton,proportion=0,flag=wx.LEFT,border=5)
dirChoiceBox.Add(self.dirEntry,proportion=1,flag=wx.EXPAND)
gpxChoiceBox=wx.BoxSizer()
gpxChoiceBox.Add(gpxButton,proportion=0,flag=wx.LEFT,border=5)
gpxChoiceBox.Add(self.gpxEntry,proportion=1,flag=wx.EXPAND)
# Google Earth elevation and time stamp horizontal sizer
gebox=wx.BoxSizer()
gebox.Add(geInfoLabel,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL,border=10)
gebox.Add(iconsLabel,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=10)
gebox.Add(self.iconsChoice,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=10)
gebox.Add(eleLabel,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=10)
gebox.Add(self.elevationChoice,flag= wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=10)
gebox.Add(self.geTStamps,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL, border=10)
# Google maps export and associated URL
gmbox=wx.BoxSizer()
gmbox.Add(self.gmCheck,proportion=0,flag=wx.EXPAND| wx.LEFT,border=10)
gmbox.Add(self.urlEntry,proportion=0,flag=wx.EXPAND| wx.ALL,border=1)
# line with log check, interpolation check and backup check
settingsbox=wx.BoxSizer()
settingsbox.Add(self.logFile,proportion=0,flag=wx.LEFT| wx.ALL,border=10)
#settingsbox.Add(self.dateCheck,proportion=0,flag=wx.LEFT| wx.ALL,border=10)
settingsbox.Add(self.interpolationCheck,proportion=0,flag=wx.LEFT| wx.ALL,border=10)
settingsbox.Add(self.backupCheck,proportion=0,flag=wx.EXPAND| wx.ALL,border=10)
# Image preview box
prebox=wx.StaticBox(bkg, -1, _("Image preview:"),size=(200,200))
previewbox=wx.StaticBoxSizer(prebox, wx.VERTICAL)
self.imgWhite=wx.Image('default.jpg', wx.BITMAP_TYPE_ANY).ConvertToBitmap()
self.imgPrev=wx.StaticBitmap(bkg,-1,self.imgWhite,size=(160,160))#style=wx.SIMPLE_BORDER
previewbox.Add(self.imgPrev, 0, flag= wx.ALIGN_CENTER_VERTICAL|wx.ALIGN_CENTER_HORIZONTAL,border=10)
# Geonames line
gnhbox=wx.BoxSizer()
gnhbox.Add(self.geonamesCheck,proportion=0,flag=wx.EXPAND| wx.LEFT,border=10)
gnhbox.Add(self.gnOptChoice,proportion=0,flag=wx.EXPAND| wx.LEFT,border=10)
# UTC and timezone line
utcBox=wx.BoxSizer()
if timezones:
utcBox.Add(tzLabel,proportion=0,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL,border=10)
utcBox.Add(self.tzButton,proportion=0,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL,border=10)
utcBox.Add(self.utcLabel,proportion=0,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL,border=10)
utcBox.Add(self.utcEntry,proportion=0,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL,border=10)
# Timerange line
rangeBox = wx.BoxSizer()
rangeBox.Add(timerangeLabel,proportion=0,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL,border=10)
rangeBox.Add(self.timerangeEntry,proportion=0,flag=wx.LEFT|wx.ALIGN_CENTER_VERTICAL,border=10)
# commands line
commandsBox=wx.BoxSizer()
commandsBox.Add(syncButton,proportion=0,flag=wx.LEFT,border=5)
commandsBox.Add(stopButton,proportion=0,flag=wx.LEFT,border=5)
commandsBox.Add(clearButton,proportion=0,flag=wx.LEFT,border=5)
commandsBox.Add(viewInGEButton,proportion=0,flag=wx.LEFT,border=5)
commandsBox.Add(quitButton,proportion=0,flag=wx.LEFT,border=5)
commandsBox.Add(quitAndSaveButton,proportion=0,flag=wx.LEFT,border=5)
# select picture directory and GPX box
headerbox=wx.BoxSizer(wx.VERTICAL)
headerbox.Add(dirChoiceBox,proportion=0,flag=wx.EXPAND | wx.ALL,border=5)
headerbox.Add(gpxChoiceBox,proportion=0,flag=wx.EXPAND | wx.ALL,border=5)
optionbox.Add(gebox,proportion=0,flag=wx.ALL,border=7)
optionbox.Add(gmbox,proportion=0,flag=wx.ALL,border=7)
optionbox.Add(settingsbox,proportion=0,flag=wx.ALL,border=7)
optionbox.Add(gnhbox,proportion=0,flag=wx.ALL,border=7)
# Options box + picture preview sizer
middlebox=wx.BoxSizer()
middlebox.Add(optionbox,proportion=1,flag=wx.LEFT,border=15)
middlebox.Add(previewbox,proportion=0,flag=wx.LEFT,border=20)
footerbox=wx.BoxSizer(wx.VERTICAL)
footerbox.Add(utcBox,proportion=0,flag=wx.EXPAND | wx.ALL,border=5)
footerbox.Add(rangeBox,proportion=0,flag=wx.EXPAND | wx.ALL,border=5)
footerbox.Add(commandsBox,proportion=0,flag=wx.EXPAND | wx.ALL,border=5)
footerbox.Add(self.consoleEntry,proportion=1,flag=wx.EXPAND | wx.LEFT, border=5)
allBox= wx.BoxSizer(wx.VERTICAL)
allBox.Add(headerbox,proportion=0,flag=wx.EXPAND | wx.ALL,border=5)
allBox.Add(middlebox,proportion=0,flag=wx.EXPAND | wx.ALL,border=5)
allBox.Add(footerbox,proportion=1,flag=wx.EXPAND | wx.ALL,border=5)
#bkg.SetSizer(vbox)
bkg.SetSizer(allBox)
self.SetMenuBar(menuBar)
self.Show(True)
if sys.platform == 'darwin':
self.SetSize(self.GetSize()+(100,50))
if sys.platform == 'win32':
self.exifcmd = 'exiftool.exe'
else:
self.exifcmd = 'exiftool'
def writeConfFile(self):
        """Write the whole configuration file (gpicsync.conf).

        The Windows all-users profile location is tried first; if it is
        unavailable (no ALLUSERSPROFILE variable or unreadable file) a
        per-user dotfile in the home directory is used instead.
        """
        try:
            # Windows: shared config in the all-users profile folder.
            fconf=open(os.environ["ALLUSERSPROFILE"]+"/gpicsync.conf","r+")
        except (KeyError,IOError):
            # Fallback: per-user dotfile (created fresh in "w" mode).
            fconf=open(os.path.expanduser("~/.gpicsync.conf"),"w")
        header="#This is a configuration file for GPicSync geocoding software\n"+\
        "#Read the comments below to see what you can set. Boolean value (True or False) and\n"+\
        "#the default language option must always begin with a Capital Letter\n\n[gpicsync]\n\n"
        fconf.write(header)
        fconf.write("#Default language at start-up that you can also change in 'options'>'languages'\n")
        fconf.write("language="+self.language+"\n\n")
        fconf.write("#Default Time Zone\n")
        if self.timezone:
            fconf.write("timezone="+str(self.timezone)+"\n\n")
        else:
            fconf.write("timezone=\n\n")
        fconf.write("#Default UTC Offset\n")
        fconf.write("utcoffset="+self.utcEntry.GetValue()+"\n\n")
        fconf.write("#geocode picture only if time difference to nearest trackpoint is below X seconds\n")
        fconf.write("maxtimedifference="+str(self.timerangeEntry.GetValue())+"\n\n")
        fconf.write("#Backup pictures by default (True or False)\n")
        fconf.write("backup="+str(self.backupCheck.GetValue())+"\n\n")
        fconf.write("#geolocalize pictures by default only if dates match by default (True or False)\n")
        fconf.write("datesmustmatch="+str(self.dateCheck.GetValue())+"\n\n")
        fconf.write("#Enable TimeStamp option for the Google Earth doc.kml file (True or False)\n")
        fconf.write("getimestamp="+str(self.geTStamps.GetValue())+"\n\n")
        fconf.write("#Create a Google Map export (doc-web.kml) by default (True or False)\n")
        fconf.write("gmaps="+str(self.gmCheck.GetValue())+"\n\n")
        fconf.write("#Default base URL for Google Maps export\n")
        fconf.write("urlgmaps="+self.urlEntry.GetValue()+"\n\n")
        fconf.write("#Use the interpolation mode by default (True or False)\n")
        fconf.write("interpolation="+str(self.interpolationCheck.GetValue())+"\n\n")
        fconf.write("#Create a log file by default\n")
        fconf.write("log="+str(self.logFile.GetValue())+"\n\n")
        fconf.write("#Add geonames and geotagged in EXIF by default (True or False) and select the ones you want\n")
        fconf.write("geonamestags="+str(self.geonamesCheck.GetValue())+"\n")
        fconf.write("geoname_nearbyplace="+str(self.geoname_nearbyplace)+"\n")
        fconf.write("geoname_region="+str(self.geoname_region)+"\n")
        fconf.write("geoname_country="+str(self.geoname_country)+"\n")
        fconf.write("geoname_summary="+str(self.geoname_summary)+"\n")
        fconf.write("geoname_userdefine="+self.geoname_userdefine+"\n\n")
        fconf.write("#Add summary in IPTC with the following variables (if you use quotes escape them: \\\" ):\n")
        fconf.write("#{LATITUDE} {LONGITUDE} {DISTANCETO} {NEARBYPLACE} {REGION} {COUNTRY} {ORIENTATION} \n")
        fconf.write("geoname_caption="+str(self.geoname_caption)+"\n")
        fconf.write("geoname_IPTCsummary="+str(self.geoname_IPTCsummary)+"\n\n")
        fconf.write("#Set default or last directory automatically used\n")
        fconf.write("Defaultdirectory="+self.picDir)
        # "r+" rewrites from the start without shrinking the file: truncate
        # so a shorter config can't leave stale lines from the previous one.
        fconf.truncate()
        fconf.close()
def showConfig(self,evt):
        """Open the configuration file with notepad (Windows location)."""
        confPath=os.environ["ALLUSERSPROFILE"]+"/gpicsync.conf"
        os.popen('notepad.exe "%s"'% (confPath))
        wx.CallAfter(self.consolePrint,"\n"+_("If you've changed and saved the configuration file you should restart the application to take effect.")+"\n")
def consolePrint(self,msg):
        """
        Print the given message in the console window
        (GUI safe to call with a CallAfter in threads to avoid refresh problems)
        """
        # Append at the end of the multi-line console TextCtrl.
        self.consoleEntry.AppendText(msg)
def imagePreview(self,prevPath=""):
        """Load the JPEG at prevPath and show it in the GUI preview area."""
        loaded=wx.Image(prevPath,wx.BITMAP_TYPE_JPEG)
        loaded.Scale(width=160,height=160)
        loaded.SetRGB(0,0, 235,233,237)
        # Blank the preview first, then display the freshly loaded bitmap.
        self.imgPrev.SetBitmap(self.imgWhite)
        self.imgPrev.SetBitmap(wx.BitmapFromImage(loaded))
def languageApp(self,evt):
"""
select a language to display the GUI with
"""
choices = [ 'system', 'Catalan','S.Chinese','T.Chinese','Czech','Dutch','English', 'French',
'German','Italian','Polish','Portuguese','Russian','Spanish']
dialog=wx.SingleChoiceDialog(self,_("Choose a language"),_("languages choice"),choices)
dialog.SetSelection(choices.index(self.language))
if dialog.ShowModal() == wx.ID_OK:
choice=dialog.GetStringSelection()
print "choice is : ", choice
self.language=choice
wx.CallAfter(self.consolePrint,"\n"+"Next time you launch GPicSync it will be in "+self.language+".\n")
self.writeConfFile()
dialog.Destroy()
else:
dialog.Destroy()
def aboutApp(self,evt):
        """Show the small 'about' message dialog."""
        parts=["GPicSync 1.30 - 2012 - \n\n",
        "GPicSync is Free Software (GPL v2)\n\n",
        _("More informations and help:"),
        "\n\n",
        "http://code.google.com/p/gpicsync/",
        "\n\n",
        "2012 - francois.schnell@gmail.com"]
        dialog=wx.MessageDialog(self,message="".join(parts),
        style=wx.OK|wx.CANCEL|wx.ICON_INFORMATION)
        dialog.ShowModal()
def geoWriterFrame(self,evt):
        """Open the frame used to manually write lat./long. in pictures' EXIF."""
        self.winGeoFrame=wx.Frame(win,size=(300,300),title=_("Manual latitude/longitude EXIF writer"))
        panel=wx.Panel(self.winGeoFrame)
        # Entry widgets are kept on self: manualGeoWrite reads them later.
        howtoLabel = wx.StaticText(panel, -1,_("Enter coordinates in decimal degrees"))
        latCaption = wx.StaticText(panel, -1,_("Latitude")+":")
        self.latEntry=wx.TextCtrl(panel,size=(100,-1))
        self.latEntry.SetValue(str(self.defaultLat))
        lonCaption = wx.StaticText(panel, -1,_("Longitude")+":")
        self.lonEntry=wx.TextCtrl(panel,size=(100,-1))
        self.lonEntry.SetValue(str(self.defaultLon))
        eleCaption = wx.StaticText(panel, -1,_("Eventual elevation (meters)")+":")
        self.eleEntry=wx.TextCtrl(panel,size=(100,-1))
        writeButton=wx.Button(panel,size=(-1,-1),label=_("Select and write in picture(s)"))
        self.Bind(wx.EVT_BUTTON, self.manualGeoWrite, writeButton)
        layout=wx.BoxSizer(wx.VERTICAL)
        layout.Add(howtoLabel,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
        layout.Add(latCaption,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
        layout.Add(self.latEntry,proportion=0,flag=wx.ALIGN_CENTER,border=5)
        layout.Add(lonCaption,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
        layout.Add(self.lonEntry,proportion=0,flag=wx.ALIGN_CENTER,border=5)
        layout.Add(eleCaption,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
        layout.Add(self.eleEntry,proportion=0,flag=wx.ALIGN_CENTER,border=5)
        layout.Add(writeButton,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
        panel.SetSizer(layout)
        self.winGeoFrame.Show()
def manualGeoWrite(self,evt):
        """manually write latitude/longitude in the EXIF header of the picture"""
        # Let the user pick one or more pictures, then read the coordinates
        # typed in the geoWriterFrame entries before closing that frame.
        picture=wx.FileDialog(self,style=wx.FD_MULTIPLE)
        picture.ShowModal()
        self.winGeoFrame.Hide()
        latitude=self.latEntry.GetValue()
        self.defaultLat=latitude
        longitude=self.lonEntry.GetValue()
        elevation=self.eleEntry.GetValue()
        self.winGeoFrame.Close()
        self.defaultLon=longitude
        self.pathPictures=picture.GetPaths()
        #print "###############", self.pathPictures
        wx.CallAfter(self.consolePrint,"\n---\n")
        # Worker executed in a thread: writes the GPS tags with exiftool.
        # latitude/longitude arrive as absolute values; latRef/longRef carry
        # the hemisphere (N/S, E/W).
        def writeEXIF(latitude,longitude,latRef,longRef):
            if len(self.pathPictures)!=0:
                for pic in self.pathPictures:
                    wx.CallAfter(self.consolePrint,_("Writing GPS latitude/longitude ")+\
                    latRef+latitude+" / "+longRef+longitude+" ---> "+os.path.basename(pic)+"\n")
                    # Altitude is optional; AltitudeRef=0 means "above sea level".
                    if elevation!="":
                        eleExif= " -GPSAltitude="+elevation+" -GPSAltitudeRef=0 "
                    else: eleExif=""
                    """
                    order='%s -n "-DateTimeOriginal>FileModifyDate" \
                    -GPSLatitude=%s -GPSLongitude=%s %s\
                    -GPSLatitudeRef=%s -GPSLongitudeRef=%s "%s" '\
                    %(self.exifcmd,latitude,longitude,eleExif, latRef,longRef,pic)
                    print order
                    """
                    os.popen('%s -n "-DateTimeOriginal>FileModifyDate" \
                    -GPSLatitude=%s -GPSLongitude=%s %s\
                    -GPSLatitudeRef=%s -GPSLongitudeRef=%s "%s" '\
                    %(self.exifcmd,latitude,longitude,eleExif, latRef,longRef,pic))
                wx.CallAfter(self.consolePrint,"---"+_("Finished")+"---\n")
        # Derive hemisphere letters from the signs, take absolute values,
        # then hand the actual writing off to a worker thread.  Any parse
        # failure (non-numeric entry) lands in the except below.
        try:
            if float(latitude)>0:
                latRef="N"
            else: latRef="S"
            if float(longitude)>0:
                longRef="E"
            else: longRef="W"
            latitude=str(abs(float(latitude)))
            longitude=str(abs(float(longitude)))
            start_new_thread(writeEXIF,(latitude,longitude,latRef,longRef))
        except:
            wx.CallAfter(self.consolePrint,"\n"+_("Latitude or Longitude formats are not valid: no geocoding happened.")+"\n")
def viewInGE(self,evt):
        """View a local kml file in Google Earth"""
        # Locate a way to launch Google Earth: COM object on Windows,
        # binary path on Linux/OSX.
        if sys.platform == 'win32':
            googleEarth =win32com.client.Dispatch("GoogleEarth.ApplicationGE")
        else:
            if sys.platform.find("linux")!=-1:
                # Ask `which` for an installed binary, falling back to the
                # default install path under the user's home.
                p=subprocess.Popen(['which', 'googleearth', 'google-earth'], stdout=subprocess.PIPE)
                googleEarthPaths= p.stdout.read().splitlines()
                if(len(googleEarthPaths) > 0):
                    # take first found google earth binary
                    googleEarth = googleEarthPaths[0]
                else:
                    googleEarth= os.path.expanduser("~/google-earth/googleearth")
            else:
                if sys.platform == 'darwin':
                    googleEarth= "/Applications/Google\ Earth.app/Contents/MacOS/Google\ Earth"
        try:
            # doc.kml is produced in the picture folder by the sync step;
            # this raises if self.picDir was never set.
            path=self.picDir+'/doc.kml'
            print "path=",path
        except:
            text=_("To visualize the results in Google Earth you must either:")+"\n\n"\
            +_("- finish a synchronisation")+"\n"\
            +("- select a folder you've already synchronized or double-click on the kml file in his folder'")
            wx.CallAfter(self.consolePrint,text)
        try:
            if sys.platform == 'win32':
                googleEarth.OpenKmlFile(path,True)
            else:
                if sys.platform.find("linux")!=-1:
                    # Launch in a thread so the GUI doesn't block on os.system.
                    def goGELinux():
                        os.system(googleEarth +" "+path)
                    start_new_thread(goGELinux,())
                else:
                    if sys.platform == 'darwin':
                        def goGEOSX():
                            os.system(googleEarth +" "+path)
                        start_new_thread(goGEOSX,())
        except:
            # Also reached when `path` is undefined because the first try
            # block above failed.
            wx.CallAfter(self.consolePrint,"\n"+_("Couldn't find or launch Google Earth")+"\n")
def exitApp(self,evt):
        """Quit properly the app"""
        # NOTE(review): exits with status 1 even on a normal quit — confirm
        # whether status 0 was intended.
        print "Exiting the app..."
        self.Close()
        self.Destroy()
        sys.exit(1)
def exitAppSave(self,evt):
        """Quit properly the app and save current settings in configuration file"""
        print "Exiting the app and save settings..."
        # Persist the current GUI settings before tearing down the frame.
        self.writeConfFile()
        self.Close()
        self.Destroy()
        # NOTE(review): exit status 1 on a normal quit — confirm intent.
        sys.exit(1)
def stopApp(self,evt):
        """Stop current processing"""
        # Polled by the worker threads (sync / rename loops) between items.
        self.stop=True
def clearConsole(self,evt):
        """Clear the output console and reset the image preview.

        The original issued the identical SetBitmap call both before and
        after clearing the console; a single call produces the same end
        state.
        """
        self.imgPrev.SetBitmap(self.imgWhite)
        self.consoleEntry.Clear()
def findGpx(self,evt):
        """
        Select the .gpx file to use or create one if necessary through GPSbabel
        """
        # Windows gets a multi-select dialog; other platforms single-select.
        if sys.platform == 'win32':
            openGpx=wx.FileDialog(self,style=wx.FD_MULTIPLE)
        else:
            openGpx=wx.FileDialog(self)
        openGpx.SetWildcard("GPX Files(*.gpx)|*.gpx|NMEA Files (*.txt)|*.txt")
        openGpx.ShowModal()
        if sys.platform == 'win32':
            self.gpxFile=openGpx.GetPaths()
        else:
            self.gpxFile=[openGpx.GetPath()]
        # Convert NMEA .txt logs to GPX through GPSBabel.  enumerate() fixes
        # the original index bookkeeping, which only advanced the index for
        # .txt entries and therefore mis-targeted mixed selections.
        for j,track in enumerate(self.gpxFile):
            name=os.path.basename(track)
            if name.find(".txt")>0 or name.find(".TXT")>0:
                try:
                    # splitext handles both .txt and .TXT; the original
                    # split(".txt") produced "file.TXT.gpx" for upper-case.
                    target=os.path.splitext(track)[0]+".gpx"
                    babelResult=os.popen('gpsbabel -i nmea -f "%s" -o gpx -F "%s"' \
                    % (track,target)).read()
                    self.gpxFile[j]=target
                    if os.path.isfile(target)==True:
                        wx.CallAfter(self.consolePrint,\
                        _("For information, GPX file created with GPSBabel in your picture folder."))
                    else:
                        wx.CallAfter(self.consolePrint,_("Possible problem with the creation of the gpx file"))
                except:
                    wx.CallAfter(self.consolePrint,_("Couldn't create the necessary GPX file."))
        # Show the selected (possibly converted) paths, space separated.
        gpxPaths=""
        for path in self.gpxFile:
            gpxPaths+=path+" "
        self.gpxEntry.SetValue(gpxPaths)
def findPictures(self,evt):
        """Let the user choose the folder containing the pictures to geocode."""
        chooser=wx.DirDialog(self)
        # Start browsing from the previously used folder when there is one.
        if self.picDir!="":
            chooser.SetPath(self.picDir)
        chooser.ShowModal()
        self.picDir=chooser.GetPath()
        self.dirEntry.SetValue(self.picDir)
def syncPictures(self,evt):
"""Sync. pictures with the .gpx file"""
if self.dirEntry.GetValue()=="" or self.gpxEntry.GetValue=="":
wx.CallAfter(self.consolePrint,_("You must first select a pictures folder and a GPX file.")+"\n")
else:
pass
self.geCheck.SetValue(True) # Oblige the cration of a GE file anyway
self.stop=False
self.utcOffset=float(self.utcEntry.GetValue())#testing float for UTC
dateProcess=self.dateCheck.GetValue()
self.log=self.logFile.GetValue()
self.interpolation=self.interpolationCheck.GetValue()
timeStampOrder=self.geTStamps.GetValue()
#print "self.utcOffset= ",self.utcOffset
eleMode=self.elevationChoice.GetSelection()
def sync():
if self.dirEntry.GetValue()!="" and self.gpxEntry.GetValue!="":
if self.timezone:
wx.CallAfter(self.consolePrint,"\n------\n"+_("Beginning synchronization with ")\
+_("Time zone is ")+self.timezone+\
_(" and maximum time difference = ")+self.timerangeEntry.GetValue().encode()+_(" seconds")+"\n")
else:
wx.CallAfter(self.consolePrint,"\n------\n"+_("Beginning synchronization with ")\
+_("UTC Offset =")+self.utcEntry.GetValue().encode()+\
_(" hours and maximum time difference = ")+self.timerangeEntry.GetValue().encode()+_(" seconds")+"\n")
else:
pass
geo=GpicSync(gpxFile=self.gpxFile,tcam_l=self.tcam_l,tgps_l=self.tgps_l,timezone=self.timezone,
UTCoffset=self.utcOffset,dateProcess=dateProcess,timerange=int(self.timerangeEntry.GetValue()),
backup=False,interpolation=self.interpolation)
if self.backupCheck.GetValue()==True:
backupFolder=self.picDir+'/originals-backup-'+os.path.basename(self.picDir)+'/'
wx.CallAfter(self.consolePrint,"\n"+
_("Creating an 'originals-backup' folder.")+"\n")
try:
os.mkdir(backupFolder)
except:
print "Couldn't create the backup folder, it maybe already exist"
if self.geCheck.GetValue()==True:
wx.CallAfter(self.consolePrint,"\n"+_("Starting to generate a Google Earth file (doc.kml) in the picture folder ...")+" \n")
localKml=KML(self.picDir+"/doc",os.path.basename(self.picDir),timeStampOrder=timeStampOrder,
utc=self.utcEntry.GetValue(),eleMode=eleMode,iconsStyle=self.iconsChoice.GetSelection(),gmaps=False)
localKml.writeInKml("\n<Folder>\n<name>Photos</name>")
try:
os.mkdir(self.picDir+'/thumbs')
except:
print "Couldn't create the thumbs folder, it maybe already exist"
if self.gmCheck.GetValue()==True:
wx.CallAfter(self.consolePrint,"\n"+_("Starting to generate a Google Map file (doc-web.kml) in the picture folder")+" ... \n")
webKml=KML(self.picDir+"/doc-web",os.path.basename(self.picDir),url=self.urlEntry.GetValue(),
utc=self.utcEntry.GetValue(),gmaps=True)
webKml.path(self.gpxFile)
webKml.writeInKml("\n<Folder>\n<name>Photos</name>")
if self.log==True:
f=open(self.picDir+'/gpicsync.log','w')
f.write(_("Geocoded with UTC Offset= ")+
self.utcEntry.GetValue()+_(" and Maximum time difference = ")\
+self.timerangeEntry.GetValue()+"\n")
f.write(_("Pictures Folder: ")+self.picDir+"\n")
f.write(_("GPX file: ")+self.gpxEntry.GetValue()+"\n\n")
for fileName in sorted(os.listdir ( self.picDir )):
if self.stop==True: break
if fnmatch.fnmatch ( fileName, '*.JPG' )\
or fnmatch.fnmatch ( fileName, '*.jpg' )\
or fnmatch.fnmatch ( fileName, '*.CR2' )\
or fnmatch.fnmatch ( fileName, '*.cr2' )\
or fnmatch.fnmatch ( fileName, '*.arw' )\
or fnmatch.fnmatch ( fileName, '*.ARW' )\
or fnmatch.fnmatch ( fileName, '*.CRW' )\
or fnmatch.fnmatch ( fileName, '*.crw' )\
or fnmatch.fnmatch ( fileName, '*.NEF' )\
or fnmatch.fnmatch ( fileName, '*.nef' )\
or fnmatch.fnmatch ( fileName, '*.PEF' )\
or fnmatch.fnmatch ( fileName, '*.pef' )\
or fnmatch.fnmatch ( fileName, '*.RAW' )\
or fnmatch.fnmatch ( fileName, '*.raw' )\
or fnmatch.fnmatch ( fileName, '*.rw2' )\
or fnmatch.fnmatch ( fileName, '*.ORF' )\
or fnmatch.fnmatch ( fileName, '*.orf' )\
or fnmatch.fnmatch ( fileName, '*.DNG' )\
or fnmatch.fnmatch ( fileName, '*.dng' )\
or fnmatch.fnmatch ( fileName, '*.RAF' )\
or fnmatch.fnmatch ( fileName, '*.raf' )\
or fnmatch.fnmatch ( fileName, '*.MRW' )\
or fnmatch.fnmatch ( fileName, '*.mrw' ):
print "\nFound fileName ",fileName," Processing now ..."
wx.CallAfter(self.consolePrint,"\n"+_("(Found ")+fileName+" ...")
print self.picDir+'/'+fileName
backupFolder=self.picDir+'/originals-backup-'+os.path.basename(self.picDir)+'/'
if self.backupCheck.GetValue()==True\
and os.path.isfile(backupFolder+fileName)==False:
shutil.copyfile(self.picDir+'/'+fileName,backupFolder+fileName)
#Create thumb and make a preview
if fnmatch.fnmatch (fileName, '*.JPG') or fnmatch.fnmatch (fileName, '*.jpg'):
print "Create a thumb now!"
try:
im=Image.open(self.picDir+'/'+fileName)
width=int(im.size[0])
height=int(im.size[1])
if width>height:
max=width
else:
max=height
zoom=float(160.0/max)
im.thumbnail((int(width*zoom),int(height*zoom)))
im.save(self.picDir+"/thumbs/"+"thumb_"+fileName)
wx.CallAfter(self.imagePreview,self.picDir+"/thumbs/"+"thumb_"+fileName)
except:
print "Warning: didn't create thumbnail, no JPG file ?"
result=geo.syncPicture(self.picDir+'/'+fileName)
wx.CallAfter(self.consolePrint,result[0]+"\n")
#Check if the picture have Date/Time infos, otherwise go to next pic.
if result[0]==" : WARNING: DIDN'T GEOCODE, no Date/Time Original in this picture.":
continue
if self.log==True:
f.write(_("Processed image ")+fileName+" : "+result[0]+"\n")
if self.geCheck.GetValue()==True and result[1] !="" and result[2] !="":
localKml.placemark(self.picDir+'/'+fileName,lat=result[1],
long=result[2],width=result[3],height=result[4],timeStamp=result[5],
elevation=result[6])
if self.gmCheck.GetValue()==True and result[1] !="" and result[2] !="":
webKml.placemark4Gmaps(self.picDir+'/'+fileName,lat=result[1],long=result[2],width=result[3],height=result[4],elevation=result[6])
if self.geonamesCheck.GetValue()==True and result[1] !="" and result[2] !="": # checks if geonames checked and lat/lon exist
try:
nearby=Geonames(lat=result[1],long=result[2])
except:
wx.CallAfter(self.consolePrint,_("Couldn't retrieve geonames data...")+"\n")
try:
if self.geoname_nearbyplace==True:
gnPlace=unicode(nearby.findNearbyPlace())
else: gnPlace=""
except:
print "oooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooooo"
gnPlace=""
try:
gnDistance=unicode(nearby.findDistance())
except:
gnDistance=""
try:
if self.geoname_region==True:
gnRegion=unicode(nearby.findRegion())
else: gnRegion=""
except:
gnRegion=""
try:
if self.geoname_country==True:
gnCountry=unicode(nearby.findCountry())
else:
gnCountry=""
except:
gnCountry=""
try:
if self.geoname_userdefine !="":
userdefine=self.geoname_userdefine
else: userdefine=""
except:
userdefine=""
try:
gnCountryCode=nearby.findCountryCode()
except:
gnCountryCode=""
print "!!! Something went wrong while retreiving country code !!!"
try:
gnOrientation=nearby.findOrientation()
except:
gnOrientation=""
print "!!! Something went wrong while retreiving orientation !!!"
#try:
if 1:
if self.geoname_summary==True:
gnSummary=gnDistance+" Km to "+unicode(gnPlace)+" in "+unicode(gnRegion)+" "+unicode(gnCountry)
else:
gnSummary=""
gnInfos="Geonames: "+gnDistance+" Km "+gnOrientation +" "+ gnPlace+" "+gnRegion+" "+gnCountry+" "+gnCountryCode
#print "gnInfos:",gnInfos
geotag="geotagged"
tempLat=str(decimal.Decimal(result[1]).quantize(decimal.Decimal('0.000001')))
tempLong=str(decimal.Decimal(result[2]).quantize(decimal.Decimal('0.000001')))
geotagLat="geo:lat="+tempLat
geotagLon="geo:lon="+tempLong
wx.CallAfter(self.consolePrint,gnInfos+_(", writing geonames)")+"\n")
geonameKeywords="" # create initial geonames string command
exiftagOptions = []
exiftagOptions.append(self.exifcmd)
print userdefine
if self.gnOptChoice.GetSelection() in [2,3]:
for geoname in [gnPlace,gnRegion,gnCountry,gnSummary,geotag,geotagLat,geotagLon,userdefine]:
if geoname !="":
geonameKeywords+=' -keywords="%s" ' % unicode(geoname)
exiftagOptions.append((u'-keywords="%s" ' % geoname))
if self.geoname_caption==True:
gnIPTCsummary= self.geoname_IPTCsummary
for var in [("{LATITUDE}",tempLat),("{LONGITUDE}",tempLong),
("{DISTANCETO}",gnDistance),("{NEARBYPLACE}",unicode(gnPlace)),
("{REGION}",unicode(gnRegion)),("{COUNTRY}",unicode(gnCountry)),("{ORIENTATION}",gnOrientation)]:
gnIPTCsummary=gnIPTCsummary.replace(var[0],var[1])
gnIPTCsummary=' -iptc:caption-abstract="'+gnIPTCsummary+'"' #took space out before -iptc
#print "=== gnIPTCsummary=== ",gnIPTCsummary, "======"
if self.gnOptChoice.GetSelection() in [0,1]:
if gnPlace !="": geonameKeywords+=' -iptc:city="'+unicode(gnPlace)+'"'
if gnRegion !="": geonameKeywords+=' -iptc:province-state="'+unicode(gnRegion)+'"'
if gnCountry !="": geonameKeywords+=' -iptc:Country-PrimaryLocationName="'+unicode(gnCountry)+'"'
if gnPlace !="": exiftagOptions.append(('-iptc:city="'+unicode(gnPlace)+'"'))
if gnRegion !="": exiftagOptions.append(('-iptc:province-state="'+unicode(gnRegion)+'"'))
if gnCountry !="": exiftagOptions.append(('-iptc:Country-PrimaryLocationName="'+unicode(gnCountry)+'"'))
#print "*************",gnCountryCode,type(gnCountryCode)
if gnCountryCode !="": geonameKeywords+=' -iptc:Country-PrimaryLocationCode="'+unicode(gnCountryCode)+'"'
if gnCountryCode !="": exiftagOptions.append(('-iptc:Country-PrimaryLocationCode="'+unicode(gnCountryCode)+'"'))
if 1:
geonameKeywords+=' -iptc:Sub-location="'+unicode(gnDistance)+" Km "+unicode(gnOrientation)+" "+unicode(gnPlace)+'"'
exiftagOptions.append(('-iptc:Sub-location="'+unicode(gnDistance)+" Km "+unicode(gnOrientation)+" "+unicode(gnPlace)+'"'))
if gnPlace !="": geonameKeywords+=' -iptc:city="'+unicode(gnPlace)+'"'
if self.gnOptChoice.GetSelection() in [0,2]:
geonameKeywords+=unicode(gnIPTCsummary)
exiftagOptions.append(gnIPTCsummary)
#print "\n=== geonameKeywords ===\n", geonameKeywords,"\n======"
# WRITE GEONAMES
## I'm stuck at writing the Geonames in utf-8 on Windows 7
## It works when the script gpicsync-GUI.py is excecuted from the Eclipse but fail when gpicsync-GUI.py is excecuted from the DOS cmd.
## It seems related to this issue http://bugs.python.org/issue1759845 which apparently is solved but only for Python 3 ??
## Help would be appreciated !!! :/
if 1:
# tried: unicode(geonameKeywords).encode(locale.getpreferredencoding()) ??
# tried: unicode(s.decode("utf-8")).encode("utf-8")
# tried: -tagsfromfile @ -iptc:all -codedcharacterset=utf8
# tried : geonameKeywords.decode("utf-8")).encode("iso-8859-1")
# trying lib unidecode see unicode to ascii (see issue 117 regarding Python 2.x and command line to exiftool containing unicode)
#print geonameKeywords
if sys.platform!=("linux2" or "darwin"): # Usage of unidecode for now on windows due to above problem ( https://code.google.com/p/gpicsync/issues/detail?id=117 )
os.popen('%s -tagsfromfile @ -iptc:all -codedcharacterset=utf8 %s -overwrite_original "-DateTimeOriginal>FileModifyDate" "%s" '%(self.exifcmd,unidecode(geonameKeywords),self.picDir+'/'+fileName))
else: # following line contributed by hsivonen which may work on OSX / Linux ( https://code.google.com/p/gpicsync/issues/detail?id=91 )
os.popen(('%s %s -overwrite_original "-DateTimeOriginal>FileModifyDate" "%s" '%(self.exifcmd,geonameKeywords.decode("utf-8"),self.picDir+'/'+fileName)).encode("utf-8"))
#os.popen('%s -tagsfromfile @ -iptc:all -codedcharacterset=utf8 %s -overwrite_original "-DateTimeOriginal>FileModifyDate" "%s" '%(self.exifcmd,unicode(geonameKeywords).encode("utf-8"),self.picDir+'/'+fileName))
if 0:
exiftagOptions.append(u'-overwrite_original')
exiftagOptions.append(u'-DateTimeOriginal>FileModifyDate')
exiftagOptions.append(self.picDir+'/'+fileName)
print(exiftagOptions)
subprocess.call(exiftagOptions)
#except:
if 0:
print "Had problem when writing geonames"
traceback.print_exc(file=sys.stdout)
if self.stop==False:
wx.CallAfter(self.consolePrint,"\n*** "+_("FINISHED GEOCODING PROCESS")+" ***\n")
if self.stop==True:
wx.CallAfter(self.consolePrint,"\n *** "+_("PROCESSING STOPPED BY THE USER")+" ***\n")
if self.log==True: f.close()
if self.geCheck.GetValue()==True:
localKml.writeInKml("</Folder>\n")
wx.CallAfter(self.consolePrint,"\n"+_("Adding the GPS track log to the Google Earth kml file")+"...\n")
localKml.path(self.gpxFile,cut=10000)
localKml.close()
wx.CallAfter(self.consolePrint,"\n"+_("Click on the 'View in Google Earth' button to visualize the result")+".\n")
wx.CallAfter(self.consolePrint,_("( A Google Earth doc.kml file has been created in your picture folder.)")+"\n")
if self.gmCheck.GetValue()==True:
webKml.writeInKml("</Folder>\n")
webKml.close()
wx.CallAfter(self.consolePrint,_("( A Google Maps doc-web.kml file has been created with the given url )")+"\n")
start_new_thread(sync,())
def localtimeCorrection(self,evt):
""" Local time correction if GPS and camera wasn't synchronized """
self.tcam_l=self.camEntry.GetValue()
self.tgps_l=self.gpsEntry.GetValue()
wx.CallAfter(self.consolePrint,"\n"+_("A time correction has been set")+" : "+
_("Time camera= ")+self.tcam_l+_(" Time GPS= ")+self.tgps_l+" .\n")
print "tcam_l =",self.tcam_l
print "tgps_l =",self.tgps_l
def quitLocaltimeCorrection(self,evt):
        # Close the "Local time corrections" frame opened by localtimeFrame().
        self.winOpt.Close()
def localtimeFrame(self,evt):
        """Open the frame where camera/GPS local-time corrections are entered."""
        # OSX needs a slightly wider frame to fit the labels.
        width=440
        if sys.platform == 'darwin':
            width=530
        self.winOpt=wx.Frame(win,size=(width,280),title=_("Local time corrections"))
        panel=wx.Panel(self.winOpt)
        helpText="\t"+_("Use this option ONLY if your camera local time is wrong.")\
        +"\n\n"+_("Indicate here the local time now displayed by your camera and GPS (hh:mm:ss)")
        helpLabel = wx.StaticText(panel, -1,helpText)
        camCaption = wx.StaticText(panel, -1,_("Local time displayed now by the camera"))
        self.camEntry=wx.TextCtrl(panel,size=(100,-1))
        self.camEntry.SetValue(self.tcam_l)
        gpsCaption = wx.StaticText(panel, -1,_("Local time displayed now by the GPS"))
        self.gpsEntry=wx.TextCtrl(panel,size=(100,-1))
        self.gpsEntry.SetValue(self.tgps_l)
        applyButton=wx.Button(panel,size=(130,30),label=_("Apply correction"))
        closeButton=wx.Button(panel,size=(70,30),label=_("Quit"))
        self.Bind(wx.EVT_BUTTON, self.localtimeCorrection, applyButton)
        self.Bind(wx.EVT_BUTTON, self.quitLocaltimeCorrection, closeButton)
        buttonRow=wx.BoxSizer()
        buttonRow.Add(applyButton,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
        buttonRow.Add(closeButton,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
        layout=wx.BoxSizer(wx.VERTICAL)
        layout.Add(helpLabel,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
        layout.Add(camCaption,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
        layout.Add(self.camEntry,proportion=0,flag=wx.ALIGN_CENTER,border=5)
        layout.Add(gpsCaption,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=5)
        layout.Add(self.gpsEntry,proportion=0,flag=wx.ALIGN_CENTER,border=5)
        layout.Add(buttonRow,proportion=0,flag=wx.ALIGN_CENTER,border=5)
        panel.SetSizer(layout)
        self.winOpt.Show()
def exifFrame(self,evt):
        """Open the small EXIF Reader tool frame."""
        # OSX needs a slightly wider frame to fit the labels.
        width=280
        if sys.platform == 'darwin':
            width=330
        self.winExifReader=wx.Frame(win,size=(width,220),title=_("EXIF Reader"))
        panel=wx.Panel(self.winExifReader)
        introLabel = wx.StaticText(panel, -1,_("Read the EXIF metadata of the selected picture."))
        # Default selection: dump everything.
        self.ExifReaderSelected=_("All EXIF metadata")
        allRadio=wx.RadioButton(panel,-1,_("All EXIF metadata"))
        someRadio=wx.RadioButton(panel,-1,_("Date/Time/Lat./Long."))
        def onRadio(evt):
            # Remember which radio button is currently selected (by label).
            self.ExifReaderSelected=evt.GetEventObject().GetLabel()
        for rb in (allRadio,someRadio):
            self.Bind(wx.EVT_RADIOBUTTON ,onRadio,rb)
        pickButton=wx.Button(panel,size=(130,30),label=_("Select a picture"))
        self.Bind(wx.EVT_BUTTON, self.readEXIF, pickButton)
        layout=wx.BoxSizer(wx.VERTICAL)
        layout.Add(introLabel,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
        layout.Add(allRadio,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=10)
        layout.Add(someRadio,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=10)
        layout.Add(pickButton,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
        panel.SetSizer(layout)
        self.winExifReader.Show()
def readEXIF(self,evt):
"""read the selected EXIF informations and eventually crate a thumb"""
print "Selected ",self.ExifReaderSelected
picture=wx.FileDialog(self)
picture.ShowModal()
pathPicture=picture.GetPath()
if pathPicture !="" or None:
myPicture=GeoExif(pathPicture)
try:
pathThumb=str(os.path.dirname(pathPicture))+"/thumbs/thumb_"+str(os.path.basename(pathPicture))
print "Thumb path",pathThumb
if os.path.isfile(pathThumb)==False:
if os.path.isdir(os.path.dirname(pathThumb))==False:
os.mkdir(os.path.dirname(pathThumb))
im=Image.open(pathPicture)
width=im.size[0]
height=im.size[1]
if width>height:max=width
else: max=height
zoom=float(160.0/max)
im.thumbnail((int(im.size[0]*zoom),int(im.size[1])*zoom))
im.save(pathThumb)
self.imagePreview(prevPath=pathThumb)
except:
print "Coudln't create a thumnail, probably not a JPG file"
def read():
wx.CallAfter(self.consolePrint,"\n\n"+_("Selected metada ")+"\n")
wx.CallAfter(self.consolePrint,"-------------------\n")
if self.ExifReaderSelected==_("All EXIF metadata"):
wx.CallAfter(self.consolePrint,pathPicture+"\n\n")
wx.CallAfter(self.consolePrint,myPicture.readExifAll())
if self.ExifReaderSelected==_("Date/Time/Lat./Long."):
dateTime=myPicture.readDateTime()
datetimeString=dateTime[0]+":"+dateTime[1]
wx.CallAfter(self.consolePrint,pathPicture+"\n\n")
if len(datetimeString)>5:
wx.CallAfter(self.consolePrint,datetimeString)
wx.CallAfter(self.consolePrint," "+_("lat./long.")+"="+str(myPicture.readLatLong()))
else:
wx.CallAfter(self.consolePrint,_("None"))
start_new_thread(read,())
self.winExifReader.Close()
def renameFrame(self,evt):
    """Open the small window offering the two renaming tools."""
    self.winRenameTool = wx.Frame(win, size=(300,220), title=_("Renaming tool"))
    panel = wx.Panel(self.winRenameTool)
    intro = wx.StaticText(
        panel, -1,
        _("This tool renames your pictures with the ")+"\n"
        +_("original time/date and lat./long.(if present)"))
    # Buttons named after what they do (the original readButton /
    # readButtonFolder names were swapped relative to their labels).
    folderButton = wx.Button(panel, size=(200,30),
                             label=_("Rename pictures in a folder"))
    singleButton = wx.Button(panel, size=(200,30),
                             label=_("Rename a single picture"))
    self.Bind(wx.EVT_BUTTON, self.renamePicturesInFolder, folderButton)
    self.Bind(wx.EVT_BUTTON, self.renamePicture, singleButton)
    sizer = wx.BoxSizer(wx.VERTICAL)
    sizer.Add(intro, proportion=0, flag=wx.ALIGN_CENTER|wx.ALL, border=20)
    sizer.Add(folderButton, proportion=0, flag=wx.ALIGN_CENTER|wx.ALL, border=10)
    sizer.Add(singleButton, proportion=0, flag=wx.ALIGN_CENTER|wx.ALL, border=10)
    panel.SetSizer(sizer)
    self.winRenameTool.Show()
def renamePicture(self,evt):
    """Ask for a single picture and rename it (in a worker thread) to
    '<original date-time> <lat./long.>.jpg' taken from its EXIF data.
    """
    picture = wx.FileDialog(self)
    # Bug fix: the wildcard must be set BEFORE ShowModal() to filter the
    # dialog; the original set it afterwards, so it never took effect.
    picture.SetWildcard("*.JPG")
    picture.ShowModal()
    self.pathPicture = picture.GetPath()
    self.winRenameTool.Close()
    # `!="" or None` was just a truthiness test on the string.
    if self.pathPicture:
        wx.CallAfter(self.consolePrint,"\n"+_("Beginning renaming..."))
        def rename():
            myPicture = GeoExif(self.pathPicture)
            # Read the EXIF date once (the original queried it twice).
            dateTime = myPicture.readDateTime()
            string = dateTime[0]+" "+dateTime[1]
            string = string.replace(":","-")
            latlong = myPicture.readLatLong()
            if latlong is None:
                latlong = ""
            if len(string) < 5:
                wx.CallAfter(self.consolePrint,"\n"+_("Didn't find Original Time/Date for ")+self.pathPicture)
            else:
                newName = string+" "+latlong+".jpg"
                os.rename(self.pathPicture,os.path.dirname(self.pathPicture)+"/"+newName)
                # Bug fix: the console message now matches the real new file
                # name (the space before lat./long. was missing before).
                wx.CallAfter(self.consolePrint,"\n"+_("Renamed ")+os.path.basename(self.pathPicture)+" -> "+newName)
        start_new_thread(rename,())
def renamePicturesInFolder(self,evt):
    """Ask for a folder, then (in a worker thread) rename every JPEG in it
    to '<original date-time> <lat./long.>.jpg' read from its EXIF data."""
    # self.stop is polled by the worker; other UI code sets it to interrupt.
    self.stop=False
    self.winRenameTool.Close()
    openDir=wx.DirDialog(self)
    openDir.ShowModal()
    self.picDir=openDir.GetPath()
    # NOTE(review): parsed as `(self.picDir != "") or None`, i.e. only the
    # empty string is rejected — the `or None` part is a no-op.
    if self.picDir!="" or None:
        wx.CallAfter(self.consolePrint,"\n"+_("Beginning renaming..."))
        def rename():
            # Runs in a background thread; UI output goes through wx.CallAfter.
            for fileName in os.listdir ( self.picDir ):
                if self.stop==True:
                    # User interruption: report, reset the flag, abort the run.
                    wx.CallAfter(self.consolePrint,"\n"+_("Interrupted by the user"))
                    self.stop=False
                    break
                if fnmatch.fnmatch ( fileName, '*.JPG' )or \
                   fnmatch.fnmatch ( fileName, '*.jpg' ):
                    print self.picDir+'/'+fileName
                    myPicture=GeoExif(self.picDir+"/"+fileName)
                    string=myPicture.readDateTime()[0]+" "+myPicture.readDateTime()[1]
                    print string, len(string)
                    if len(string)<5:
                        wx.CallAfter(self.consolePrint,"\n"+_("Didn't find Original Time/Date for ")+fileName)
                        # NOTE(review): `break` aborts the WHOLE run at the
                        # first picture lacking an EXIF date — `continue`
                        # (skip just that file) may have been intended.
                        break
                    string=string.replace(":","-")
                    latlong=myPicture.readLatLong()
                    if latlong==None: latlong=""
                    print "latlong= ",latlong
                    os.rename(self.picDir+'/'+fileName,self.picDir+"/"+string+" "+latlong+".jpg")
                    wx.CallAfter(self.consolePrint,"\n"+_("Renamed ")+fileName+" to "+string+" "+latlong+".jpg")
            wx.CallAfter(self.consolePrint,"\n"+_("Finished"))
        start_new_thread(rename,())
def kmzGeneratorFrame(self,evt):
    """A frame to generate a KMZ file"""
    self.winKmzGenerator=wx.Frame(win,size=(280,180),title="KMZ Generator")
    bkg=wx.Panel(self.winKmzGenerator)
    text="\n"+_("Create a kmz file archive")
    introLabel = wx.StaticText(bkg, -1,text)
    readButton=wx.Button(bkg,size=(150,30),label=_("Create KMZ file !"))
    self.Bind(wx.EVT_BUTTON, self.kmzGenerator, readButton)
    vbox=wx.BoxSizer(wx.VERTICAL)
    vbox.Add(introLabel,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
    vbox.Add(readButton,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
    bkg.SetSizer(vbox)
    # KMZ generation is not supported on MacOS X yet: print a notice in the
    # console instead of showing a window that cannot work.
    if sys.platform == 'darwin':
        wx.CallAfter(self.consolePrint,"\n"+_("Sorry this tool is not yet available for the MacOS X version")+" \n")
    else:
        self.winKmzGenerator.Show()
def kmzGenerator(self,evt):
"""A tool to create a kmz file containing the geolocalized pictures"""
print "kmz ordered ..."
self.winKmzGenerator.Close()
if self.picDir == None or self.picDir !="":
wx.CallAfter(self.consolePrint,"\n"+_("Creating a KMZ file in the pictures folder...")+"\n")
zip = zipfile.ZipFile(self.picDir+'/'+os.path.basename(self.picDir)+".zip", 'w')
zip.write(self.picDir+'/doc.kml','doc.kml',zipfile.ZIP_DEFLATED)
wx.CallAfter(self.consolePrint,"\n"+_("Adding doc.kml"))
for fileName in os.listdir ( self.picDir ):
if fnmatch.fnmatch ( fileName, '*.JPG' )or fnmatch.fnmatch ( fileName, '*.jpg' ):
zip.write(self.picDir+"/"+fileName,fileName.encode(),zipfile.ZIP_DEFLATED)
wx.CallAfter(self.consolePrint,"\n"+_("Adding ")+fileName)
if (self.iconsChoice.GetSelection() == 0):
wx.CallAfter(self.consolePrint,"\n"+_("Adding ") + "thumbs")
for fileName in os.listdir ( self.picDir+'/thumbs' ):
zip.write(self.picDir+"/thumbs/"+fileName,'thumbs/' + fileName.encode(),zipfile.ZIP_DEFLATED)
wx.CallAfter(self.consolePrint,"\n"+_("Adding ")+fileName)
zip.close()
try:
os.rename(self.picDir+'/'+os.path.basename(self.picDir)+".zip",self.picDir+'/'+os.path.basename(self.picDir)+".kmz")
wx.CallAfter(self.consolePrint,"\n"+_("KMZ file created in pictures folder")+"\n")
except WindowsError:
wx.CallAfter(self.consolePrint,"\n"+_("Couldn't rename the zip file to kmz (maybe a previous file already exist)")+"\n")
else:
text="\n --- \n"+_("To create a Google Earth kmz file you must either:")+"\n\n"\
+_("- finish a synchronisation")+"\n"\
+_("- select a folder you've already synchronized then select the KMZ Generator tool")+"\n --- \n"
wx.CallAfter(self.consolePrint,text)
def gpxInspectorFrame(self,evt):
    """A frame to inspect a gpx file"""
    self.winGpxInspector=wx.Frame(win,size=(280,180),title=_("GPX Inspector"))
    bkg=wx.Panel(self.winGpxInspector)
    text=_("Inspect a gpx file and show tracklog data.")
    introLabel = wx.StaticText(bkg, -1,text)
    readButton=wx.Button(bkg,size=(150,30),label=_("Select a gpx file"))
    # The button hands off to gpxInspector, which opens the file dialog.
    self.Bind(wx.EVT_BUTTON, self.gpxInspector, readButton)
    vbox=wx.BoxSizer(wx.VERTICAL)
    vbox.Add(introLabel,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
    vbox.Add(readButton,proportion=0,flag=wx.ALIGN_CENTER|wx.ALL,border=20)
    bkg.SetSizer(vbox)
    self.winGpxInspector.Show()
def gpxInspector(self,evt):
"""A tool to display data from a gpx file"""
gpx=wx.FileDialog(self)
gpx.ShowModal()
gpx.SetWildcard("*.gpx")
gpxPath=gpx.GetPath()
self.winGpxInspector.Close()
print "gpxPath=", gpxPath
if gpxPath =="" or None:
wx.CallAfter(self.consolePrint,"\n"+_("Select a gpx file first."))
else:
gpxPath=[gpxPath]
myGpx=Gpx(gpxPath).extract()
wx.CallAfter(self.consolePrint,"\n"+_("Looking at ")+gpxPath[0]+"\n")
wx.CallAfter(self.consolePrint,"\n"+_("Number of valid track points found")+" : "+str(len(myGpx))+"\n\n")
def inspect():
for trkpt in myGpx:
wx.CallAfter(self.consolePrint,_("Date")+": "+trkpt["date"]+"\t"+_("Time")+": "\
+trkpt["time"]+"\t"+_("Latitude")+": "+trkpt["lat"]
+"\t"+_("Longitude")+": "+trkpt["lon"]
+"\t"+_("Altitude")+": "+trkpt["ele"]+"\n")
start_new_thread(inspect,())
def tzMenuPopup(self, evt):
    """Show timezones menu"""
    # Anchor the popup just below the timezone button.
    self.tzButton.PopupMenu(self.tzMenu, (0,self.tzButton.GetSize().GetHeight()))
def manualTZ(self, evt):
    """Switch to manual UTC-offset mode: drop the named timezone and
    re-enable the UTC offset entry widgets."""
    self.timezone = None
    self.tzButton.SetLabel(_("Manual UTC offset"))
    self.utcLabel.Enable()
    self.utcEntry.Enable()
def selectTZ(self, evt):
    """Choose a selected timezone"""
    # Menu item ids start at 3000; map the id back into the timezones table.
    self.timezone = timezones[evt.GetId()-3000]
    self.tzButton.SetLabel(self.timezone)
    # A named timezone supersedes the manual UTC offset widgets.
    self.utcLabel.Disable()
    self.utcEntry.Disable()
# Application entry point: create the wx app and the main window (`win` is a
# module-level global referenced by the GUI methods above), then block in the
# event loop until the window closes.
app=wx.App(redirect=False)
win=GUI(None,title="GPicSync GUI")
win.Show()
app.MainLoop()
| gpl-2.0 |
jay-lau/magnum | magnum/tests/functional/api/v1/test_baymodel.py | 1 | 7160 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib.common.utils import data_utils
from tempest.lib import exceptions
import testtools
from magnum.tests.functional.common import base
from magnum.tests.functional.common import datagen
class BayModelTest(base.BaseMagnumTest):
    """Tests for baymodel CRUD."""

    def __init__(self, *args, **kwargs):
        super(BayModelTest, self).__init__(*args, **kwargs)
        # uuids of baymodels created by a test; cleaned up in tearDown()
        self.baymodels = []
        self.baymodel_client = None
        self.keypairs_client = None

    def setUp(self):
        try:
            super(BayModelTest, self).setUp()
            (self.baymodel_client,
             self.keypairs_client) = self.get_clients_with_new_creds(
                 type_of_creds='default',
                 request_type='baymodel')
        except Exception:
            # A partially-completed setUp must still release resources.
            self.tearDown()
            raise

    def tearDown(self):
        # Bug fix: iterate over a snapshot. The original removed entries
        # from self.baymodels while iterating it, which skips every other
        # element and leaks those baymodels.
        for baymodel_id in list(self.baymodels):
            self._delete_baymodel(baymodel_id)
            self.baymodels.remove(baymodel_id)
        super(BayModelTest, self).tearDown()

    def _create_baymodel(self, baymodel_model):
        """Create a baymodel, assert 201, and register it for cleanup."""
        resp, model = self.baymodel_client.post_baymodel(baymodel_model)
        self.assertEqual(201, resp.status)
        self.baymodels.append(model.uuid)
        return resp, model

    def _delete_baymodel(self, baymodel_id):
        """Delete a baymodel and assert 204 (caller updates self.baymodels)."""
        resp, model = self.baymodel_client.delete_baymodel(baymodel_id)
        self.assertEqual(204, resp.status)
        return resp, model

    @testtools.testcase.attr('positive')
    def test_list_baymodels(self):
        gen_model = datagen.baymodel_data_with_valid_keypair_image_flavor()
        _, temp_model = self._create_baymodel(gen_model)
        resp, model = self.baymodel_client.list_baymodels()
        self.assertEqual(200, resp.status)
        self.assertGreater(len(model.baymodels), 0)
        self.assertIn(
            temp_model.uuid, list([x['uuid'] for x in model.baymodels]))

    @testtools.testcase.attr('positive')
    def test_create_baymodel(self):
        gen_model = datagen.baymodel_data_with_valid_keypair_image_flavor()
        # _create_baymodel already asserts the 201 response.
        resp, model = self._create_baymodel(gen_model)

    @testtools.testcase.attr('positive')
    def test_update_baymodel_by_uuid(self):
        gen_model = datagen.baymodel_data_with_valid_keypair_image_flavor()
        resp, old_model = self._create_baymodel(gen_model)
        patch_model = datagen.baymodel_name_patch_data()
        resp, new_model = self.baymodel_client.patch_baymodel(
            old_model.uuid, patch_model)
        self.assertEqual(200, resp.status)
        resp, model = self.baymodel_client.get_baymodel(new_model.uuid)
        self.assertEqual(200, resp.status)
        self.assertEqual(old_model.uuid, new_model.uuid)
        self.assertEqual(model.name, new_model.name)

    @testtools.testcase.attr('positive')
    def test_delete_baymodel_by_uuid(self):
        gen_model = datagen.baymodel_data_with_valid_keypair_image_flavor()
        resp, model = self._create_baymodel(gen_model)
        resp, _ = self.baymodel_client.delete_baymodel(model.uuid)
        self.assertEqual(204, resp.status)
        self.baymodels.remove(model.uuid)

    @testtools.testcase.attr('positive')
    def test_delete_baymodel_by_name(self):
        gen_model = datagen.baymodel_data_with_valid_keypair_image_flavor()
        resp, model = self._create_baymodel(gen_model)
        resp, _ = self.baymodel_client.delete_baymodel(model.name)
        self.assertEqual(204, resp.status)
        self.baymodels.remove(model.uuid)

    @testtools.testcase.attr('negative')
    def test_get_baymodel_by_uuid_404(self):
        self.assertRaises(
            exceptions.NotFound,
            self.baymodel_client.get_baymodel, data_utils.rand_uuid())

    @testtools.testcase.attr('negative')
    def test_update_baymodel_404(self):
        patch_model = datagen.baymodel_name_patch_data()
        self.assertRaises(
            exceptions.NotFound,
            self.baymodel_client.patch_baymodel,
            data_utils.rand_uuid(), patch_model)

    @testtools.testcase.attr('negative')
    def test_delete_baymodel_404(self):
        self.assertRaises(
            exceptions.NotFound,
            self.baymodel_client.delete_baymodel, data_utils.rand_uuid())

    @testtools.testcase.attr('negative')
    def test_get_baymodel_by_name_404(self):
        self.assertRaises(
            exceptions.NotFound,
            self.baymodel_client.get_baymodel, 'fooo')

    @testtools.testcase.attr('negative')
    def test_update_baymodel_name_not_found(self):
        patch_model = datagen.baymodel_name_patch_data()
        self.assertRaises(
            exceptions.NotFound,
            self.baymodel_client.patch_baymodel, 'fooo', patch_model)

    @testtools.testcase.attr('negative')
    def test_delete_baymodel_by_name_404(self):
        # Bug fix: this test duplicated test_get_baymodel_by_name_404 by
        # calling get_baymodel; it now exercises delete_baymodel as named.
        self.assertRaises(
            exceptions.NotFound,
            self.baymodel_client.delete_baymodel, 'fooo')

    @testtools.testcase.attr('negative')
    def test_create_baymodel_missing_image(self):
        gen_model = datagen.baymodel_data_with_valid_keypair()
        self.assertRaises(
            exceptions.BadRequest,
            self.baymodel_client.post_baymodel, gen_model)

    @testtools.testcase.attr('negative')
    def test_create_baymodel_missing_keypair(self):
        gen_model = datagen.baymodel_data_with_valid_image_and_flavor()
        self.assertRaises(
            exceptions.NotFound,
            self.baymodel_client.post_baymodel, gen_model)

    @testtools.testcase.attr('negative')
    def test_update_baymodel_invalid_patch(self):
        # Patching with a whole baymodel body (not a JSON patch) must 400.
        gen_model = datagen.baymodel_data_with_valid_keypair_image_flavor()
        resp, old_model = self._create_baymodel(gen_model)
        self.assertRaises(
            exceptions.BadRequest,
            self.baymodel_client.patch_baymodel, data_utils.rand_uuid(),
            gen_model)

    @testtools.testcase.attr('negative')
    def test_create_baymodel_invalid_network_driver(self):
        gen_model = datagen.baymodel_data_with_valid_keypair_image_flavor()
        gen_model.network_driver = 'invalid_network_driver'
        self.assertRaises(
            exceptions.BadRequest,
            self.baymodel_client.post_baymodel, gen_model)

    @testtools.testcase.attr('negative')
    def test_create_baymodel_invalid_volume_driver(self):
        gen_model = datagen.baymodel_data_with_valid_keypair_image_flavor()
        gen_model.volume_driver = 'invalid_volume_driver'
        self.assertRaises(
            exceptions.BadRequest,
            self.baymodel_client.post_baymodel, gen_model)
abhishek-ch/hue | desktop/core/ext-py/Django-1.6.10/tests/admin_util/tests.py | 51 | 11281 | from __future__ import absolute_import, unicode_literals
from datetime import datetime
from django.conf import settings
from django.contrib import admin
from django.contrib.admin import helpers
from django.contrib.admin.util import (display_for_field, flatten_fieldsets,
label_for_field, lookup_field, NestedObjects)
from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
from django.contrib.sites.models import Site
from django.db import models, DEFAULT_DB_ALIAS
from django import forms
from django.test import SimpleTestCase, TestCase
from django.utils.formats import localize
from django.utils.safestring import mark_safe
from django.utils import six
from .models import Article, Count, Event, Location, EventGuide
class NestedObjectsTests(TestCase):
    """
    Tests for ``NestedObject`` utility collection.

    Uses five Count rows (num 0..4); helpers wire parent links between them
    and ``_check`` compares the nested structure by ``num``.
    """
    def setUp(self):
        self.n = NestedObjects(using=DEFAULT_DB_ALIAS)
        self.objs = [Count.objects.create(num=i) for i in range(5)]

    def _check(self, target):
        # Compare the collector's nested output (keyed by num) to `target`.
        self.assertEqual(self.n.nested(lambda obj: obj.num), target)

    def _connect(self, i, j):
        # Make objs[i] a child of objs[j].
        self.objs[i].parent = self.objs[j]
        self.objs[i].save()

    def _collect(self, *indices):
        self.n.collect([self.objs[i] for i in indices])

    def test_unrelated_roots(self):
        self._connect(2, 1)
        self._collect(0)
        self._collect(1)
        self._check([0, 1, [2]])

    def test_siblings(self):
        self._connect(1, 0)
        self._connect(2, 0)
        self._collect(0)
        self._check([0, [1, 2]])

    def test_non_added_parent(self):
        # Collecting a child must not pull in its (uncollected) parent.
        self._connect(0, 1)
        self._collect(0)
        self._check([0])

    def test_cyclic(self):
        # A parent cycle (0 -> 2 -> 1 -> 0) must not loop forever.
        self._connect(0, 2)
        self._connect(1, 0)
        self._connect(2, 1)
        self._collect(0)
        self._check([0, [1, [2]]])

    def test_queries(self):
        self._connect(1, 0)
        self._connect(2, 0)
        # 1 query to fetch all children of 0 (1 and 2)
        # 1 query to fetch all children of 1 and 2 (none)
        # Should not require additional queries to populate the nested graph.
        self.assertNumQueries(2, self._collect, 0)

    def test_on_delete_do_nothing(self):
        """
        Check that the nested collector doesn't query for DO_NOTHING objects.
        """
        n = NestedObjects(using=DEFAULT_DB_ALIAS)
        objs = [Event.objects.create()]
        EventGuide.objects.create(event=objs[0])
        with self.assertNumQueries(2):
            # One for Location, one for Guest, and no query for EventGuide
            n.collect(objs)
class UtilTests(SimpleTestCase):
    """Unit tests for the contrib.admin.util helpers (lookup_field,
    display_for_field, label_for_field, flatten_fieldsets, ...)."""

    def test_values_from_lookup_field(self):
        """
        Regression test for #12654: lookup_field
        """
        SITE_NAME = 'example.com'
        TITLE_TEXT = 'Some title'
        CREATED_DATE = datetime.min
        ADMIN_METHOD = 'admin method'
        SIMPLE_FUNCTION = 'function'
        INSTANCE_ATTRIBUTE = 'attr'

        class MockModelAdmin(object):
            def get_admin_value(self, obj):
                return ADMIN_METHOD

        simple_function = lambda obj: SIMPLE_FUNCTION

        article = Article(
            site=Site(domain=SITE_NAME),
            title=TITLE_TEXT,
            created=CREATED_DATE,
        )
        article.non_field = INSTANCE_ATTRIBUTE

        # (lookup name or callable, expected resolved value) pairs covering
        # model fields, admin methods, callables, model methods and plain
        # instance attributes.
        verifications = (
            ('site', SITE_NAME),
            ('created', localize(CREATED_DATE)),
            ('title', TITLE_TEXT),
            ('get_admin_value', ADMIN_METHOD),
            (simple_function, SIMPLE_FUNCTION),
            ('test_from_model', article.test_from_model()),
            ('non_field', INSTANCE_ATTRIBUTE)
        )

        mock_admin = MockModelAdmin()
        for name, value in verifications:
            field, attr, resolved_value = lookup_field(name, article, mock_admin)

            if field is not None:
                resolved_value = display_for_field(resolved_value, field)

            self.assertEqual(value, resolved_value)

    def test_null_display_for_field(self):
        """
        Regression test for #12550: display_for_field should handle None
        value.
        """
        display_value = display_for_field(None, models.CharField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

        # A None choice label takes precedence over the empty marker.
        display_value = display_for_field(None, models.CharField(
            choices=(
                (None, "test_none"),
            )
        ))
        self.assertEqual(display_value, "test_none")

        display_value = display_for_field(None, models.DateField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

        display_value = display_for_field(None, models.TimeField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

        # Regression test for #13071: NullBooleanField has special
        # handling.
        display_value = display_for_field(None, models.NullBooleanField())
        expected = '<img src="%sadmin/img/icon-unknown.gif" alt="None" />' % settings.STATIC_URL
        self.assertHTMLEqual(display_value, expected)

        display_value = display_for_field(None, models.DecimalField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

        display_value = display_for_field(None, models.FloatField())
        self.assertEqual(display_value, EMPTY_CHANGELIST_VALUE)

    def test_label_for_field(self):
        """
        Tests for label_for_field
        """
        # Plain field name, verbose_name override, and return_attr variant.
        self.assertEqual(
            label_for_field("title", Article),
            "title"
        )
        self.assertEqual(
            label_for_field("title2", Article),
            "another name"
        )
        self.assertEqual(
            label_for_field("title2", Article, return_attr=True),
            ("another name", None)
        )

        # __unicode__/__str__ fall back to the model's verbose name.
        self.assertEqual(
            label_for_field("__unicode__", Article),
            "article"
        )
        self.assertEqual(
            label_for_field("__str__", Article),
            str("article")
        )

        self.assertRaises(
            AttributeError,
            lambda: label_for_field("unknown", Article)
        )

        def test_callable(obj):
            return "nothing"
        self.assertEqual(
            label_for_field(test_callable, Article),
            "Test callable"
        )
        self.assertEqual(
            label_for_field(test_callable, Article, return_attr=True),
            ("Test callable", test_callable)
        )

        self.assertEqual(
            label_for_field("test_from_model", Article),
            "Test from model"
        )
        self.assertEqual(
            label_for_field("test_from_model", Article, return_attr=True),
            ("Test from model", Article.test_from_model)
        )
        self.assertEqual(
            label_for_field("test_from_model_with_override", Article),
            "not What you Expect"
        )

        # Anonymous callables get the "--" placeholder label.
        self.assertEqual(
            label_for_field(lambda x: "nothing", Article),
            "--"
        )

        # A ModelAdmin method's short_description wins over the model's.
        class MockModelAdmin(object):
            def test_from_model(self, obj):
                return "nothing"
            test_from_model.short_description = "not Really the Model"

        self.assertEqual(
            label_for_field("test_from_model", Article, model_admin=MockModelAdmin),
            "not Really the Model"
        )
        self.assertEqual(
            label_for_field("test_from_model", Article,
                model_admin = MockModelAdmin,
                return_attr = True
            ),
            ("not Really the Model", MockModelAdmin.test_from_model)
        )

    def test_label_for_property(self):
        # NOTE: cannot use @property decorator, because of
        # AttributeError: 'property' object has no attribute 'short_description'
        class MockModelAdmin(object):
            def my_property(self):
                return "this if from property"
            my_property.short_description = 'property short description'
            test_from_property = property(my_property)

        self.assertEqual(
            label_for_field("test_from_property", Article, model_admin=MockModelAdmin),
            'property short description'
        )

    def test_related_name(self):
        """
        Regression test for #13963
        """
        # Related fields use the target's verbose_name when one is set.
        self.assertEqual(
            label_for_field('location', Event, return_attr=True),
            ('location', None),
        )
        self.assertEqual(
            label_for_field('event', Location, return_attr=True),
            ('awesome event', None),
        )
        self.assertEqual(
            label_for_field('guest', Event, return_attr=True),
            ('awesome guest', None),
        )

    def test_logentry_unicode(self):
        """
        Regression test for #15661
        """
        log_entry = admin.models.LogEntry()

        log_entry.action_flag = admin.models.ADDITION
        self.assertTrue(
            six.text_type(log_entry).startswith('Added ')
        )

        log_entry.action_flag = admin.models.CHANGE
        self.assertTrue(
            six.text_type(log_entry).startswith('Changed ')
        )

        log_entry.action_flag = admin.models.DELETION
        self.assertTrue(
            six.text_type(log_entry).startswith('Deleted ')
        )

        # Make sure custom action_flags works
        log_entry.action_flag = 4
        self.assertEqual(six.text_type(log_entry), 'LogEntry Object')

    def test_safestring_in_field_label(self):
        # safestring should not be escaped
        class MyForm(forms.Form):
            text = forms.CharField(label=mark_safe('<i>text</i>'))
            cb = forms.BooleanField(label=mark_safe('<i>cb</i>'))

        form = MyForm()
        self.assertHTMLEqual(helpers.AdminField(form, 'text', is_first=False).label_tag(),
                             '<label for="id_text" class="required inline"><i>text</i>:</label>')
        self.assertHTMLEqual(helpers.AdminField(form, 'cb', is_first=False).label_tag(),
                             '<label for="id_cb" class="vCheckboxLabel required inline"><i>cb</i></label>')

        # normal strings needs to be escaped
        class MyForm(forms.Form):
            text = forms.CharField(label='&text')
            cb = forms.BooleanField(label='&cb')

        form = MyForm()
        self.assertHTMLEqual(helpers.AdminField(form, 'text', is_first=False).label_tag(),
                             '<label for="id_text" class="required inline">&amp;text:</label>')
        self.assertHTMLEqual(helpers.AdminField(form, 'cb', is_first=False).label_tag(),
                             '<label for="id_cb" class="vCheckboxLabel required inline">&amp;cb</label>')

    def test_flatten_fieldsets(self):
        """
        Regression test for #18051
        """
        # Nested tuples and lists inside 'fields' are flattened alike.
        fieldsets = (
            (None, {
                'fields': ('url', 'title', ('content', 'sites'))
            }),
        )
        self.assertEqual(flatten_fieldsets(fieldsets), ['url', 'title', 'content', 'sites'])

        fieldsets = (
            (None, {
                'fields': ('url', 'title', ['content', 'sites'])
            }),
        )
        self.assertEqual(flatten_fieldsets(fieldsets), ['url', 'title', 'content', 'sites'])
| apache-2.0 |
jilljenn/django-planet | planet/views.py | 1 | 9854 | # -*- coding: utf-8 -*-
from django.core.urlresolvers import reverse, reverse_lazy
from django.forms import ValidationError
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response, get_object_or_404, redirect
from django.template import RequestContext
from django.utils.translation import ugettext_lazy as _
from django.views.generic import ListView
from django.views.generic.detail import SingleObjectMixin
from django.views.generic.edit import CreateView, DeleteView
from django.http import Http404
from planet.models import Blog, Feed, Author, Post
from planet.forms import SearchForm
from tagging.models import Tag, TaggedItem
def index(request):
    """Front page: every post on the current site, newest first."""
    latest_posts = Post.site_objects.all().order_by("-date_modified")
    return render_to_response(
        "planet/posts/list.html", {"posts": latest_posts},
        context_instance=RequestContext(request))
def blogs_list(request):
    """List every blog registered for the current site."""
    blogs_list = Blog.site_objects.all()
    return render_to_response("planet/blogs/list.html",
        {"blogs_list": blogs_list}, context_instance=RequestContext(request))
def blog_detail(request, blog_id, slug=None):
    """Show one blog with its posts (newest first); permanently redirect to
    the canonical slugged URL when no slug was given."""
    blog = get_object_or_404(Blog, pk=blog_id)

    if slug is None:
        return redirect(blog, permanent=True)

    posts = Post.site_objects.filter(feed__blog=blog).order_by("-date_modified")

    return render_to_response("planet/blogs/detail.html",
        {"blog": blog, "posts": posts},
        context_instance=RequestContext(request))
def feeds_list(request):
    """List every feed registered for the current site."""
    feeds_list = Feed.site_objects.all()
    return render_to_response("planet/feeds/list.html",
        {"feeds_list": feeds_list}, context_instance=RequestContext(request))
def feed_detail(request, feed_id, tag=None, slug=None):
    """Show one feed's posts (optionally restricted to a tag), newest first;
    permanently redirect to the canonical slugged URL when no slug given."""
    feed = get_object_or_404(Feed, pk=feed_id)

    if not slug:
        return redirect(feed, permanent=True)

    if tag:
        # `tag` arrives as a name string and is resolved to a Tag instance.
        tag = get_object_or_404(Tag, name=tag)
        posts = TaggedItem.objects.get_by_model(
            Post.site_objects, tag).filter(feed=feed).order_by("-date_modified")
    else:
        posts = Post.site_objects.filter(feed=feed).order_by("-date_modified")

    return render_to_response("planet/feeds/detail.html",
        {"feed": feed, "posts": posts, "tag": tag},
        context_instance=RequestContext(request))
def authors_list(request):
    """List every author registered for the current site."""
    authors = Author.site_objects.all()
    return render_to_response("planet/authors/list.html",
        {"authors_list": authors},
        context_instance=RequestContext(request))
def author_detail(request, author_id, tag=None, slug=None):
    """Show one author's posts (optionally restricted to a tag), newest
    first; permanently redirect to the slugged URL when no slug given."""
    author = get_object_or_404(Author, pk=author_id)

    if not slug:
        return redirect(author, permanent=True)

    if tag:
        # `tag` arrives as a name string and is resolved to a Tag instance.
        tag = get_object_or_404(Tag, name=tag)
        posts = TaggedItem.objects.get_by_model(Post.site_objects, tag).filter(
            authors=author).order_by("-date_modified")
    else:
        posts = Post.site_objects.filter(
            authors=author).order_by("-date_modified")

    return render_to_response("planet/authors/detail.html",
        {"author": author, "posts": posts, "tag": tag},
        context_instance=RequestContext(request))
def posts_list(request):
    """List all posts for the current site, newest first, prefetching the
    related feed/blog/authors rows to avoid per-row queries."""
    queryset = (Post.site_objects.all()
                .select_related("feed", "blog", "authors")
                .order_by("-date_modified"))
    context = {"posts": queryset}
    return render_to_response("planet/posts/list.html", context,
                              context_instance=RequestContext(request))
def post_detail(request, post_id, slug=None):
    """Show a single post; permanently redirect to the canonical slugged
    URL when no slug was given."""
    post = get_object_or_404(
        Post.objects.select_related("feed", "authors", "blog"), pk=post_id)

    if not slug:
        return redirect(post, permanent=True)

    return render_to_response("planet/posts/detail.html", {"post": post},
        context_instance=RequestContext(request))
def tag_detail(request, tag):
    """List the current site's posts carrying the given tag, newest first."""
    tag = get_object_or_404(Tag, name=tag)
    posts = TaggedItem.objects.get_by_model(
        Post.site_objects, tag).order_by("-date_modified")

    return render_to_response("planet/tags/detail.html", {"posts": posts,
        "tag": tag}, context_instance=RequestContext(request))
def tag_authors_list(request, tag):
    """List the distinct authors of the current site's posts that carry the
    given tag."""
    tag = get_object_or_404(Tag, name=tag)
    tagged_posts = TaggedItem.objects.get_by_model(Post.site_objects, tag)

    # De-duplicate authors across all tagged posts.
    authors = {author for post in tagged_posts for author in post.authors.all()}

    return render_to_response("planet/authors/list_for_tag.html",
        {"authors": list(authors), "tag": tag},
        context_instance=RequestContext(request))
def tag_feeds_list(request, tag):
    """List the distinct feeds whose posts carry the given tag."""
    tag = get_object_or_404(Tag, name=tag)
    post_ids = TaggedItem.objects.get_by_model(Post.site_objects, tag
        ).values_list("id", flat=True)
    feeds_list = Feed.site_objects.filter(post__in=post_ids).distinct()

    return render_to_response("planet/feeds/list_for_tag.html",
        {"feeds_list": feeds_list, "tag": tag},
        context_instance=RequestContext(request))
def tags_cloud(request, min_posts_count=1):
    """Render the tag cloud for Post.

    NOTE(review): min_posts_count is accepted (the URLconf may pass it) but
    is currently ignored — cloud_for_model is called without it; confirm
    whether it should be forwarded as min_count.
    """
    tags_cloud = Tag.objects.cloud_for_model(Post)
    return render_to_response("planet/tags/cloud.html",
        {"tags_cloud": tags_cloud}, context_instance=RequestContext(request))
def foaf(request):
    """Serve a FOAF (friend-of-a-friend) XML document for all site feeds."""
    # TODO: use http://code.google.com/p/django-foaf/ instead of this
    feeds = Feed.site_objects.all().select_related("blog")

    return render_to_response("planet/microformats/foaf.xml", {"feeds": feeds},
        context_instance=RequestContext(request), content_type="text/xml")
def opml(request):
    """Serve an OPML subscription list covering all site feeds."""
    feeds = Feed.site_objects.all().select_related("blog")

    return render_to_response("planet/microformats/opml.xml", {"feeds": feeds},
        context_instance=RequestContext(request), content_type="text/xml")
def search(request):
    """Handle the site-wide search box.

    Expects GET parameters search="go", q=<query> and w=<section>. Renders
    the matching section's list template; any malformed or unknown request
    redirects to the posts list (same fallbacks as before).
    """
    if request.method == "GET" and request.GET.get("search") == "go":
        search_form = SearchForm(request.GET)

        if search_form.is_valid():
            query = search_form.cleaned_data["q"]
            section = search_form.cleaned_data["w"]

            # The five branches of the original differed only in these
            # values; collapsed into one dispatch table. Per-section
            # distinct() and ordering are preserved exactly.
            # (manager, lookup, use distinct(), ordering, template, ctx key)
            sections = {
                "posts": (Post.site_objects, "title__icontains", True,
                          "-date_modified", "planet/posts/list.html", "posts"),
                "tags": (Tag.objects, "name__icontains", True,
                         "name", "planet/tags/list.html", "tags_list"),
                "blogs": (Blog.site_objects, "title__icontains", False,
                          "title", "planet/blogs/list.html", "blogs_list"),
                "feeds": (Feed.site_objects, "title__icontains", False,
                          "title", "planet/feeds/list.html", "feeds_list"),
                "authors": (Author.site_objects, "name__icontains", False,
                            "name", "planet/authors/list.html", "authors_list"),
            }

            if section in sections:
                manager, lookup, distinct, ordering, template, key = \
                    sections[section]
                results = manager.filter(**{lookup: query})
                if distinct:
                    results = results.distinct()
                results = results.order_by(ordering)
                return render_to_response(template, {key: results},
                    context_instance=RequestContext(request))

    # Non-GET, invalid form, or unknown section: fall back to the post list.
    return HttpResponseRedirect(reverse("planet_post_list"))
class FeedAddView(CreateView):
    """Create a Feed from a submitted URL and claim its blog for the user."""
    model = Feed
    fields = ["url"]
    template_name = 'planet/feeds/add.html'
    success_message = _("Feed with url=%(url)s was created successfully")

    # NOTE(review): clean_url is a ModelForm hook, but this is a CreateView —
    # Django never calls it here, so the duplicate-URL check is dead code.
    # Move it onto a custom form (set form_class) to activate it.
    def clean_url(self):
        url = self.cleaned_data['url']
        if Feed.objects.filter(url=url).count() > 0:
            raise ValidationError(_('A feed with this URL already exists.'))
        return url

    def form_valid(self, form):
        feed = form.save()
        if self.request.user.is_authenticated():
            # Assumes feed.blog is populated by Feed.save() — TODO confirm;
            # a None blog would raise AttributeError here.
            feed.blog.owner = self.request.user
            feed.blog.save()
        self.object = feed
        # Redirects to the index rather than the model's get_absolute_url.
        return HttpResponseRedirect(reverse("planet_index"))
class BlogListByUserView(ListView):
    """List the blogs owned by the requesting user."""
    template_name = 'planet/blogs/list_by_user.html'
    model = Blog

    def get_queryset(self):
        # NOTE(review): assumes request.user is authenticated — confirm the
        # URLconf applies login_required before relying on this.
        return Blog.objects.filter(owner=self.request.user)
class OwnedObjectMixin(SingleObjectMixin):
    """
    An object that needs to verify current user ownership
    before allowing manipulation.

    From https://github.com/PyAr/pyarweb/blob/b4095c5c1b474a207e45918683de400974f6a739/community/views.py#L43
    """
    def get_object(self, *args, **kwargs):
        obj = super(OwnedObjectMixin, self).get_object(*args, **kwargs)
        try:
            # Raise 404 (not 403) so non-owners can't probe for existence.
            if not obj.owner == self.request.user:
                raise Http404()
        except AttributeError:
            # Objects without an `owner` attribute skip the ownership check.
            pass
        return obj
class BlogDeleteView(DeleteView, OwnedObjectMixin):
    """Delete a blog after OwnedObjectMixin confirms the requester owns it.

    NOTE(review): mixins conventionally come first in the base list; the MRO
    still resolves get_object to OwnedObjectMixin here (it subclasses
    SingleObjectMixin), but confirm before reordering or extending.
    """
    template_name = 'planet/blogs/confirm_delete.html'
    model = Blog
    success_url = reverse_lazy('planet_blog_list_by_user')
| bsd-3-clause |
cvegaj/ElectriCERT | venv3/lib/python3.6/site-packages/pkg_resources/extern/__init__.py | 245 | 2487 | import sys
class VendorImporter:
    """
    A PEP 302 meta path importer for finding optionally-vendored
    or otherwise naturally-installed packages from root_name.
    """

    def __init__(self, root_name, vendored_names=(), vendor_pkg=None):
        self.root_name = root_name
        self.vendored_names = set(vendored_names)
        self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor')

    @property
    def search_path(self):
        """
        Search first the vendor package then as a natural package.
        """
        for candidate in (self.vendor_pkg + '.', ''):
            yield candidate

    def find_module(self, fullname, path=None):
        """
        Return self when fullname starts with root_name and the
        target module is one vendored through this importer.
        """
        root, _, target = fullname.partition(self.root_name + '.')
        if root:
            return None
        matches = (target.startswith(name) for name in self.vendored_names)
        return self if any(matches) else None

    def load_module(self, fullname):
        """
        Iterate over the search path to locate and load fullname.
        """
        _, _, target = fullname.partition(self.root_name + '.')
        for prefix in self.search_path:
            extant = prefix + target
            try:
                __import__(extant)
            except ImportError:
                continue
            mod = sys.modules[extant]
            sys.modules[fullname] = mod
            # mysterious hack:
            # Remove the reference to the extant package/module
            # on later Python versions to cause relative imports
            # in the vendor package to resolve the same modules
            # as those going through this importer.
            if sys.version_info > (3, 3):
                del sys.modules[extant]
            return mod
        raise ImportError(
            "The '{target}' package is required; "
            "normally this is bundled with this package so if you get "
            "this warning, consult the packager of your "
            "distribution.".format(**locals())
        )

    def install(self):
        """
        Install this importer into sys.meta_path if not already present.
        """
        if self not in sys.meta_path:
            sys.meta_path.append(self)
# Packages served from the vendored copies under pkg_resources._vendor.
names = 'packaging', 'pyparsing', 'six', 'appdirs'
# Register on sys.meta_path so ``pkg_resources.extern.<name>`` imports resolve.
VendorImporter(__name__, names).install()
| gpl-3.0 |
OmgOhnoes/Flexget | flexget/plugins/list/pending_list.py | 3 | 8325 | from __future__ import unicode_literals, division, absolute_import
from builtins import * # pylint: disable=unused-import, redefined-builtin
import logging
from collections import MutableSet
from datetime import datetime
from sqlalchemy import Column, Unicode, Integer, DateTime, or_, func, Boolean
from sqlalchemy.orm import relationship
from sqlalchemy.sql.elements import and_
from sqlalchemy.sql.schema import ForeignKey
from flexget import plugin, db_schema
from flexget.db_schema import versioned_base
from flexget.entry import Entry
from flexget.event import event
from flexget.manager import Session
from flexget.utils.database import entry_synonym, with_session
plugin_name = 'pending_list'
log = logging.getLogger(plugin_name)
Base = versioned_base(plugin_name, 0)
@db_schema.upgrade(plugin_name)
def upgrade(ver, session):
    """Schema upgrade hook for this plugin; the schema is still at version 0."""
    return 0
class PendingListList(Base):
    """ORM model: one named pending list, owning its entries."""
    __tablename__ = 'pending_list_lists'
    id = Column(Integer, primary_key=True)
    name = Column(Unicode, unique=True)
    added = Column(DateTime, default=datetime.now)
    # Deleting a list deletes its entries; 'dynamic' keeps entries as a query.
    entries = relationship('PendingListEntry', backref='list', cascade='all, delete, delete-orphan', lazy='dynamic')

    def to_dict(self):
        """Return an API-facing dict representation of this list."""
        return {
            'id': self.id,
            'name': self.name,
            'added_on': self.added
        }
class PendingListEntry(Base):
    """ORM model: one entry of a pending list, stored as serialized JSON."""
    # NOTE(review): table name differs from the class/plugin name — presumably
    # a legacy name kept for migration compatibility; confirm before renaming.
    __tablename__ = 'wait_list_entries'
    id = Column(Integer, primary_key=True)
    list_id = Column(Integer, ForeignKey(PendingListList.id), nullable=False)
    added = Column(DateTime, default=datetime.now)
    title = Column(Unicode)
    original_url = Column(Unicode)
    _json = Column('json', Unicode)
    # ``entry`` transparently (de)serializes the Entry to/from the json column.
    entry = entry_synonym('_json')
    approved = Column(Boolean)

    def __init__(self, entry, pending_list_id):
        self.title = entry['title']
        # Prefer the entry's original_url, falling back to its current url.
        self.original_url = entry.get('original_url') or entry['url']
        self.entry = entry
        self.list_id = pending_list_id
        # New entries always start out unapproved.
        self.approved = False

    def __repr__(self):
        return '<PendingListEntry,title=%s,original_url=%s,approved=%s>' % (
            self.title, self.original_url, self.approved)

    def to_dict(self):
        """Return an API-facing dict representation of this entry."""
        return {
            'id': self.id,
            'list_id': self.list_id,
            'added_on': self.added,
            'title': self.title,
            'original_url': self.original_url,
            'entry': dict(self.entry),
            'approved': self.approved
        }
class PendingListSet(MutableSet):
    """MutableSet interface over the entries of one named pending list.

    Iteration and ``get`` expose only *approved* entries, while membership,
    length, ``add`` and ``discard`` operate on all stored entries.
    """

    def _db_list(self, session):
        """Return the PendingListList row for this list name, or None."""
        return session.query(PendingListList).filter(PendingListList.name == self.config).first()

    def __init__(self, config):
        # ``config`` is the list name; create the backing row on first use.
        self.config = config
        with Session() as session:
            if not self._db_list(session):
                session.add(PendingListList(name=self.config))

    def _entry_query(self, session, entry, approved=None):
        """Find the stored entry matching ``entry`` by title or original URL.

        When ``approved`` is truthy, only approved entries match.
        """
        query = session.query(PendingListEntry).filter(PendingListEntry.list_id == self._db_list(session).id). \
            filter(or_(PendingListEntry.title == entry['title'],
                       and_(PendingListEntry.original_url, PendingListEntry.original_url == entry['original_url'])))
        if approved:
            query = query.filter(PendingListEntry.approved == True)
        return query.first()

    def __iter__(self):
        # Only approved entries are yielded, newest first.
        with Session() as session:
            for e in self._db_list(session).entries.filter(PendingListEntry.approved == True).order_by(
                    PendingListEntry.added.desc()).all():
                log.debug('returning %s', e.entry)
                yield e.entry

    def __contains__(self, entry):
        with Session() as session:
            return self._entry_query(session, entry) is not None

    def __len__(self):
        with Session() as session:
            return self._db_list(session).entries.count()

    def discard(self, entry):
        """Remove the matching entry if present (approved or not)."""
        with Session() as session:
            db_entry = self._entry_query(session=session, entry=entry)
            if db_entry:
                log.debug('deleting entry %s', db_entry)
                session.delete(db_entry)

    def add(self, entry):
        """Insert ``entry``, or refresh the stored copy if it already exists."""
        # Evaluate all lazy fields so that no db access occurs during our db session
        entry.values()
        with Session() as session:
            stored_entry = self._entry_query(session, entry)
            if stored_entry:
                # Refresh all the fields if we already have this entry
                log.debug('refreshing entry %s', entry)
                stored_entry.entry = entry
            else:
                log.debug('adding entry %s to list %s', entry, self._db_list(session).name)
                stored_entry = PendingListEntry(entry=entry, pending_list_id=self._db_list(session).id)
                session.add(stored_entry)

    def __ior__(self, other):
        # Optimization to only open one session when adding multiple items
        # Make sure lazy lookups are done before opening our session to prevent db locks
        for value in other:
            value.values()
        for value in other:
            self.add(value)
        return self

    @property
    def immutable(self):
        # Falsy: this list supports modification through the list interface.
        return False

    def _from_iterable(self, it):
        # Used by MutableSet mixin methods to build plain sets from iterables.
        return set(it)

    @property
    def online(self):
        """ Set the online status of the plugin, online plugin should be treated differently in certain situations,
        like test mode"""
        return False

    def get(self, entry):
        """Return the stored *approved* Entry matching ``entry``, or None."""
        with Session() as session:
            match = self._entry_query(session=session, entry=entry, approved=True)
            return Entry(match.entry) if match else None
class PendingList(object):
    """Plugin exposing a named pending list as a list interface and task input."""

    schema = {'type': 'string'}

    @staticmethod
    def get_list(config):
        """Return the backing set for the list named by ``config``."""
        return PendingListSet(config)

    def on_task_input(self, task, config):
        """Yield every approved entry of the named list as task input."""
        return [approved_entry for approved_entry in PendingListSet(config)]
@event('plugin.register')
def register_plugin():
    """Register the pending_list plugin under both task and list interfaces."""
    plugin.register(PendingList, plugin_name, api_ver=2, interfaces=['task', 'list'])
@with_session
def get_pending_lists(name=None, session=None):
    """Return all pending lists, optionally filtered by a (partial) name."""
    log.debug('retrieving pending lists')
    lists_query = session.query(PendingListList)
    if name:
        log.debug('searching for pending lists with name %s', name)
        lists_query = lists_query.filter(PendingListList.name.contains(name))
    return lists_query.all()
@with_session
def get_list_by_exact_name(name, session=None):
    """Return the single pending list whose name matches case-insensitively."""
    log.debug('returning pending list with name %s', name)
    name_matches = func.lower(PendingListList.name) == name.lower()
    return session.query(PendingListList).filter(name_matches).one()
@with_session
def get_list_by_id(list_id, session=None):
    """Return the pending list with the given primary key."""
    log.debug('returning pending list with id %d', list_id)
    id_matches = PendingListList.id == list_id
    return session.query(PendingListList).filter(id_matches).one()
@with_session
def delete_list_by_id(list_id, session=None):
    """Delete the pending list with the given id (entries cascade)."""
    target_list = get_list_by_id(list_id=list_id, session=session)
    if not target_list:
        return
    log.debug('deleting pending list with id %d', list_id)
    session.delete(target_list)
@with_session
def get_entries_by_list_id(list_id, start=None, stop=None, order_by='title', descending=False, approved=False,
                           session=None):
    """Return a slice of the entries of one pending list.

    :param list_id: id of the list to query
    :param start/stop: optional slice bounds applied after ordering
    :param order_by: PendingListEntry column name to order by
    :param descending: reverse the ordering when True
    :param approved: when True, return only approved entries
    """
    log.debug('querying entries from pending list with id %d', list_id)
    query = session.query(PendingListEntry).filter(PendingListEntry.list_id == list_id)
    if approved:
        # BUG FIX: the original ``PendingListEntry.approved is approved`` compared
        # Python object identity (always False) instead of building a SQL
        # expression, so the filter silently matched nothing as intended.
        # Use a column comparison, matching the style used elsewhere in this file.
        query = query.filter(PendingListEntry.approved == True)  # noqa
    if descending:
        query = query.order_by(getattr(PendingListEntry, order_by).desc())
    else:
        query = query.order_by(getattr(PendingListEntry, order_by))
    return query.slice(start, stop).all()
@with_session
def get_entry_by_title(list_id, title, session=None):
    """Return the first entry with ``title`` in the given list, or None.

    Returns None as well when the list itself does not exist.
    """
    if not get_list_by_id(list_id=list_id, session=session):
        return None
    log.debug('fetching entry with title `%s` from list id %d', title, list_id)
    in_list = PendingListEntry.list_id == list_id
    has_title = PendingListEntry.title == title
    return session.query(PendingListEntry).filter(and_(has_title, in_list)).first()
@with_session
def get_entry_by_id(list_id, entry_id, session=None):
    """Return the entry with ``entry_id`` belonging to list ``list_id``."""
    log.debug('fetching entry with id %d from list id %d', entry_id, list_id)
    id_matches = PendingListEntry.id == entry_id
    in_list = PendingListEntry.list_id == list_id
    return session.query(PendingListEntry).filter(and_(id_matches, in_list)).one()
| mit |
bartoldeman/easybuild-framework | easybuild/toolchains/compiler/gcc.py | 1 | 8302 | ##
# Copyright 2012-2018 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Support for GCC (GNU Compiler Collection) as toolchain compiler.
:author: Stijn De Weirdt (Ghent University)
:author: Kenneth Hoste (Ghent University)
"""
import re
from distutils.version import LooseVersion
import easybuild.tools.systemtools as systemtools
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.modules import get_software_root, get_software_version
from easybuild.tools.toolchain.compiler import Compiler, DEFAULT_OPT_LEVEL
TC_CONSTANT_GCC = "GCC"
class Gcc(Compiler):
    """GCC compiler class: toolchain definitions and flag handling for GCC."""

    COMPILER_MODULE_NAME = ['GCC']
    COMPILER_FAMILY = TC_CONSTANT_GCC

    # GCC-specific toolchain options: (default, help text)
    COMPILER_UNIQUE_OPTS = {
        'loop': (False, "Automatic loop parallellisation"),
        'f2c': (False, "Generate code compatible with f2c and f77"),
        'lto': (False, "Enable Link Time Optimization"),
    }
    # toolchain option name -> GCC flag(s) (without leading '-')
    COMPILER_UNIQUE_OPTION_MAP = {
        'i8': 'fdefault-integer-8',
        'r8': 'fdefault-real-8',
        'unroll': 'funroll-loops',
        'f2c': 'ff2c',
        'loop': ['ftree-switch-conversion', 'floop-interchange', 'floop-strip-mine', 'floop-block'],
        'lto': 'flto',
        'ieee': ['mieee-fp', 'fno-trapping-math'],
        'strict': ['mieee-fp', 'mno-recip'],
        'precise': ['mno-recip'],
        'defaultprec': ['fno-math-errno'],
        'loose': ['fno-math-errno', 'mrecip', 'mno-ieee-fp'],
        'veryloose': ['fno-math-errno', 'mrecip=all', 'mno-ieee-fp'],
        'vectorize': {False: 'fno-tree-vectorize', True: 'ftree-vectorize'},
        DEFAULT_OPT_LEVEL: ['O2', 'ftree-vectorize'],
    }

    # used when 'optarch' toolchain option is enabled (and --optarch is not specified)
    COMPILER_OPTIMAL_ARCHITECTURE_OPTION = {
        (systemtools.AARCH32, systemtools.ARM): 'mcpu=native',  # implies -march=native and -mtune=native
        (systemtools.AARCH64, systemtools.ARM): 'mcpu=native',  # since GCC 6; implies -march=native and -mtune=native
        (systemtools.POWER, systemtools.POWER): 'mcpu=native',  # no support for -march on POWER; implies -mtune=native
        (systemtools.POWER, systemtools.POWER_LE): 'mcpu=native',  # no support for -march on POWER; implies -mtune=native
        (systemtools.X86_64, systemtools.AMD): 'march=native',  # implies -mtune=native
        (systemtools.X86_64, systemtools.INTEL): 'march=native',  # implies -mtune=native
    }
    # used with --optarch=GENERIC
    COMPILER_GENERIC_OPTION = {
        (systemtools.AARCH32, systemtools.ARM): 'mcpu=generic-armv7',  # implies -march=armv7 and -mtune=generic-armv7
        (systemtools.AARCH64, systemtools.ARM): 'mcpu=generic',  # implies -march=armv8-a and -mtune=generic
        (systemtools.POWER, systemtools.POWER): 'mcpu=powerpc64',  # no support for -march on POWER
        (systemtools.POWER, systemtools.POWER_LE): 'mcpu=powerpc64le',  # no support for -march on POWER
        (systemtools.X86_64, systemtools.AMD): 'march=x86-64 -mtune=generic',
        (systemtools.X86_64, systemtools.INTEL): 'march=x86-64 -mtune=generic',
    }

    COMPILER_CC = 'gcc'
    COMPILER_CXX = 'g++'
    COMPILER_C_UNIQUE_FLAGS = []

    COMPILER_F77 = 'gfortran'
    COMPILER_F90 = 'gfortran'
    COMPILER_FC = 'gfortran'
    COMPILER_F_UNIQUE_FLAGS = ['f2c']

    LIB_MULTITHREAD = ['pthread']
    LIB_MATH = ['m']

    def _set_compiler_vars(self):
        """Define GCC-specific toolchain variables (FLIBS, LDFLAGS lib dirs)."""
        super(Gcc, self)._set_compiler_vars()

        if self.options.get('32bit', None):
            raise EasyBuildError("_set_compiler_vars: 32bit set, but no support yet for 32bit GCC in EasyBuild")

        # to get rid of lots of problems with libgfortranbegin
        # or remove the system gcc-gfortran
        # also used in eg LIBBLAS variable
        self.variables.nappend('FLIBS', "gfortran", position=5)

        # append lib dir paths to LDFLAGS (only if the paths are actually there)
        # Note: hardcode 'GCC' here; we can not reuse COMPILER_MODULE_NAME because
        # it can be redefined by combining GCC with other compilers (e.g., Clang).
        gcc_root = get_software_root('GCCcore')
        if gcc_root is None:
            gcc_root = get_software_root('GCC')
            if gcc_root is None:
                raise EasyBuildError("Failed to determine software root for GCC")

        self.variables.append_subdirs("LDFLAGS", gcc_root, subdirs=["lib64", "lib"])

    def _set_optimal_architecture(self, default_optarch=None):
        """
        GCC-specific adjustments for optimal architecture flags.

        :param default_optarch: default value to use for optarch, rather than using default value based on architecture
                                (--optarch and --optarch=GENERIC still override this value)
        """
        if default_optarch is None and self.arch == systemtools.AARCH64:
            gcc_version = get_software_version('GCCcore')
            if gcc_version is None:
                gcc_version = get_software_version('GCC')
                if gcc_version is None:
                    raise EasyBuildError("Failed to determine software version for GCC")

            if LooseVersion(gcc_version) < LooseVersion('6'):
                # on AArch64, -mcpu=native is not supported prior to GCC 6,
                # so try to guess a proper default optarch if none was specified
                default_optarch = self._guess_aarch64_default_optarch()

        super(Gcc, self)._set_optimal_architecture(default_optarch=default_optarch)

    def _guess_aarch64_default_optarch(self):
        """
        Guess default optarch for AARCH64 (vanilla ARM cores only)
        This heuristic may fail if the CPU module is not supported by the GCC version being used.
        """
        default_optarch = None
        cpu_vendor = systemtools.get_cpu_vendor()
        cpu_model = systemtools.get_cpu_model()

        if cpu_vendor == systemtools.ARM and cpu_model.startswith('ARM '):
            self.log.debug("Determining architecture-specific optimization flag for ARM (model: %s)", cpu_model)
            core_types = []
            for core_type in [ct.strip().lower() for ct in cpu_model[4:].split('+')]:
                # Determine numeric ID for each core type, since we need to sort them later numerically
                # BUG FIX: use a raw string for the regex; '\d' in a plain string
                # is an invalid escape sequence (SyntaxWarning on modern Python).
                res = re.search(r'\d+$', core_type)  # note: numeric ID is expected at the end
                if res:
                    core_id = int(res.group(0))
                    core_types.append((core_id, core_type))
                    self.log.debug("Extracted numeric ID for ARM core type '%s': %s", core_type, core_id)
                else:
                    # Bail out if we can't determine numeric ID
                    core_types = None
                    self.log.debug("Failed to extract numeric ID for ARM core type '%s', bailing out", core_type)
                    break

            if core_types:
                # On big.LITTLE setups, sort core types to have big core (higher model number) first.
                # Example: 'mcpu=cortex-a72.cortex-a53' for "ARM Cortex-A53 + Cortex-A72"
                default_optarch = 'mcpu=%s' % '.'.join([ct[1] for ct in sorted(core_types, reverse=True)])
                self.log.debug("Using architecture-specific compiler optimization flag '%s'", default_optarch)

        return default_optarch
| gpl-2.0 |
jf-parent/webbase | {{cookiecutter.project_name}}/admin/app.py | 1 | 11726 | import os
import importlib
import base64
import logging
from logging.handlers import TimedRotatingFileHandler
import sys
import asyncio
from cryptography import fernet
from pymongo import MongoClient
from bson.objectid import ObjectId
from wtforms import form, fields, validators
from flask import Flask, url_for, redirect, request, flash
import flask_admin as admin
from flask_admin import expose, helpers
import flask_login as login
from flask_mongoengine.wtf.fields import DictField
from flask_admin.babel import gettext
from flask_admin.form import Select2Widget
from flask_admin.contrib.pymongo import ModelView
from flask.ext.session import Session
# PATH
HERE = os.path.abspath(os.path.dirname(__file__))
ROOT = os.path.join(HERE, '..')
sys.path.append(ROOT)
from server.model.user import User # noqa
from server.utils import DbSessionContext # noqa
from server.settings import config # noqa
from server import exceptions # noqa
# LOOP
loop = asyncio.get_event_loop()
asyncio.set_event_loop(loop)
# CONFIG
config_path = os.path.join(ROOT, 'configs', 'server.json')
config.configure(config_path)
# LOGGING
# DISABLE werkzeug
werkzeug_logger = logging.getLogger('werkzeug')
werkzeug_logger.setLevel(logging.ERROR)
# {{cookiecutter.project_name|upper}} ADMIN
logger = logging.getLogger('{{cookiecutter.project_name|lower}}_admin')
logger.setLevel(getattr(logging, config.get('admin').get('log_level', 'INFO')))
formatter = logging.Formatter(
'[L:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s',
datefmt='%d-%m-%Y %H:%M:%S'
)
# StreamHandler
sh = logging.StreamHandler()
sh.setFormatter(formatter)
logger.addHandler(sh)
# FileHandler
fh = TimedRotatingFileHandler(
os.path.join(ROOT, 'logs', 'admin_server.log'),
when="midnight"
)
fh.setFormatter(formatter)
logger.addHandler(fh)
# MONGO
pymongo_client = MongoClient()
db = pymongo_client[config.get('mongo_database_name')]
# APP
app = Flask(__name__)
# SECRET KEY
fernet_key = fernet.Fernet.generate_key()
secret_key = base64.urlsafe_b64decode(fernet_key)
app.config['SECRET_KEY'] = secret_key
# SESSION
app.config['SESSION_TYPE'] = 'redis'
sess = Session()
sess.init_app(app)
class BaseView(ModelView):
    """Base flask-admin view: requires login, and routes create/update through
    the server-side model layer (validate_and_save) instead of raw pymongo."""

    def is_accessible(self):
        # Only the authenticated admin user may see these views.
        return login.current_user.is_authenticated

    def inaccessible_callback(self, name, **kwargs):
        # Anonymous users are bounced back to the index (login) page.
        return redirect(url_for('index'))

    def create_model(self, form):
        # NOTE(review): the result of on_model_change is ignored here, so a
        # failed validation still reports success to flask-admin — confirm.
        model = form.data
        self.on_model_change(form, model, True)
        return True

    def update_model(self, form, model):
        # NOTE(review): same as create_model — return value is ignored.
        self.on_model_change(form, model, False)
        return True

    def on_model_change(self, form, model, is_created):
        """Validate and persist the change through server.model.<ViewName>.

        Imports the model module based on the view's ``name``, runs the async
        validate_and_save coroutine on the module-level event loop, then
        mirrors the form data into the raw pymongo collection.
        Returns True on success, False (after flashing an error) on failure.
        """
        logger.info("on_model_change")
        with DbSessionContext(config.get('mongo_database_name')) as session:
            try:
                # server.model.<name> module provides the class named <name>.
                m = importlib.import_module(
                    'server.model.{model}'
                    .format(model=self.name.lower())
                )
                model_class = getattr(m, self.name)
                if not is_created:
                    model_obj = session.query(model_class)\
                        .filter(model_class.mongo_id == model['_id'])\
                        .one()
                else:
                    model_obj = model_class()

                # Context consumed by validate_and_save.
                context = {}
                context['db_session'] = session
                context['author'] = login.current_user
                context['data'] = form.data
                context['save'] = True

                # validate_and_save is a coroutine; run it synchronously here.
                loop.run_until_complete(model_obj.validate_and_save(context))

                pk = model_obj.get_uid()
                # Mirror the change into the pymongo collection flask-admin uses.
                self.coll.update({'_id': pk}, model)
            except Exception as e:
                if isinstance(e, exceptions.ServerBaseException):
                    flash(
                        gettext(
                            'Failed to update record. %(exception)s(%(error)s)',
                            exception=e.get_name(),
                            error=e
                        ),
                        'error'
                    )
                else:
                    flash(
                        gettext(
                            'Failed to update record. %(error)s',
                            error=e
                        ),
                        'error'
                    )
                return False
            else:
                # NOTE(review): is_created is hard-coded to True here — confirm.
                self.after_model_change(form, model, True)
            return True
# TODO refactor and make it more general
# currently only support the mapping between
# model.user_uid to user.email
class UidToEmailView(BaseView):
    """View mixin that resolves each record's ``user_uid`` to the owning
    user's email for display, choice fields, and search."""

    def get_list(self, *args, **kwargs):
        # Decorate each listed record with a 'user_email' field resolved
        # from the User collection in a single batched query.
        count, data = super(UidToEmailView, self).get_list(*args, **kwargs)
        query = {'_id': {'$in': [x['user_uid'] for x in data]{{"}}"}}
        users = db.User.find(query, projection=['email'])
        users_map = dict((x['_id'], x['email']) for x in users)
        for item in data:
            item['user_email'] = users_map.get(item['user_uid'])
        return count, data

    def _feed_user_choices(self, form):
        # Populate the user_uid select field with (uid, email) pairs.
        users = db.User.find(projection=['email'])
        form.user_uid.choices = [(str(x['_id']), x['email']) for x in users]
        return form

    def create_form(self):
        form = super(UidToEmailView, self).create_form()
        return self._feed_user_choices(form)

    def edit_form(self, obj):
        form = super(UidToEmailView, self).edit_form(obj)
        return self._feed_user_choices(form)

    def on_model_change(self, form, model, is_created):
        # The form submits the uid as a string; convert to ObjectId for mongo.
        user_uid = model.get('user_uid')
        model['user_uid'] = ObjectId(user_uid)
        return super(UidToEmailView, self).on_model_change(
            form,
            model,
            is_created
        )

    def _search(self, query, search_term):
        """Search records by the *email* of the owning user."""
        m = importlib.import_module(
            'server.model.{model}'.format(model=self.name.lower())
        )
        model_class = getattr(m, self.name)
        with DbSessionContext(config.get('mongo_database_name')) as session:
            user_query = session.query(User)\
                .filter(User.email == search_term)
            if user_query.count():
                user = user_query.one()
                query_model = session.query(model_class)\
                    .filter(model_class.user_uid == user.get_uid())
                query = query_model.query
        return query
class EmailConfirmationTokenForm(form.Form):
    """Admin form for email-confirmation token records."""
    # Owner, presented as an email dropdown by UidToEmailView.
    user_uid = fields.SelectField('User', widget=Select2Widget())
    token = fields.TextField()
    used = fields.BooleanField()
class EmailConfirmationTokenView(UidToEmailView):
    """Admin view for email-confirmation tokens, searchable by owner email."""
    column_list = ('token', 'user_email', 'used')
    column_sortable_list = ('token', 'user_email', 'used')
    # BUG FIX: ('user_uid') is just a parenthesised *string*, which flask-admin
    # would iterate character by character; a one-element tuple needs a comma.
    column_searchable_list = ('user_uid',)
    form = EmailConfirmationTokenForm
class ResetPasswordTokenForm(form.Form):
    """Admin form for password-reset token records."""
    # Owner, presented as an email dropdown by UidToEmailView.
    user_uid = fields.SelectField('User', widget=Select2Widget())
    token = fields.TextField()
    expiration_datetime = fields.TextField()
    used = fields.BooleanField()
    password_reset = fields.BooleanField()
class ResetPasswordTokenView(UidToEmailView):
    """Admin view for password-reset tokens, searchable by owner email."""
    column_list = ('token', 'user_email', 'used')
    column_sortable_list = ('token', 'user_email', 'used')
    # BUG FIX: ('user_uid') is a bare string, not a tuple — add trailing comma.
    column_searchable_list = ('user_uid',)
    form = ResetPasswordTokenForm
class NotificationForm(form.Form):
    """Admin form for user notification records."""
    # Recipient, presented as an email dropdown by UidToEmailView.
    user_uid = fields.SelectField('User', widget=Select2Widget())
    message = fields.TextField()
    # Free-form key/value data consumed by the notification template.
    template_data = DictField()
    seen = fields.BooleanField()
    target_url = fields.TextField()
class NotificationView(UidToEmailView):
    """Admin view for notifications, searchable by recipient email."""
    column_list = ('user_email', 'message')
    column_sortable_list = ('user_email', 'message', 'seen_timestamp')
    # BUG FIX: ('user_uid') is a bare string, not a tuple — add trailing comma.
    column_searchable_list = ('user_uid',)
    form = NotificationForm
class UserForm(form.Form):
    """Admin form for application user accounts."""
    name = fields.TextField('Name', [validators.DataRequired()])
    email = fields.TextField(
        'Email',
        [validators.DataRequired(), validators.Email()]
    )
    role = fields.SelectField(
        'Role',
        choices=[('admin', 'admin'), ('user', 'user')]
    )
    enable = fields.BooleanField('Enable')
    email_confirmed = fields.BooleanField('Email confirmed')
    # NOTE(review): stored/handled by the server model layer on save;
    # presumably hashed there — confirm it is never persisted in plain text.
    password = fields.PasswordField('Password')
class UserView(BaseView):
    """Admin view over the User collection."""
    column_list = ('_id', 'name', 'email', 'role', 'enable', 'email_confirmed')
    column_sortable_list = (
        'name',
        'email',
        'role',
        'enable',
        'email_confirmed'
    )
    column_searchable_list = ('name', 'email')
    form = UserForm
class Admin(object):
    """Singleton admin user backed by credentials from the server config.

    flask-login >= 0.3 exposes ``is_authenticated`` / ``is_active`` /
    ``is_anonymous`` as *properties*, and this module reads them without
    calling (e.g. ``login.current_user.is_authenticated``). The previous
    plain-method definitions only worked by accident: attribute access
    returned a bound method object, which is always truthy. Defining them
    as properties makes the class consistent with how it is used.
    """
    username = config.get('admin').get('username')
    password = config.get('admin').get('password')
    role = 'admin'

    def __repr__(self):
        return "Admin"

    @property
    def is_authenticated(self):
        # An Admin instance only exists after successful credential validation.
        return True

    @property
    def is_active(self):
        return True

    @property
    def is_anonymous(self):
        return False

    def get_id(self):
        # Session key flask-login stores to reload this user.
        return self.username

    def __unicode__(self):
        return self.username
class LoginForm(form.Form):
    """Admin login form validated against the credentials in the server config."""
    login = fields.TextField(validators=[validators.required()])
    password = fields.PasswordField(validators=[validators.required()])

    def validate_login(self, field):
        """Raise ValidationError unless the submitted credentials match."""
        import hmac  # local import to keep the module header untouched

        user = self.get_user()

        if user is None:
            raise validators.ValidationError('Invalid user')

        # SECURITY FIX: this form handles untrusted input; compare secrets in
        # constant time instead of ``!=`` to avoid leaking info through timing.
        expected = user.password.encode('utf-8')
        submitted = self.password.data.encode('utf-8')
        if not hmac.compare_digest(expected, submitted):
            raise validators.ValidationError('Invalid password')

    def get_user(self):
        """Return the Admin singleton when the login name matches, else None."""
        if self.login.data == config.get('admin').get('username'):
            return Admin()
        else:
            return None
def init_login():
    """Attach a flask-login LoginManager to the app and register its user loader."""
    login_manager = login.LoginManager()
    login_manager.init_app(app)

    @login_manager.user_loader
    def load_user(user_id):
        # Exactly one account exists: the configured admin.
        admin_name = config.get('admin').get('username')
        return Admin() if user_id == admin_name else None
class MyAdminIndexView(admin.AdminIndexView):
    """Admin index view with login/logout handling."""

    @expose('/')
    def index(self):
        # Unauthenticated visitors are redirected to the login page.
        if not login.current_user.is_authenticated:
            # logger.debug('not login.current_user.is_authenticated
            # redirect to login_view')
            return redirect(url_for('.login_view'))
        return super(MyAdminIndexView, self).index()

    @expose('/login/', methods=('GET', 'POST'))
    def login_view(self):
        """Render the login form; on valid POST, log the admin in."""
        form = LoginForm(request.form)
        if helpers.validate_form_on_submit(form):
            user = form.get_user()
            login.login_user(user)

        if login.current_user.is_authenticated:
            # logger.debug('login.current_user.is_authenticated
            # redirect to index')
            return redirect(url_for('.index'))

        # Re-render the index with the (possibly invalid) form.
        self._template_args['form'] = form
        return super(MyAdminIndexView, self).index()

    @expose('/logout/')
    def logout_view(self):
        login.logout_user()
        return redirect(url_for('.index'))
@app.route('/')
def index():
    """Redirect the bare root URL to the admin dashboard."""
    dashboard = url_for('admin.index')
    return redirect(dashboard)
# Wire up authentication before instantiating the admin interface.
init_login()

# NOTE(review): this rebinds the name ``admin`` (previously the flask_admin
# module) to the application's Admin instance; the module object is not
# referenced afterwards, but the shadowing is easy to trip over.
admin = admin.Admin(
    app,
    '{{cookiecutter.project_name}} - admin',
    index_view=MyAdminIndexView(),
    base_template='my_master.html'
)

# Register one view per managed collection.
admin.add_view(UserView(db.User, 'User'))
admin.add_view(NotificationView(db.Notification, 'Notification'))
admin.add_view(
    EmailConfirmationTokenView(
        db.Emailconfirmationtoken,
        'Emailconfirmationtoken'
    )
)
admin.add_view(
    ResetPasswordTokenView(
        db.Resetpasswordtoken,
        'Resetpasswordtoken'
    )
)

if __name__ == '__main__':
    # Development entry point; host/port/debug come from the 'admin' config section.
    host = config.get('admin').get('host')
    port = config.get('admin').get('port')
    debug = config.get('admin').get('debug')
    app.run(host=host, port=port, debug=debug)
JetBrains/intellij-community | plugins/hg4idea/testData/bin/mercurial/merge.py | 90 | 28398 | # merge.py - directory-level update/merge handling for Mercurial
#
# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import nullid, nullrev, hex, bin
from i18n import _
from mercurial import obsolete
import error, util, filemerge, copies, subrepo, worker, dicthelpers
import errno, os, shutil
class mergestate(object):
    '''track 3-way merge state of individual files'''

    def __init__(self, repo):
        self._repo = repo
        self._dirty = False
        self._read()

    def reset(self, node=None):
        """Drop all per-file state (and the stored pre-merge file copies)."""
        self._state = {}
        if node:
            self._local = node
        # Best-effort removal of .hg/merge (ignore_errors=True).
        shutil.rmtree(self._repo.join("merge"), True)
        self._dirty = False

    def _read(self):
        """Load state from .hg/merge/state; a missing file means no merge pending."""
        self._state = {}
        try:
            f = self._repo.opener("merge/state")
            for i, l in enumerate(f):
                if i == 0:
                    # first line: node of the local parent of the merge
                    self._local = bin(l[:-1])
                else:
                    # one NUL-separated record per file: name + state fields
                    bits = l[:-1].split("\0")
                    self._state[bits[0]] = bits[1:]
            f.close()
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
        self._dirty = False

    def commit(self):
        """Write the state file back out, if anything changed."""
        if self._dirty:
            f = self._repo.opener("merge/state", "w")
            f.write(hex(self._local) + "\n")
            for d, v in self._state.iteritems():
                f.write("\0".join([d] + v) + "\n")
            f.close()
            self._dirty = False

    def add(self, fcl, fco, fca, fd):
        """Record a conflicting file: fcl/fco/fca are local/other/ancestor
        file contexts, fd the destination name in the working directory."""
        hash = util.sha1(fcl.path()).hexdigest()
        # Preserve a copy of the local version for later re-resolution.
        self._repo.opener.write("merge/" + hash, fcl.data())
        # record: state ('u'nresolved), backup hash, paths, ancestor node, flags
        self._state[fd] = ['u', hash, fcl.path(), fca.path(),
                           hex(fca.filenode()), fco.path(), fcl.flags()]
        self._dirty = True

    def __contains__(self, dfile):
        return dfile in self._state

    def __getitem__(self, dfile):
        # Returns the one-letter state: 'u' unresolved, 'r' resolved.
        return self._state[dfile][0]

    def __iter__(self):
        l = self._state.keys()
        l.sort()
        for f in l:
            yield f

    def mark(self, dfile, state):
        self._state[dfile][0] = state
        self._dirty = True

    def resolve(self, dfile, wctx, octx):
        """Attempt to re-run the file merge for dfile; return the filemerge
        result (0 on success), short-circuiting files already resolved."""
        if self[dfile] == 'r':
            return 0
        state, hash, lfile, afile, anode, ofile, flags = self._state[dfile]
        fcd = wctx[dfile]
        fco = octx[ofile]
        fca = self._repo.filectx(afile, fileid=anode)
        # "premerge" x flags
        flo = fco.flags()
        fla = fca.flags()
        if 'x' in flags + flo + fla and 'l' not in flags + flo + fla:
            if fca.node() == nullid:
                self._repo.ui.warn(_('warning: cannot merge flags for %s\n') %
                                   afile)
            elif flags == fla:
                # local side didn't change flags; take the other side's.
                flags = flo
        # restore local
        f = self._repo.opener("merge/" + hash)
        self._repo.wwrite(dfile, f.read(), flags)
        f.close()
        r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca)
        if r is None:
            # no real conflict
            del self._state[dfile]
        elif not r:
            self.mark(dfile, 'r')
        return r
def _checkunknownfile(repo, wctx, mctx, f):
    """True if f is an unignored, untracked working-dir file whose content
    differs from the version in mctx."""
    if repo.dirstate._ignore(f):
        return False
    if not os.path.isfile(repo.wjoin(f)):
        return False
    if repo.dirstate.normalize(f) in repo.dirstate:
        return False
    return mctx[f].cmp(wctx[f])
def _checkunknown(repo, wctx, mctx):
    "check for collisions between unknown files and files in mctx"
    conflicting = [f for f in mctx
                   if f not in wctx and _checkunknownfile(repo, wctx, mctx, f)]
    for f in conflicting:
        wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
    if conflicting:
        raise util.Abort(_("untracked files in working directory differ "
                           "from files in requested revision"))
def _forgetremoved(wctx, mctx, branchmerge):
    """
    Forget removed files

    If we're jumping between revisions (as opposed to merging), and if
    neither the working directory nor the target rev has the file,
    then we need to remove it from the dirstate, to prevent the
    dirstate from listing the file when it is no longer in the
    manifest.

    If we're merging, and the other revision has removed a file
    that is not present in the working directory, we need to mark it
    as removed.

    Returns a list of (file, action, args, message) tuples.
    """

    actions = []
    # 'r' marks the file removed on a branch merge; 'f' forgets it outright.
    # IDIOM FIX: replaces the fragile ``branchmerge and 'r' or 'f'`` and/or
    # trick with an explicit conditional expression (same result here).
    state = 'r' if branchmerge else 'f'
    for f in wctx.deleted():
        if f not in mctx:
            actions.append((f, state, None, "forget deleted"))

    if not branchmerge:
        for f in wctx.removed():
            if f not in mctx:
                actions.append((f, "f", None, "forget removed"))

    return actions
def _checkcollision(repo, wmf, actions, prompts):
    """Abort if applying actions/prompts to manifest wmf would produce two
    paths that differ only by case (a case-folding collision)."""
    # build provisional merged manifest up
    pmmf = set(wmf)

    # helpers mimicking each action's effect on the provisional manifest
    def addop(f, args):
        pmmf.add(f)
    def removeop(f, args):
        pmmf.discard(f)
    def nop(f, args):
        pass

    def renameop(f, args):
        f2, fd, flags = args
        if f:
            pmmf.discard(f)
        pmmf.add(fd)

    def mergeop(f, args):
        f2, fd, move = args
        if move:
            pmmf.discard(f)
        pmmf.add(fd)

    # action code -> effect on the provisional manifest
    opmap = {
        "a": addop,
        "d": renameop,
        "dr": nop,
        "e": nop,
        "f": addop, # untracked file should be kept in working directory
        "g": addop,
        "m": mergeop,
        "r": removeop,
        "rd": nop,
    }
    for f, m, args, msg in actions:
        op = opmap.get(m)
        assert op, m
        op(f, args)
    # unresolved prompts ("cd"/"dc") may keep the file; assume it stays
    opmap = {
        "cd": addop,
        "dc": addop,
    }
    for f, m in prompts:
        op = opmap.get(m)
        assert op, m
        op(f, None)

    # check case-folding collision in provisional merged manifest
    foldmap = {}
    for f in sorted(pmmf):
        fold = util.normcase(f)
        if fold in foldmap:
            raise util.Abort(_("case-folding collision between %s and %s")
                             % (f, foldmap[fold]))
        foldmap[fold] = f
def manifestmerge(repo, wctx, p2, pa, branchmerge, force, partial,
                  acceptremote=False):
    """
    Merge p1 and p2 with ancestor pa and generate merge action list

    branchmerge and force are as passed in to update
    partial = function to filter file lists
    acceptremote = accept the incoming changes without prompting

    Returns a list of (file, action-code, args, message) tuples, where the
    action code is one of the single/double-letter codes consumed by
    applyupdates()/recordupdates() ('g' get, 'r' remove, 'm' merge, 'e'
    exec-flag change, 'd' directory rename, 'dr'/'rd' warnings, etc.).
    """
    overwrite = force and not branchmerge
    actions, copy, movewithdir = [], {}, {}
    followcopies = False
    # pick the effective ancestor for the three-way compare below
    if overwrite:
        pa = wctx
    elif pa == p2: # backwards
        pa = wctx.p1()
    elif not branchmerge and not wctx.dirty(missing=True):
        pass
    elif pa and repo.ui.configbool("merge", "followcopies", True):
        followcopies = True
    # manifests fetched in order are going to be faster, so prime the caches
    [x.manifest() for x in
     sorted(wctx.parents() + [p2, pa], key=lambda x: x.rev())]
    if followcopies:
        # detect copies/renames so they can be merged rather than treated
        # as independent add/remove pairs
        ret = copies.mergecopies(repo, wctx, p2, pa)
        copy, movewithdir, diverge, renamedelete = ret
        for of, fl in diverge.iteritems():
            actions.append((of, "dr", (fl,), "divergent renames"))
        for of, fl in renamedelete.iteritems():
            actions.append((of, "rd", (fl,), "rename and delete"))
    repo.ui.note(_("resolving manifests\n"))
    repo.ui.debug(" branchmerge: %s, force: %s, partial: %s\n"
                  % (bool(branchmerge), bool(force), bool(partial)))
    repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, wctx, p2))
    m1, m2, ma = wctx.manifest(), p2.manifest(), pa.manifest()
    copied = set(copy.values())
    copied.update(movewithdir.values())
    if '.hgsubstate' in m1:
        # check whether sub state is modified
        for s in sorted(wctx.substate):
            if wctx.sub(s).dirty():
                m1['.hgsubstate'] += "+"
                break
    aborts, prompts = [], []
    # Compare manifests
    fdiff = dicthelpers.diff(m1, m2)
    flagsdiff = m1.flagsdiff(m2)
    diff12 = dicthelpers.join(fdiff, flagsdiff)
    for f, (n12, fl12) in diff12.iteritems():
        if n12:
            n1, n2 = n12
        else: # file contents didn't change, but flags did
            n1 = n2 = m1.get(f, None)
            if n1 is None:
                # Since n1 == n2, the file isn't present in m2 either. This
                # means that the file was removed or deleted locally and
                # removed remotely, but that residual entries remain in flags.
                # This can happen in manifests generated by workingctx.
                continue
        if fl12:
            fl1, fl2 = fl12
        else: # flags didn't change, file contents did
            fl1 = fl2 = m1.flags(f)
        if partial and not partial(f):
            continue
        if n1 and n2:
            # file exists on both sides: three-way compare against ancestor
            fla = ma.flags(f)
            nol = 'l' not in fl1 + fl2 + fla
            a = ma.get(f, nullid)
            if n2 == a and fl2 == fla:
                pass # remote unchanged - keep local
            elif n1 == a and fl1 == fla: # local unchanged - use remote
                if n1 == n2: # optimization: keep local content
                    actions.append((f, "e", (fl2,), "update permissions"))
                else:
                    actions.append((f, "g", (fl2,), "remote is newer"))
            elif nol and n2 == a: # remote only changed 'x'
                actions.append((f, "e", (fl2,), "update permissions"))
            elif nol and n1 == a: # local only changed 'x'
                actions.append((f, "g", (fl1,), "remote is newer"))
            else: # both changed something
                actions.append((f, "m", (f, f, False), "versions differ"))
        elif f in copied: # files we'll deal with on m2 side
            pass
        elif n1 and f in movewithdir: # directory rename
            f2 = movewithdir[f]
            actions.append((f, "d", (None, f2, fl1),
                            "remote renamed directory to " + f2))
        elif n1 and f in copy:
            f2 = copy[f]
            actions.append((f, "m", (f2, f, False),
                            "local copied/moved to " + f2))
        elif n1 and f in ma: # clean, a different, no remote
            if n1 != ma[f]:
                prompts.append((f, "cd")) # prompt changed/deleted
            elif n1[20:] == "a": # added, no remote
                actions.append((f, "f", None, "remote deleted"))
            else:
                actions.append((f, "r", None, "other deleted"))
        elif n2 and f in movewithdir:
            f2 = movewithdir[f]
            actions.append((None, "d", (f, f2, fl2),
                            "local renamed directory to " + f2))
        elif n2 and f in copy:
            f2 = copy[f]
            if f2 in m2:
                actions.append((f2, "m", (f, f, False),
                                "remote copied to " + f))
            else:
                actions.append((f2, "m", (f, f, True),
                                "remote moved to " + f))
        elif n2 and f not in ma:
            # local unknown, remote created: the logic is described by the
            # following table:
            #
            # force  branchmerge  different  |  action
            #   n         *           n      |   get
            #   n         *           y      |   abort
            #   y         n           *      |   get
            #   y         y           n      |   get
            #   y         y           y      |   merge
            #
            # Checking whether the files are different is expensive, so we
            # don't do that when we can avoid it.
            if force and not branchmerge:
                actions.append((f, "g", (fl2,), "remote created"))
            else:
                different = _checkunknownfile(repo, wctx, p2, f)
                if force and branchmerge and different:
                    actions.append((f, "m", (f, f, False),
                                    "remote differs from untracked local"))
                elif not force and different:
                    aborts.append((f, "ud"))
                else:
                    actions.append((f, "g", (fl2,), "remote created"))
        elif n2 and n2 != ma[f]:
            prompts.append((f, "dc")) # prompt deleted/changed
    # report every conflicting untracked file before aborting
    for f, m in sorted(aborts):
        if m == "ud":
            repo.ui.warn(_("%s: untracked file differs\n") % f)
        else: assert False, m
    if aborts:
        raise util.Abort(_("untracked files in working directory differ "
                           "from files in requested revision"))
    # on case-insensitive filesystems, refuse merges that would collide
    if not util.checkcase(repo.path):
        # check collision between files only in p2 for clean update
        if (not branchmerge and
            (force or not wctx.dirty(missing=True, branch=False))):
            _checkcollision(repo, m2, [], [])
        else:
            _checkcollision(repo, m1, actions, prompts)
    # resolve the deferred changed/deleted and deleted/changed prompts
    for f, m in sorted(prompts):
        if m == "cd":
            if acceptremote:
                actions.append((f, "r", None, "remote delete"))
            elif repo.ui.promptchoice(
                _("local changed %s which remote deleted\n"
                  "use (c)hanged version or (d)elete?") % f,
                (_("&Changed"), _("&Delete")), 0):
                actions.append((f, "r", None, "prompt delete"))
            else:
                actions.append((f, "a", None, "prompt keep"))
        elif m == "dc":
            if acceptremote:
                actions.append((f, "g", (m2.flags(f),), "remote recreating"))
            elif repo.ui.promptchoice(
                _("remote changed %s which local deleted\n"
                  "use (c)hanged version or leave (d)eleted?") % f,
                (_("&Changed"), _("&Deleted")), 0) == 0:
                actions.append((f, "g", (m2.flags(f),), "prompt recreating"))
        else: assert False, m
    return actions
def actionkey(a):
    """Sort key that orders remove ('r') actions ahead of all others."""
    if a[1] == "r":
        return -1, a
    return 0, a
def getremove(repo, mctx, overwrite, args):
    """apply usually-non-interactive updates to the working directory

    mctx is the context to be merged into the working copy

    yields (count, filename) tuples for progress updates; counts are
    batched (up to 100 files per tuple) to keep reporting overhead low

    :param repo: local repository
    :param mctx: context being merged into the working copy
    :param overwrite: unused here; kept so the signature matches the
        worker callable convention
    :param args: iterable of action tuples; an action whose second field
        is 'r' removes the file, anything else writes its data from mctx
    """
    verbose = repo.ui.verbose
    # hoist frequently-used bound methods out of the loop
    unlink = util.unlinkpath
    wjoin = repo.wjoin
    fctx = mctx.filectx
    wwrite = repo.wwrite
    audit = repo.wopener.audit
    i = 0
    for arg in args:
        f = arg[0]
        if arg[1] == 'r':
            if verbose:
                repo.ui.note(_("removing %s\n") % f)
            audit(f)
            try:
                unlink(wjoin(f), ignoremissing=True)
            # fixed: legacy "except OSError, inst" syntax (removed in
            # Python 3) replaced with the "as" form, valid on 2.6+ too
            except OSError as inst:
                repo.ui.warn(_("update failed to remove %s: %s!\n") %
                             (f, inst.strerror))
        else:
            if verbose:
                repo.ui.note(_("getting %s\n") % f)
            wwrite(f, fctx(f).data(), arg[2][0])
        # emit a progress tuple roughly every 100 processed files
        if i == 100:
            yield i, f
            i = 0
        i += 1
    if i > 0:
        yield i, f
def applyupdates(repo, actions, wctx, mctx, actx, overwrite):
    """apply the merge action list to the working directory

    wctx is the working copy context
    mctx is the context to be merged into the working copy
    actx is the context of the common ancestor

    Return a tuple of counts (updated, merged, removed, unresolved) that
    describes how many files were affected by the update.
    """
    updated, merged, removed, unresolved = 0, 0, 0, 0
    ms = mergestate(repo)
    ms.reset(wctx.p1().node())
    moves = []
    # removes are sorted first (see actionkey) so replaced files are gone
    # before their replacements are written
    actions.sort(key=actionkey)
    # prescan for merges
    for a in actions:
        f, m, args, msg = a
        repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m))
        if m == "m": # merge
            f2, fd, move = args
            if fd == '.hgsubstate': # merged internally
                continue
            repo.ui.debug(" preserving %s for resolve of %s\n" % (f, fd))
            fcl = wctx[f]
            fco = mctx[f2]
            if mctx == actx: # backwards, use working dir parent as ancestor
                if fcl.parents():
                    fca = fcl.p1()
                else:
                    fca = repo.filectx(f, fileid=nullrev)
            else:
                fca = fcl.ancestor(fco, actx)
            if not fca:
                fca = repo.filectx(f, fileid=nullrev)
            ms.add(fcl, fco, fca, fd)
            if f != fd and move:
                moves.append(f)
    audit = repo.wopener.audit
    # remove renamed files after safely stored
    for f in moves:
        if os.path.lexists(repo.wjoin(f)):
            repo.ui.debug("removing %s\n" % f)
            audit(f)
            util.unlinkpath(repo.wjoin(f))
    numupdates = len(actions)
    # 'g' (get) and 'r' (remove) actions are parallelizable and handled by
    # worker processes via getremove(); everything else stays serial below
    workeractions = [a for a in actions if a[1] in 'gr']
    updateactions = [a for a in workeractions if a[1] == 'g']
    updated = len(updateactions)
    removeactions = [a for a in workeractions if a[1] == 'r']
    removed = len(removeactions)
    actions = [a for a in actions if a[1] not in 'gr']
    hgsub = [a[1] for a in workeractions if a[0] == '.hgsubstate']
    if hgsub and hgsub[0] == 'r':
        subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
    z = 0
    prog = worker.worker(repo.ui, 0.001, getremove, (repo, mctx, overwrite),
                         removeactions)
    for i, item in prog:
        z += i
        repo.ui.progress(_('updating'), z, item=item, total=numupdates,
                         unit=_('files'))
    prog = worker.worker(repo.ui, 0.001, getremove, (repo, mctx, overwrite),
                         updateactions)
    for i, item in prog:
        z += i
        repo.ui.progress(_('updating'), z, item=item, total=numupdates,
                         unit=_('files'))
    if hgsub and hgsub[0] == 'g':
        subrepo.submerge(repo, wctx, mctx, wctx, overwrite)
    # hoist lookups out of the serial per-action loop below
    _updating = _('updating')
    _files = _('files')
    progress = repo.ui.progress
    for i, a in enumerate(actions):
        f, m, args, msg = a
        progress(_updating, z + i + 1, item=f, total=numupdates, unit=_files)
        if m == "m": # merge
            f2, fd, move = args
            if fd == '.hgsubstate': # subrepo states need updating
                subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
                                 overwrite)
                continue
            audit(fd)
            r = ms.resolve(fd, wctx, mctx)
            if r is not None and r > 0:
                unresolved += 1
            else:
                if r is None:
                    updated += 1
                else:
                    merged += 1
        elif m == "d": # directory rename
            f2, fd, flags = args
            if f:
                repo.ui.note(_("moving %s to %s\n") % (f, fd))
                audit(f)
                repo.wwrite(fd, wctx.filectx(f).data(), flags)
                util.unlinkpath(repo.wjoin(f))
            if f2:
                repo.ui.note(_("getting %s to %s\n") % (f2, fd))
                repo.wwrite(fd, mctx.filectx(f2).data(), flags)
            updated += 1
        elif m == "dr": # divergent renames
            fl, = args
            repo.ui.warn(_("note: possible conflict - %s was renamed "
                           "multiple times to:\n") % f)
            for nf in fl:
                repo.ui.warn(" %s\n" % nf)
        elif m == "rd": # rename and delete
            fl, = args
            repo.ui.warn(_("note: possible conflict - %s was deleted "
                           "and renamed to:\n") % f)
            for nf in fl:
                repo.ui.warn(" %s\n" % nf)
        elif m == "e": # exec
            flags, = args
            audit(f)
            util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
            updated += 1
    ms.commit()
    progress(_updating, None, total=numupdates, unit=_files)
    return updated, merged, removed, unresolved
def calculateupdates(repo, tctx, mctx, ancestor, branchmerge, force, partial,
                     acceptremote=False):
    "Calculate the actions needed to merge mctx into tctx"
    # manifest comparison yields the bulk of the action list
    actions = list(manifestmerge(repo, tctx, mctx, ancestor,
                                 branchmerge, force, partial, acceptremote))
    # a working-directory target (rev None) also needs dirstate forgets
    if tctx.rev() is None:
        actions += _forgetremoved(tctx, mctx, branchmerge)
    return actions
def recordupdates(repo, actions, branchmerge):
    "record merge actions to the dirstate"
    ds = repo.dirstate
    for f, m, args, msg in actions:
        if m == "r":  # remove
            if branchmerge:
                ds.remove(f)
            else:
                ds.drop(f)
        elif m == "a":  # re-add
            if not branchmerge:
                ds.add(f)
        elif m == "f":  # forget
            ds.drop(f)
        elif m == "e":  # exec change
            ds.normallookup(f)
        elif m == "g":  # get
            if branchmerge:
                ds.otherparent(f)
            else:
                ds.normal(f)
        elif m == "m":  # merge
            f2, fd, move = args
            if branchmerge:
                # mark the file as merged so the merger is recorded
                # properly at commit time
                ds.merge(fd)
                if f != f2:  # copy/rename
                    if move:
                        ds.remove(f)
                    ds.copy(f if f != fd else f2, fd)
            else:
                # We've update-merged a locally modified file, so we set
                # the dirstate to emulate a normal checkout of that file
                # some time in the past; the merge then appears as a
                # plain local modification.
                if f2 == fd:  # file not locally copied/moved
                    ds.normallookup(fd)
                if move:
                    ds.drop(f)
        elif m == "d":  # directory rename
            f2, fd, flag = args
            if not f2 and f not in ds:
                # untracked file moved
                continue
            if branchmerge:
                ds.add(fd)
                if f:
                    ds.remove(f)
                    ds.copy(f, fd)
                if f2:
                    ds.copy(f2, fd)
            else:
                ds.normal(fd)
                if f:
                    ds.drop(f)
def update(repo, node, branchmerge, force, partial, ancestor=None,
           mergeancestor=False):
    """
    Perform a merge between the working directory and the given node

    node = the node to update to, or None if unspecified
    branchmerge = whether to merge between branches
    force = whether to force branch merging or file overwriting
    partial = a function to filter file lists (dirstate not updated)
    mergeancestor = whether it is merging with an ancestor. If true,
      we should accept the incoming changes for any prompts that occur.
      If false, merging with an ancestor (fast-forward) is only allowed
      between different named branches. This flag is used by rebase extension
      as a temporary fix and should be avoided in general.

    The table below shows all the behaviors of the update command
    given the -c and -C or no options, whether the working directory
    is dirty, whether a revision is specified, and the relationship of
    the parent rev to the target rev (linear, on the same named
    branch, or on another named branch).

    This logic is tested by test-update-branches.t.

    -c  -C  dirty  rev  |  linear   same  cross
     n   n    n     n   |    ok     (1)     x
     n   n    n     y   |    ok     ok     ok
     n   n    y     *   |   merge   (2)    (2)
     n   y    *     *   |    ---  discard  ---
     y   n    y     *   |    ---    (3)    ---
     y   n    n     *   |    ---    ok     ---
     y   y    *     *   |    ---    (4)    ---

    x = can't happen
    * = don't-care
    1 = abort: crosses branches (use 'hg merge' or 'hg update -c')
    2 = abort: crosses branches (use 'hg merge' to merge or
                 use 'hg update -C' to discard changes)
    3 = abort: uncommitted local changes
    4 = incompatible options (checked in commands.py)

    Return the same tuple as applyupdates().
    """
    # keep the caller's original node around: None means "no explicit rev"
    onode = node
    wlock = repo.wlock()
    try:
        wc = repo[None]
        if node is None:
            # tip of current branch
            try:
                node = repo.branchtip(wc.branch())
            except error.RepoLookupError:
                if wc.branch() == "default": # no default branch!
                    node = repo.lookup("tip") # update to tip
                else:
                    raise util.Abort(_("branch %s not found") % wc.branch())
        overwrite = force and not branchmerge
        pl = wc.parents()
        p1, p2 = pl[0], repo[node]
        if ancestor:
            pa = repo[ancestor]
        else:
            pa = p1.ancestor(p2)
        fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2)
        ### check phase
        if not overwrite and len(pl) > 1:
            raise util.Abort(_("outstanding uncommitted merges"))
        if branchmerge:
            if pa == p2:
                raise util.Abort(_("merging with a working directory ancestor"
                                   " has no effect"))
            elif pa == p1:
                if not mergeancestor and p1.branch() == p2.branch():
                    raise util.Abort(_("nothing to merge"),
                                     hint=_("use 'hg update' "
                                            "or check 'hg heads'"))
            if not force and (wc.files() or wc.deleted()):
                raise util.Abort(_("outstanding uncommitted changes"),
                                 hint=_("use 'hg status' to list changes"))
            for s in sorted(wc.substate):
                if wc.sub(s).dirty():
                    raise util.Abort(_("outstanding uncommitted changes in "
                                       "subrepository '%s'") % s)
        elif not overwrite:
            if pa not in (p1, p2): # nolinear
                dirty = wc.dirty(missing=True)
                if dirty or onode is None:
                    # Branching is a bit strange to ensure we do the minimal
                    # amount of call to obsolete.background.
                    foreground = obsolete.foreground(repo, [p1.node()])
                    # note: the <node> variable contains a random identifier
                    if repo[node].node() in foreground:
                        pa = p1 # allow updating to successors
                    elif dirty:
                        msg = _("crosses branches (merge branches or use"
                                " --clean to discard changes)")
                        raise util.Abort(msg)
                    else: # node is none
                        msg = _("crosses branches (merge branches or update"
                                " --check to force update)")
                        raise util.Abort(msg)
                else:
                    # Allow jumping branches if clean and specific rev given
                    pa = p1
        ### calculate phase
        actions = calculateupdates(repo, wc, p2, pa,
                                   branchmerge, force, partial, mergeancestor)
        ### apply phase
        if not branchmerge: # just jump to the new rev
            fp1, fp2, xp1, xp2 = fp2, nullid, xp2, ''
        if not partial:
            repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2)
        stats = applyupdates(repo, actions, wc, p2, pa, overwrite)
        if not partial:
            # record the new parents and mirror the actions in the dirstate
            repo.setparents(fp1, fp2)
            recordupdates(repo, actions, branchmerge)
            if not branchmerge:
                repo.dirstate.setbranch(p2.branch())
    finally:
        wlock.release()
    # the 'update' hook runs outside the wlock, after everything succeeded
    if not partial:
        repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3])
    return stats
| apache-2.0 |
amisrs/one-eighty | venv2/lib/python2.7/site-packages/sqlalchemy/orm/strategy_options.py | 19 | 35455 | # Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
"""
from .interfaces import MapperOption, PropComparator
from .. import util
from ..sql.base import _generative, Generative
from .. import exc as sa_exc, inspect
from .base import _is_aliased_class, _class_to_mapper
from . import util as orm_util
from .path_registry import PathRegistry, TokenRegistry, \
_WILDCARD_TOKEN, _DEFAULT_TOKEN
class Load(Generative, MapperOption):
    """Represents loader options which modify the state of a
    :class:`.Query` in order to affect how various mapped attributes are
    loaded.

    The :class:`.Load` object is in most cases used implicitly behind the
    scenes when one makes use of a query option like :func:`.joinedload`,
    :func:`.defer`, or similar.  However, the :class:`.Load` object
    can also be used directly, and in some cases can be useful.

    To use :class:`.Load` directly, instantiate it with the target mapped
    class as the argument.  This style of usage is
    useful when dealing with a :class:`.Query` that has multiple entities::

        myopt = Load(MyClass).joinedload("widgets")

    The above ``myopt`` can now be used with :meth:`.Query.options`, where it
    will only take effect for the ``MyClass`` entity::

        session.query(MyClass, MyOtherClass).options(myopt)

    One case where :class:`.Load` is useful as public API is when specifying
    "wildcard" options that only take effect for a certain class::

        session.query(Order).options(Load(Order).lazyload('*'))

    Above, all relationships on ``Order`` will be lazy-loaded, but other
    attributes on those descendant objects will load using their normal
    loader strategy.

    .. seealso::

        :ref:`loading_toplevel`

        :ref:`contains_eager`
    """
    def __init__(self, entity):
        # root the option at the given mapped class / aliased entity
        insp = inspect(entity)
        self.path = insp._path_registry
        # note that this .context is shared among all descendant
        # Load objects
        self.context = {}
        self.local_opts = {}
    @classmethod
    def for_existing_path(cls, path):
        # alternate constructor: wrap an already-built PathRegistry,
        # bypassing __init__ (no entity inspection needed)
        load = cls.__new__(cls)
        load.path = path
        load.context = {}
        load.local_opts = {}
        return load
    def _generate(self):
        # generative clone: fresh local_opts, but .context stays shared
        # with the parent so strategies accumulate in one place
        cloned = super(Load, self)._generate()
        cloned.local_opts = {}
        return cloned
    is_opts_only = False
    strategy = None
    propagate_to_loaders = False
    def process_query(self, query):
        # MapperOption hook: apply unconditionally
        self._process(query, True)
    def process_query_conditionally(self, query):
        # MapperOption hook: apply without raising for unmatched paths
        # (used when options propagate to secondary/lazy loads)
        self._process(query, False)
    def _process(self, query, raiseerr):
        # copy accumulated loader entries onto the query; for a secondary
        # load (query._current_path set), re-anchor each stored path
        # relative to the current path first
        current_path = query._current_path
        if current_path:
            for (token, start_path), loader in self.context.items():
                chopped_start_path = self._chop_path(start_path, current_path)
                if chopped_start_path is not None:
                    query._attributes[(token, chopped_start_path)] = loader
        else:
            query._attributes.update(self.context)
    def _generate_path(self, path, attr, wildcard_key, raiseerr=True):
        # extend ``path`` by ``attr``, which may be a string attribute
        # name, a wildcard/default token, or an instrumented attribute
        if raiseerr and not path.has_entity:
            if isinstance(path, TokenRegistry):
                raise sa_exc.ArgumentError(
                    "Wildcard token cannot be followed by another entity")
            else:
                raise sa_exc.ArgumentError(
                    "Attribute '%s' of entity '%s' does not "
                    "refer to a mapped entity" %
                    (path.prop.key, path.parent.entity)
                )
        if isinstance(attr, util.string_types):
            default_token = attr.endswith(_DEFAULT_TOKEN)
            if attr.endswith(_WILDCARD_TOKEN) or default_token:
                if default_token:
                    self.propagate_to_loaders = False
                if wildcard_key:
                    attr = "%s:%s" % (wildcard_key, attr)
                return path.token(attr)
            try:
                # use getattr on the class to work around
                # synonyms, hybrids, etc.
                attr = getattr(path.entity.class_, attr)
            except AttributeError:
                if raiseerr:
                    raise sa_exc.ArgumentError(
                        "Can't find property named '%s' on the "
                        "mapped entity %s in this Query. " % (
                            attr, path.entity)
                    )
                else:
                    return None
            else:
                attr = attr.property
            path = path[attr]
        else:
            prop = attr.property
            if not prop.parent.common_parent(path.mapper):
                if raiseerr:
                    raise sa_exc.ArgumentError(
                        "Attribute '%s' does not "
                        "link from element '%s'" % (attr, path.entity))
                else:
                    return None
            if getattr(attr, '_of_type', None):
                # attribute was qualified with of_type(); record a
                # with_polymorphic alias so the subtype loads correctly
                ac = attr._of_type
                ext_info = inspect(ac)
                path_element = ext_info.mapper
                existing = path.entity_path[prop].get(
                    self.context, "path_with_polymorphic")
                if not ext_info.is_aliased_class:
                    ac = orm_util.with_polymorphic(
                        ext_info.mapper.base_mapper,
                        ext_info.mapper, aliased=True,
                        _use_mapper_path=True,
                        _existing_alias=existing)
                path.entity_path[prop].set(
                    self.context, "path_with_polymorphic", inspect(ac))
                path = path[prop][path_element]
            else:
                path = path[prop]
        if path.has_entity:
            path = path.entity_path
        return path
    def __str__(self):
        return "Load(strategy=%r)" % (self.strategy, )
    def _coerce_strat(self, strategy):
        # normalize a strategy dict to a hashable, order-stable tuple
        if strategy is not None:
            strategy = tuple(sorted(strategy.items()))
        return strategy
    @_generative
    def set_relationship_strategy(
            self, attr, strategy, propagate_to_loaders=True):
        # record a relationship loader strategy for ``attr``; the
        # generative clone advances self.path to the target entity
        strategy = self._coerce_strat(strategy)
        self.propagate_to_loaders = propagate_to_loaders
        # if the path is a wildcard, this will set propagate_to_loaders=False
        self.path = self._generate_path(self.path, attr, "relationship")
        self.strategy = strategy
        if strategy is not None:
            self._set_path_strategy()
    @_generative
    def set_column_strategy(self, attrs, strategy, opts=None, opts_only=False):
        # record a column loader strategy for each attribute in ``attrs``
        strategy = self._coerce_strat(strategy)
        for attr in attrs:
            path = self._generate_path(self.path, attr, "column")
            cloned = self._generate()
            cloned.strategy = strategy
            cloned.path = path
            cloned.propagate_to_loaders = True
            if opts:
                cloned.local_opts.update(opts)
            if opts_only:
                cloned.is_opts_only = True
            cloned._set_path_strategy()
    def _set_for_path(self, context, path, replace=True, merge_opts=False):
        # store this Load as the "loader" entry for ``path``; depending on
        # flags, an existing entry may be kept, replaced, or have its
        # local_opts merged
        if merge_opts or not replace:
            existing = path.get(self.context, "loader")
            if existing:
                if merge_opts:
                    existing.local_opts.update(self.local_opts)
            else:
                path.set(context, "loader", self)
        else:
            existing = path.get(self.context, "loader")
            path.set(context, "loader", self)
            if existing and existing.is_opts_only:
                self.local_opts.update(existing.local_opts)
    def _set_path_strategy(self):
        # loader entries are keyed at the attribute (parent) level, not
        # at the entity the path currently points to
        if self.path.has_entity:
            effective_path = self.path.parent
        else:
            effective_path = self.path
        self._set_for_path(
            self.context, effective_path, replace=True,
            merge_opts=self.is_opts_only)
    def __getstate__(self):
        # paths hold mapper references; serialize to a picklable form
        d = self.__dict__.copy()
        d["path"] = self.path.serialize()
        return d
    def __setstate__(self, state):
        self.__dict__.update(state)
        self.path = PathRegistry.deserialize(self.path)
    def _chop_path(self, to_chop, path):
        # strip ``path`` from the front of ``to_chop``; returns the
        # remainder, or None if the prefixes don't correspond
        i = -1
        for i, (c_token, p_token) in enumerate(zip(to_chop, path.path)):
            if isinstance(c_token, util.string_types):
                # TODO: this is approximated from the _UnboundLoad
                # version and probably has issues, not fully covered.
                if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
                    return to_chop
                elif c_token != 'relationship:%s' % (_WILDCARD_TOKEN,) and \
                        c_token != p_token.key:
                    return None
            if c_token is p_token:
                continue
            else:
                return None
        return to_chop[i + 1:]
class _UnboundLoad(Load):
    """Represent a loader option that isn't tied to a root entity.

    The loader option will produce an entity-linked :class:`.Load`
    object when it is passed :meth:`.Query.options`.

    This provides compatibility with the traditional system
    of freestanding options, e.g. ``joinedload('x.y.z')``.
    """
    def __init__(self):
        # unlike Load, the path starts as a plain tuple of tokens; it is
        # resolved against a real entity only in _bind_loader()
        self.path = ()
        self._to_bind = set()
        self.local_opts = {}
    _is_chain_link = False
    def _set_path_strategy(self):
        # defer: just remember this option needs binding at query time
        self._to_bind.add(self)
    def _generate_path(self, path, attr, wildcard_key):
        if wildcard_key and isinstance(attr, util.string_types) and \
                attr in (_WILDCARD_TOKEN, _DEFAULT_TOKEN):
            if attr == _DEFAULT_TOKEN:
                self.propagate_to_loaders = False
            attr = "%s:%s" % (wildcard_key, attr)
        return path + (attr, )
    def __getstate__(self):
        # replace instrumented attributes with (class, key) pairs so the
        # option can be pickled
        d = self.__dict__.copy()
        d['path'] = ret = []
        for token in util.to_list(self.path):
            if isinstance(token, PropComparator):
                ret.append((token._parentmapper.class_, token.key))
            else:
                ret.append(token)
        return d
    def __setstate__(self, state):
        ret = []
        for key in state['path']:
            if isinstance(key, tuple):
                cls, propkey = key
                ret.append(getattr(cls, propkey))
            else:
                ret.append(key)
        state['path'] = tuple(ret)
        self.__dict__ = state
    def _process(self, query, raiseerr):
        # bind every deferred option in this chain against the query's
        # actual entities
        for val in self._to_bind:
            val._bind_loader(query, query._attributes, raiseerr)
    @classmethod
    def _from_keys(cls, meth, keys, chained, kw):
        # build an option chain from dotted-string / attribute keys, as
        # used by the free-standing functions (e.g. joinedload('a.b'))
        opt = _UnboundLoad()
        def _split_key(key):
            if isinstance(key, util.string_types):
                # coerce fooload('*') into "default loader strategy"
                if key == _WILDCARD_TOKEN:
                    return (_DEFAULT_TOKEN, )
                # coerce fooload(".*") into "wildcard on default entity"
                elif key.startswith("." + _WILDCARD_TOKEN):
                    key = key[1:]
                return key.split(".")
            else:
                return (key,)
        all_tokens = [token for key in keys for token in _split_key(key)]
        # intermediate tokens either repeat the strategy ("chained") or
        # fall back to defaultload(); only the last token always gets it
        for token in all_tokens[0:-1]:
            if chained:
                opt = meth(opt, token, **kw)
            else:
                opt = opt.defaultload(token)
            opt._is_chain_link = True
        opt = meth(opt, all_tokens[-1], **kw)
        opt._is_chain_link = False
        return opt
    def _chop_path(self, to_chop, path):
        # tuple-of-tokens variant of Load._chop_path
        i = -1
        for i, (c_token, (p_mapper, p_prop)) in enumerate(
                zip(to_chop, path.pairs())):
            if isinstance(c_token, util.string_types):
                if i == 0 and c_token.endswith(':' + _DEFAULT_TOKEN):
                    return to_chop
                elif c_token != 'relationship:%s' % (
                        _WILDCARD_TOKEN,) and c_token != p_prop.key:
                    return None
            elif isinstance(c_token, PropComparator):
                if c_token.property is not p_prop:
                    return None
        else:
            i += 1
        return to_chop[i:]
    def _bind_loader(self, query, context, raiseerr):
        # resolve this unbound option against the query's entities and
        # transfer its state into a real, entity-rooted Load object
        start_path = self.path
        # _current_path implies we're in a
        # secondary load with an existing path
        current_path = query._current_path
        if current_path:
            start_path = self._chop_path(start_path, current_path)
        if not start_path:
            return None
        token = start_path[0]
        if isinstance(token, util.string_types):
            entity = self._find_entity_basestring(query, token, raiseerr)
        elif isinstance(token, PropComparator):
            prop = token.property
            entity = self._find_entity_prop_comparator(
                query,
                prop.key,
                token._parententity,
                raiseerr)
        else:
            raise sa_exc.ArgumentError(
                "mapper option expects "
                "string key or list of attributes")
        if not entity:
            return
        path_element = entity.entity_zero
        # transfer our entity-less state into a Load() object
        # with a real entity path.
        loader = Load(path_element)
        loader.context = context
        loader.strategy = self.strategy
        loader.is_opts_only = self.is_opts_only
        path = loader.path
        for token in start_path:
            loader.path = path = loader._generate_path(
                loader.path, token, None, raiseerr)
            if path is None:
                return
        loader.local_opts.update(self.local_opts)
        if loader.path.has_entity:
            effective_path = loader.path.parent
        else:
            effective_path = loader.path
        # prioritize "first class" options over those
        # that were "links in the chain", e.g. "x" and "y" in
        # someload("x.y.z") versus someload("x") / someload("x.y")
        if effective_path.is_token:
            for path in effective_path.generate_for_superclasses():
                loader._set_for_path(
                    context, path,
                    replace=not self._is_chain_link,
                    merge_opts=self.is_opts_only)
        else:
            loader._set_for_path(
                context, effective_path,
                replace=not self._is_chain_link,
                merge_opts=self.is_opts_only)
    def _find_entity_prop_comparator(self, query, token, mapper, raiseerr):
        # locate the query entity corresponding to an attribute-based token
        if _is_aliased_class(mapper):
            searchfor = mapper
        else:
            searchfor = _class_to_mapper(mapper)
        for ent in query._mapper_entities:
            if ent.corresponds_to(searchfor):
                return ent
        else:
            if raiseerr:
                if not list(query._mapper_entities):
                    raise sa_exc.ArgumentError(
                        "Query has only expression-based entities - "
                        "can't find property named '%s'."
                        % (token, )
                    )
                else:
                    raise sa_exc.ArgumentError(
                        "Can't find property '%s' on any entity "
                        "specified in this Query.  Note the full path "
                        "from root (%s) to target entity must be specified."
                        % (token, ",".join(str(x) for
                                           x in query._mapper_entities))
                    )
            else:
                return None
    def _find_entity_basestring(self, query, token, raiseerr):
        # locate the query entity for a string-named token; wildcards
        # require exactly one entity in the query
        if token.endswith(':' + _WILDCARD_TOKEN):
            if len(list(query._mapper_entities)) != 1:
                if raiseerr:
                    raise sa_exc.ArgumentError(
                        "Wildcard loader can only be used with exactly "
                        "one entity.  Use Load(ent) to specify "
                        "specific entities.")
        elif token.endswith(_DEFAULT_TOKEN):
            raiseerr = False
        for ent in query._mapper_entities:
            # return only the first _MapperEntity when searching
            # based on string prop name.   Ideally object
            # attributes are used to specify more exactly.
            return ent
        else:
            if raiseerr:
                raise sa_exc.ArgumentError(
                    "Query has only expression-based entities - "
                    "can't find property named '%s'."
                    % (token, )
                )
            else:
                return None
class loader_option(object):
    """Decorator that installs a loader-option function as a method on
    :class:`.Load` and later pairs it with its unbound (free-standing)
    variants via :meth:`._add_unbound_fn` / :meth:`._add_unbound_all_fn`.
    """
    def __init__(self):
        pass
    def __call__(self, fn):
        # register fn as a new generative method on Load, refusing to
        # silently overwrite an existing one
        self.name = name = fn.__name__
        self.fn = fn
        if hasattr(Load, name):
            raise TypeError("Load class already has a %s method." % (name))
        setattr(Load, name, fn)
        return self
    def _add_unbound_fn(self, fn):
        # attach the module-level (unbound) variant; swap docstrings so
        # the Load method points readers at the free function's docs
        self._unbound_fn = fn
        fn_doc = self.fn.__doc__
        self.fn.__doc__ = """Produce a new :class:`.Load` object with the
        :func:`.orm.%(name)s` option applied.
        See :func:`.orm.%(name)s` for usage examples.
        """ % {"name": self.name}
        fn.__doc__ = fn_doc
        return self
    def _add_unbound_all_fn(self, fn):
        # attach the deprecated "<name>_all()" variant with a generated
        # deprecation docstring
        self._unbound_all_fn = fn
        fn.__doc__ = """Produce a standalone "all" option for :func:`.orm.%(name)s`.
        .. deprecated:: 0.9.0
            The "_all()" style is replaced by method chaining, e.g.::
                session.query(MyClass).options(
                    %(name)s("someattribute").%(name)s("anotherattribute")
                )
        """ % {"name": self.name}
        return self
@loader_option()
def contains_eager(loadopt, attr, alias=None):
    r"""Indicate that the given attribute should be eagerly loaded from
    columns stated manually in the query.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    The option is used in conjunction with an explicit join that loads
    the desired rows, i.e.::

        sess.query(Order).\
                join(Order.user).\
                options(contains_eager(Order.user))

    The above query would join from the ``Order`` entity to its related
    ``User`` entity, and the returned ``Order`` objects would have the
    ``Order.user`` attribute pre-populated.

    :func:`contains_eager` also accepts an `alias` argument, which is the
    string name of an alias, an :func:`~sqlalchemy.sql.expression.alias`
    construct, or an :func:`~sqlalchemy.orm.aliased` construct. Use this when
    the eagerly-loaded rows are to come from an aliased table::

        user_alias = aliased(User)
        sess.query(Order).\
                join((user_alias, Order.user)).\
                options(contains_eager(Order.user, alias=user_alias))

    .. seealso::

        :ref:`loading_toplevel`

        :ref:`contains_eager`

    """
    if alias is not None:
        if not isinstance(alias, str):
            # aliased()/alias() constructs are coerced to their selectable
            # here; plain string alias names are resolved later at query
            # compile time
            info = inspect(alias)
            alias = info.selectable
    cloned = loadopt.set_relationship_strategy(
        attr,
        {"lazy": "joined"},
        propagate_to_loaders=False
    )
    # the strategy reads this to target the user's explicit join/alias
    cloned.local_opts['eager_from_alias'] = alias
    return cloned
@contains_eager._add_unbound_fn
def contains_eager(*keys, **kw):
    # unbound (string-path) form; chained=True repeats the strategy for
    # every token in a dotted path like "a.b.c"
    return _UnboundLoad()._from_keys(
        _UnboundLoad.contains_eager, keys, True, kw)
@loader_option()
def load_only(loadopt, *attrs):
    """Indicate that for a particular entity, only the given list
    of column-based attribute names should be loaded; all others will be
    deferred.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    Example - given a class ``User``, load only the ``name`` and ``fullname``
    attributes::

        session.query(User).options(load_only("name", "fullname"))

    Example - given a relationship ``User.addresses -> Address``, specify
    subquery loading for the ``User.addresses`` collection, but on each
    ``Address`` object load only the ``email_address`` attribute::

        session.query(User).options(
                subqueryload("addresses").load_only("email_address")
        )

    For a :class:`.Query` that has multiple entities, the lead entity can be
    specifically referred to using the :class:`.Load` constructor::

        session.query(User, Address).join(User.addresses).options(
                    Load(User).load_only("name", "fullname"),
                    Load(Address).load_only("email_address")
            )

    .. versionadded:: 0.9.0

    """
    # undeferred, instrumented loading for the named attributes...
    cloned = loadopt.set_column_strategy(
        attrs,
        {"deferred": False, "instrument": True}
    )
    # ...and defer everything else, while keeping primary keys loaded
    cloned.set_column_strategy("*",
                               {"deferred": True, "instrument": True},
                               {"undefer_pks": True})
    return cloned
@load_only._add_unbound_fn
def load_only(*attrs):
    # Standalone form: apply load_only to a fresh unbound option.
    unbound = _UnboundLoad()
    return unbound.load_only(*attrs)
@loader_option()
def joinedload(loadopt, attr, innerjoin=None):
    """Indicate that the given attribute should be loaded using joined
    eager loading.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    examples::

        # joined-load the "orders" collection on "User"
        query(User).options(joinedload(User.orders))

        # joined-load Order.items and then Item.keywords
        query(Order).options(
            joinedload(Order.items).joinedload(Item.keywords))

        # lazily load Order.items, but when Items are loaded,
        # joined-load the keywords collection
        query(Order).options(
            lazyload(Order.items).joinedload(Item.keywords))

    :param innerjoin: if ``True``, the joined eager load uses an inner join
     instead of the default left outer join::

        query(Order).options(joinedload(Order.user, innerjoin=True))

     When chaining joins of mixed OUTER/INNER kinds, INNER joins are
     right-nested inside the OUTER joins that precede them, e.g.
     "a LEFT OUTER JOIN (b JOIN c)".  On older SQLite (< 3.7.16), this
     form of JOIN is translated to use full subqueries as the syntax is
     otherwise not directly supported.

     The value ``"unnested"`` requests an INNER JOIN *unless* the join is
     linked to a LEFT OUTER JOIN on its left, in which case it renders as
     LEFT OUTER JOIN instead of being right-nested, e.g.
     "a LEFT OUTER JOIN b LEFT OUTER JOIN c".

    .. note:: The "unnested" flag does **not** affect the JOIN rendered
       from a many-to-many association table, e.g. a table configured
       as :paramref:`.relationship.secondary`, to the target table; for
       correctness of results, these joins are always INNER and are
       therefore right-nested if linked to an OUTER join.

    .. versionchanged:: 1.0.0 ``innerjoin=True`` now implies
       ``innerjoin="nested"``, whereas in 0.9 it implied
       ``innerjoin="unnested"``.  In order to achieve the pre-1.0 "unnested"
       inner join behavior, use the value ``innerjoin="unnested"``.
       See :ref:`migration_3008`.

    .. note:: The joins produced by :func:`.orm.joinedload` are
       **anonymously aliased**; they cannot be referred to by the
       :class:`.Query`, including for ordering.  See
       :ref:`zen_of_eager_loading`.  For an explicit JOIN, use
       :meth:`.Query.join`; to combine explicit JOINs with eager loading
       of collections, use :func:`.orm.contains_eager`; see
       :ref:`contains_eager`.

    .. seealso::

        :ref:`loading_toplevel`

        :ref:`joined_eager_loading`

    """
    option = loadopt.set_relationship_strategy(attr, {"lazy": "joined"})
    if innerjoin is not None:
        option.local_opts['innerjoin'] = innerjoin
    return option
@joinedload._add_unbound_fn
def joinedload(*keys, **kw):
    # Standalone form: only the terminal attribute in ``keys`` receives
    # the joined-load strategy (chained=False).
    chained = False
    return _UnboundLoad._from_keys(_UnboundLoad.joinedload, keys, chained, kw)
@joinedload._add_unbound_all_fn
def joinedload_all(*keys, **kw):
    # Legacy "_all" form: every attribute along ``keys`` receives the
    # joined-load strategy (chained=True).
    chained = True
    return _UnboundLoad._from_keys(_UnboundLoad.joinedload, keys, chained, kw)
@loader_option()
def subqueryload(loadopt, attr):
    """Indicate that the given attribute should be loaded using
    subquery eager loading.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    examples::

        # subquery-load the "orders" collection on "User"
        query(User).options(subqueryload(User.orders))

        # subquery-load Order.items and then Item.keywords
        query(Order).options(
            subqueryload(Order.items).subqueryload(Item.keywords))

        # lazily load Order.items, but when Items are loaded,
        # subquery-load the keywords collection
        query(Order).options(
            lazyload(Order.items).subqueryload(Item.keywords))

    .. seealso::

        :ref:`loading_toplevel`

        :ref:`subquery_eager_loading`

    """
    strategy = {"lazy": "subquery"}
    return loadopt.set_relationship_strategy(attr, strategy)
@subqueryload._add_unbound_fn
def subqueryload(*keys):
    # Standalone form: terminal attribute only (chained=False), no kwargs.
    chained = False
    return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, chained, {})
@subqueryload._add_unbound_all_fn
def subqueryload_all(*keys):
    # Legacy "_all" form: the strategy applies along the whole path.
    chained = True
    return _UnboundLoad._from_keys(_UnboundLoad.subqueryload, keys, chained, {})
@loader_option()
def lazyload(loadopt, attr):
    """Indicate that the given attribute should be loaded using "lazy"
    loading.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    .. seealso::

        :ref:`loading_toplevel`

        :ref:`lazy_loading`

    """
    strategy = {"lazy": "select"}
    return loadopt.set_relationship_strategy(attr, strategy)
@lazyload._add_unbound_fn
def lazyload(*keys):
    # Standalone form: terminal attribute only (chained=False).
    chained = False
    return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, chained, {})
@lazyload._add_unbound_all_fn
def lazyload_all(*keys):
    # Legacy "_all" form: the strategy applies along the whole path.
    chained = True
    return _UnboundLoad._from_keys(_UnboundLoad.lazyload, keys, chained, {})
@loader_option()
def immediateload(loadopt, attr):
    """Indicate that the given attribute should be loaded using
    an immediate load with a per-attribute SELECT statement.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    .. seealso::

        :ref:`loading_toplevel`

    """
    return loadopt.set_relationship_strategy(attr, {"lazy": "immediate"})
@immediateload._add_unbound_fn
def immediateload(*keys):
    # Standalone form: terminal attribute only (chained=False).
    chained = False
    return _UnboundLoad._from_keys(_UnboundLoad.immediateload, keys, chained, {})
@loader_option()
def noload(loadopt, attr):
    """Indicate that the given relationship attribute should remain unloaded.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    :func:`.orm.noload` applies to :func:`.relationship` attributes; for
    column-based attributes, see :func:`.orm.defer`.

    .. seealso::

        :ref:`loading_toplevel`

    """
    strategy = {"lazy": "noload"}
    return loadopt.set_relationship_strategy(attr, strategy)
@noload._add_unbound_fn
def noload(*keys):
    # Standalone form: terminal attribute only (chained=False).
    chained = False
    return _UnboundLoad._from_keys(_UnboundLoad.noload, keys, chained, {})
@loader_option()
def raiseload(loadopt, attr, sql_only=False):
    """Indicate that the given relationship attribute should disallow
    lazy loads.

    A relationship attribute configured with :func:`.orm.raiseload` will
    raise an :exc:`~sqlalchemy.exc.InvalidRequestError` upon access.  This
    is typically useful when an application wants to ensure that all
    relationship attributes accessed in a particular context were already
    populated via eager loading: instead of auditing SQL logs for stray
    lazy loads, this strategy makes them raise immediately.

    :param sql_only: if True, raise only if the lazy load would emit SQL,
     but not if it is only checking the identity map, or determining that
     the related value should just be None due to missing keys.  When False,
     the strategy will raise for all varieties of lazyload.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    :func:`.orm.raiseload` applies to :func:`.relationship` attributes only.

    .. versionadded:: 1.1

    .. seealso::

        :ref:`loading_toplevel`

        :ref:`prevent_lazy_with_raiseload`

    """
    # "raise_on_sql" raises only when SQL would actually be emitted;
    # "raise" raises on any lazy-load attempt.
    strategy_name = "raise_on_sql" if sql_only else "raise"
    return loadopt.set_relationship_strategy(attr, {"lazy": strategy_name})
@raiseload._add_unbound_fn
def raiseload(*keys, **kw):
    # Standalone form: terminal attribute only (chained=False).
    chained = False
    return _UnboundLoad._from_keys(_UnboundLoad.raiseload, keys, chained, kw)
@loader_option()
def defaultload(loadopt, attr):
    """Indicate an attribute should load using its default loader style.

    This method is used to link to other loader options further into
    a chain of attributes without altering the loader style of the links
    along the chain.  For example, to set joined eager loading for an
    element of an element::

        session.query(MyClass).options(
            defaultload(MyClass.someattribute).
            joinedload(MyOtherClass.someotherattribute)
        )

    :func:`.defaultload` is also useful for setting column-level options
    on a related class, namely that of :func:`.defer` and :func:`.undefer`::

        session.query(MyClass).options(
            defaultload(MyClass.someattribute).
            defer("some_column").
            undefer("some_other_column")
        )

    .. seealso::

        :ref:`relationship_loader_options`

        :ref:`deferred_loading_w_multiple`

    """
    # A strategy of None leaves the relationship's configured loader in place.
    return loadopt.set_relationship_strategy(attr, None)
@defaultload._add_unbound_fn
def defaultload(*keys):
    # Standalone form: terminal attribute only (chained=False).
    chained = False
    return _UnboundLoad._from_keys(_UnboundLoad.defaultload, keys, chained, {})
@loader_option()
def defer(loadopt, key):
    r"""Indicate that the given column-oriented attribute should be
    deferred, e.g. not loaded until accessed.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    e.g.::

        from sqlalchemy.orm import defer

        session.query(MyClass).options(
            defer("attribute_one"),
            defer("attribute_two"))

        session.query(MyClass).options(
            defer(MyClass.attribute_one),
            defer(MyClass.attribute_two))

    To defer an attribute on a related class, spell out the path one
    relationship at a time, using :func:`.orm.defaultload` for links whose
    loading style should stay unchanged::

        session.query(MyClass).options(
            defaultload("someattr").defer("some_column"))

    A :class:`.Load` object that is present on a certain path can have
    :meth:`.Load.defer` called multiple times, each operating on the same
    parent entity::

        session.query(MyClass).options(
            defaultload("someattr").
            defer("some_column").
            defer("some_other_column").
            defer("another_column")
        )

    :param key: Attribute to be deferred.

    :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
     of specifying a path as a series of attributes, which is now superseded
     by the method-chained style.

    .. seealso::

        :ref:`deferred`

        :func:`.orm.undefer`

    """
    strategy = {"deferred": True, "instrument": True}
    return loadopt.set_column_strategy((key,), strategy)
@defer._add_unbound_fn
def defer(key, *addl_attrs):
    # ``addl_attrs`` supports the deprecated 0.8-style path spelling.
    path = (key,) + addl_attrs
    return _UnboundLoad._from_keys(_UnboundLoad.defer, path, False, {})
@loader_option()
def undefer(loadopt, key):
    r"""Indicate that the given column-oriented attribute should be
    undeferred, e.g. specified within the SELECT statement of the entity
    as a whole.

    The column being undeferred is typically set up on the mapping as a
    :func:`.deferred` attribute.

    This function is part of the :class:`.Load` interface and supports
    both method-chained and standalone operation.

    Examples::

        # undefer two columns
        session.query(MyClass).options(undefer("col1"), undefer("col2"))

        # undefer all columns specific to a single class using Load + *
        session.query(MyClass, MyOtherClass).options(
            Load(MyClass).undefer("*"))

    :param key: Attribute to be undeferred.

    :param \*addl_attrs: Deprecated; this option supports the old 0.8 style
     of specifying a path as a series of attributes, which is now superseded
     by the method-chained style.

    .. seealso::

        :ref:`deferred`

        :func:`.orm.defer`

        :func:`.orm.undefer_group`

    """
    strategy = {"deferred": False, "instrument": True}
    return loadopt.set_column_strategy((key,), strategy)
@undefer._add_unbound_fn
def undefer(key, *addl_attrs):
    # ``addl_attrs`` supports the deprecated 0.8-style path spelling.
    path = (key,) + addl_attrs
    return _UnboundLoad._from_keys(_UnboundLoad.undefer, path, False, {})
@loader_option()
def undefer_group(loadopt, name):
    """Indicate that columns within the given deferred group name should be
    undeferred.

    The columns being undeferred are set up on the mapping as
    :func:`.deferred` attributes and include a "group" name.

    E.g::

        session.query(MyClass).options(undefer_group("large_attrs"))

    To undefer a group of attributes on a related entity, the path can be
    spelled out using relationship loader options, such as
    :func:`.orm.defaultload`::

        session.query(MyClass).options(
            defaultload("someattr").undefer_group("large_attrs"))

    .. versionchanged:: 0.9.0 :func:`.orm.undefer_group` is now specific to
       a particular entity load path.

    .. seealso::

        :ref:`deferred`

        :func:`.orm.defer`

        :func:`.orm.undefer`

    """
    # A wildcard column entry carrying only the group flag; opts_only=True
    # means no loader strategy is actually assigned for "*".
    group_opts = {"undefer_group_%s" % name: True}
    return loadopt.set_column_strategy("*", None, group_opts, opts_only=True)
@undefer_group._add_unbound_fn
def undefer_group(name):
    # Standalone form: apply undefer_group to a fresh unbound option.
    unbound = _UnboundLoad()
    return unbound.undefer_group(name)
| mit |
CitrineInformatics/python-citrination-client | citrination_client/models/columns/alloy_composition.py | 1 | 2265 | from citrination_client.models.columns.base import BaseColumn
class AlloyCompositionColumn(BaseColumn):
    """
    Column configuration for alloy composition data in a data view.

    Carries the basic column options plus the element that makes up the
    balance of the composition and the basis (total) amount used when
    filling in that balance.
    """

    TYPE = "Alloy composition"

    def __init__(self, name, role, balance_element, group_by_key=False, units=None, basis=100.0):
        """
        Constructor.

        :param name: The name of the column
        :type name: str
        :param role: The role the column will play in machine learning:
            "Input", "Output", "Latent Variable", or "Ignore"
        :type role: str
        :param balance_element: The element making up the balance in the
            composition
        :type balance_element: str
        :param group_by_key: Whether or not this column should be used for
            grouping during cross validation
        :type group_by_key: bool
        :param units: Optionally, the units for the column
        :type units: str
        :param basis: The total amount of composition when deciding how to
            fill the balance
        :type basis: float
        """
        super(AlloyCompositionColumn, self).__init__(
            name=name, role=role, group_by_key=group_by_key, units=units)
        self._balance_element = balance_element
        self._basis = basis

    def build_options(self):
        # Subclass-specific options contributed to the column definition.
        return {
            "balance_element": self.balance_element,
            "basis": self.basis
        }

    @property
    def balance_element(self):
        return self._balance_element

    @balance_element.setter
    def balance_element(self, value):
        self._balance_element = value

    @balance_element.deleter
    def balance_element(self):
        self._balance_element = None

    @property
    def basis(self):
        return self._basis

    @basis.setter
    def basis(self, value):
        self._basis = value

    @basis.deleter
    def basis(self):
        self._basis = None
| apache-2.0 |
wanghaven/nupic | src/nupic/database/Connection.py | 49 | 22041 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import logging
import platform
import traceback
from DBUtils import SteadyDB
from DBUtils.PooledDB import PooledDB
import pymysql
from nupic.support.configuration import Configuration
# Dotted module name used to build per-class logger names (see _getLogger).
_MODULE_NAME = "nupic.database.Connection"

# Maximum number of outstanding ConnectionWrapper instances allowed while
# concurrency checking is enabled; None disables the diagnostic entirely.
g_max_concurrency = None

# When True and g_max_concurrency is exceeded, ConcurrencyExceededError is
# raised in addition to the diagnostic logging.
g_max_concurrency_raise_exception = False

""" This flag controls a diagnostic feature for debugging unexpected concurrency
in acquiring ConnectionWrapper instances.
The value None (default) disables this feature.
enableConcurrencyChecks() and disableConcurrencyChecks() are the public API
functions for controlling this diagnostic feature.
When g_max_concurrency is exceeded, this module will log useful info (backtraces
of concurrent connection acquisitions). If g_max_concurrency_raise_exception is
true, it will also raise ConcurrencyExceededError with helpful information.
"""
class ConcurrencyExceededError(Exception):
    """Raised when the number of outstanding ConnectionWrapper instances
    exceeds g_max_concurrency and exception raising is enabled."""
def enableConcurrencyChecks(maxConcurrency, raiseException=True):
    """ Enable the diagnostic feature for debugging unexpected concurrency in
    acquiring ConnectionWrapper instances.

    NOTE: This MUST be done early in your application's execution, BEFORE any
    accesses to ConnectionFactory or connection policies from your application
    (including imports and sub-imports of your app).

    Parameters:
    ----------------------------------------------------------------
    maxConcurrency:   A non-negative integer that represents the maximum expected
                        number of outstanding connections.  When this value is
                        exceeded, useful information will be logged and, depending
                        on the value of the raiseException arg,
                        ConcurrencyExceededError may be raised.
    raiseException:   If true, ConcurrencyExceededError will be raised when
                        maxConcurrency is exceeded.

    :raises ValueError: if maxConcurrency is negative.
    """
    global g_max_concurrency, g_max_concurrency_raise_exception

    # Validate explicitly rather than with ``assert``, which is silently
    # stripped when Python runs with optimization (-O) enabled.
    if maxConcurrency < 0:
        raise ValueError(
            "maxConcurrency must be a non-negative integer, but got %r"
            % (maxConcurrency,))

    g_max_concurrency = maxConcurrency
    g_max_concurrency_raise_exception = raiseException
    return
def disableConcurrencyChecks():
    """Turn off the concurrency-check diagnostic and reset its settings."""
    global g_max_concurrency, g_max_concurrency_raise_exception

    # Restore module defaults: no limit, no exception raising.
    g_max_concurrency = None
    g_max_concurrency_raise_exception = False
    return
class ConnectionFactory(object):
    """ Database connection factory.

    WARNING: Minimize the scope of connection ownership to cover
    only the execution of SQL statements in order to avoid creating multiple
    outstanding SQL connections in gevent-based apps (e.g.,
    ProductionWorker) when polling code that calls timer.sleep()
    executes in the scope of an outstanding SQL connection, allowing a
    context switch to another greenlet that may also acquire an SQL connection.
    This is highly undesirable because SQL/RDS servers allow a limited number
    of connections. So, release connections before calling into any other code.
    Since connections are pooled by default, the overhead of calling
    ConnectionFactory.get() is insignificant.

    Usage Examples:

    # Add Context Manager (with ...) support for Jython/Python 2.5.x, if needed
    from __future__ import with_statement

    example1 (preferred):
        with ConnectionFactory.get() as conn:
            conn.cursor.execute("SELECT ...")

    example2 (if 'with' statement can't be used for some reason):
        conn = ConnectionFactory.get()
        try:
            conn.cursor.execute("SELECT ...")
        finally:
            conn.release()
    """

    @classmethod
    def get(cls):
        """ Acquire a ConnectionWrapper instance that represents a connection
        to the SQL server per nupic.cluster.database.* configuration settings.

        NOTE: caller is responsible for calling the ConnectionWrapper instance's
        release() method after using the connection in order to release resources.
        Better yet, use the returned ConnectionWrapper instance in a Context Manager
        statement for automatic invocation of release():

        Example:
            # If using Jython 2.5.x, first import with_statement at the very top of
            # your script (don't need this import for Jython/Python 2.6.x and later):
            from __future__ import with_statement
            # Then:
            from nupic.database.Connection import ConnectionFactory
            # Then use it like this
            with ConnectionFactory.get() as conn:
                conn.cursor.execute("SELECT ...")
                conn.cursor.fetchall()
                conn.cursor.execute("INSERT ...")

        WARNING: DO NOT close the underlying connection or cursor as it may be
        shared by other modules in your process. ConnectionWrapper's release()
        method will do the right thing.

        Parameters:
        ----------------------------------------------------------------
        retval:       A ConnectionWrapper instance. NOTE: Caller is responsible
                        for releasing resources as described above.
        """
        # Lazily instantiate the singleton connection policy on first use;
        # all subsequent calls go straight to acquireConnection().
        if cls._connectionPolicy is None:
            logger = _getLogger(cls)
            logger.info("Creating db connection policy via provider %r",
                        cls._connectionPolicyInstanceProvider)
            cls._connectionPolicy = cls._connectionPolicyInstanceProvider()

            logger.debug("Created connection policy: %r", cls._connectionPolicy)

        return cls._connectionPolicy.acquireConnection()

    @classmethod
    def close(cls):
        """ Close ConnectionFactory's connection policy. Typically, there is no need
        to call this method as the system will automatically close the connections
        when the process exits.

        NOTE: This method should be used with CAUTION. It is designed to be
        called ONLY by the code responsible for startup and shutdown of the process
        since it closes the connection(s) used by ALL clients in this process.
        """
        if cls._connectionPolicy is not None:
            cls._connectionPolicy.close()
            cls._connectionPolicy = None

        return

    @classmethod
    def setConnectionPolicyProvider(cls, provider):
        """ Set the method for ConnectionFactory to use when it needs to
        instantiate its database connection policy.

        NOTE: This method should be used with CAUTION. ConnectionFactory's default
        behavior should be adequate for all NuPIC code, and this method is provided
        primarily for diagnostics. It is designed to only be called by the code
        responsible for startup of the process since the provider method has no
        impact after ConnectionFactory's connection policy instance is instantiated.

        See ConnectionFactory._createDefaultPolicy

        Parameters:
        ----------------------------------------------------------------
        provider:     The method that instantiates the singleton database
                        connection policy to be used by ConnectionFactory class.
                        The method must be compatible with the following signature:
                            <DatabaseConnectionPolicyIface subclass instance> provider()
        """
        cls._connectionPolicyInstanceProvider = provider
        return

    @classmethod
    def _createDefaultPolicy(cls):
        """ [private] Create the default database connection policy instance

        Parameters:
        ----------------------------------------------------------------
        retval:       The default database connection policy instance
        """
        logger = _getLogger(cls)

        logger.debug(
            "Creating database connection policy: platform=%r; pymysql.VERSION=%r",
            platform.system(), pymysql.VERSION)

        if platform.system() == "Java":
            # NOTE: PooledDB doesn't seem to work under Jython
            # NOTE: not appropriate for multi-threaded applications.
            # TODO: this was fixed in Webware DBUtils r8228, so once
            #  we pick up a release with this fix, we should use
            #  PooledConnectionPolicy for both Jython and Python.
            policy = SingleSharedConnectionPolicy()
        else:
            policy = PooledConnectionPolicy()

        return policy

    _connectionPolicy = None
    """ Our singleton database connection policy instance """

    # NOTE: this class-body assignment captures the classmethod object defined
    # above, so cls._connectionPolicyInstanceProvider() invokes
    # _createDefaultPolicy bound to this class unless it is replaced via
    # setConnectionPolicyProvider().
    _connectionPolicyInstanceProvider = _createDefaultPolicy
    """ This class variable holds the method that DatabaseConnectionPolicy uses
    to create the singleton database connection policy instance
    """
# <-- End of class ConnectionFactory
class ConnectionWrapper(object):
    """ An instance of this class is returned by
    acquireConnection() methods of our database connection policy classes.

    Wraps a database connection and an open cursor, and knows how to hand
    both back to the owning policy via the supplied releaser callback.
    Also participates in the module's optional concurrency-check diagnostic
    (see g_max_concurrency).
    """

    _clsNumOutstanding = 0
    """ For tracking the count of outstanding instances """

    _clsOutstandingInstances = set()
    """ tracks outstanding instances of this class while g_max_concurrency is
    enabled
    """

    def __init__(self, dbConn, cursor, releaser, logger):
        """
        Parameters:
        ----------------------------------------------------------------
        dbConn:       the underlying database connection instance
        cursor:       database cursor
        releaser:     a method to call to release the connection and cursor;
                        method signature:
                            None dbConnReleaser(dbConn, cursor)
        logger:       logger used for diagnostics from this wrapper
        """
        global g_max_concurrency

        try:
            self._logger = logger

            self.dbConn = dbConn
            """ database connection instance """

            self.cursor = cursor
            """ Public cursor instance. Don't close it directly:  Connection.release()
            will do the right thing.
            """

            self._releaser = releaser

            self._addedToInstanceSet = False
            """ True if we added self to _clsOutstandingInstances """

            self._creationTracebackString = None
            """ Instance creation traceback string (if g_max_concurrency is enabled) """

            if g_max_concurrency is not None:
                # NOTE: must be called *before* _clsNumOutstanding is incremented
                self._trackInstanceAndCheckForConcurrencyViolation()

            logger.debug("Acquired: %r; numOutstanding=%s",
                         self, self._clsNumOutstanding)

        except:
            logger.exception("Exception while instantiating %r;", self)
            # Clean up and re-raise
            if self._addedToInstanceSet:
                self._clsOutstandingInstances.remove(self)
            releaser(dbConn=dbConn, cursor=cursor)
            raise
        else:
            # Only count this instance once construction fully succeeded.
            self.__class__._clsNumOutstanding += 1

        return

    def __repr__(self):
        return "%s<dbConn=%r, dbConnImpl=%r, cursor=%r, creationTraceback=%r>" % (
            self.__class__.__name__, self.dbConn,
            getattr(self.dbConn, "_con", "unknown"),
            self.cursor, self._creationTracebackString,)

    def __enter__(self):
        """ [Context Manager protocol method] Permit a ConnectionWrapper instance
        to be used in a context manager expression (with ... as:) to facilitate
        robust release of resources (instead of try:/finally:/release()).  See
        examples in ConnectionFactory docstring.
        """
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """ [Context Manager protocol method] Release resources. """
        self.release()

        # Return False to allow propagation of exception, if any
        return False

    def release(self):
        """ Release the database connection and cursor

        The receiver of the Connection instance MUST call this method in order
        to reclaim resources
        """
        self._logger.debug("Releasing: %r", self)

        # Discard self from set of outstanding instances
        if self._addedToInstanceSet:
            try:
                self._clsOutstandingInstances.remove(self)
            except:
                self._logger.exception(
                    "Failed to remove self from _clsOutstandingInstances: %r;", self)
                raise

        self._releaser(dbConn=self.dbConn, cursor=self.cursor)

        self.__class__._clsNumOutstanding -= 1
        assert self._clsNumOutstanding >= 0, \
            "_clsNumOutstanding=%r" % (self._clsNumOutstanding,)

        # Drop all references so reuse after release fails fast and the
        # underlying objects can be reclaimed.
        self._releaser = None
        self.cursor = None
        self.dbConn = None
        self._creationTracebackString = None
        self._addedToInstanceSet = False
        self._logger = None
        return

    def _trackInstanceAndCheckForConcurrencyViolation(self):
        """ Check for concurrency violation and add self to
        _clsOutstandingInstances.

        ASSUMPTION: Called from constructor BEFORE _clsNumOutstanding is
        incremented
        """
        global g_max_concurrency, g_max_concurrency_raise_exception

        assert g_max_concurrency is not None
        assert self not in self._clsOutstandingInstances, repr(self)

        # Populate diagnostic info
        self._creationTracebackString = traceback.format_stack()

        # Check for concurrency violation
        if self._clsNumOutstanding >= g_max_concurrency:
            # NOTE: It's possible for _clsNumOutstanding to be greater than
            # len(_clsOutstandingInstances) if concurrency check was enabled after
            # unrelease allocations.
            errorMsg = ("With numOutstanding=%r, exceeded concurrency limit=%r "
                        "when requesting %r. OTHER TRACKED UNRELEASED "
                        "INSTANCES (%s): %r") % (
                self._clsNumOutstanding, g_max_concurrency, self,
                len(self._clsOutstandingInstances), self._clsOutstandingInstances,)

            self._logger.error(errorMsg)

            if g_max_concurrency_raise_exception:
                raise ConcurrencyExceededError(errorMsg)

        # Add self to tracked instance set
        self._clsOutstandingInstances.add(self)
        self._addedToInstanceSet = True

        return
class DatabaseConnectionPolicyIface(object):
    """ Base class/interface for database connection policies.

    NOTE: We can't use the abc (abstract base class) module because
    Jython 2.5.x does not support abc
    """

    def close(self):
        """ Close the policy instance and its shared database connection. """
        # Subclasses must override.
        raise NotImplementedError()

    def acquireConnection(self):
        """ Get a Connection instance.

        Parameters:
        ----------------------------------------------------------------
        retval:       A ConnectionWrapper instance.
                        Caller is responsible for calling the ConnectionWrapper
                        instance's release() method to release resources.
        """
        # Subclasses must override.
        raise NotImplementedError()
class SingleSharedConnectionPolicy(DatabaseConnectionPolicyIface):
    """ This connection policy maintains a single shared database connection.

    NOTE: this type of connection policy is not appropriate for multi-threaded
    applications."""

    def __init__(self):
        """ Construct an instance.  The instance is ready for
        acquireConnection() calls immediately after construction.
        """
        self._logger = _getLogger(self.__class__)
        # SteadyDB transparently re-opens the connection if it is lost.
        self._conn = SteadyDB.connect(**_getCommonSteadyDBArgsDict())
        self._logger.debug("Created %s", self.__class__.__name__)
        return

    def close(self):
        """ Close the policy instance and its shared database connection. """
        self._logger.info("Closing")
        if self._conn is not None:
            self._conn.close()
            self._conn = None
        else:
            self._logger.warning(
                "close() called, but connection policy was already closed")
        return

    def acquireConnection(self):
        """ Get a Connection instance.

        Parameters:
        ----------------------------------------------------------------
        retval:       A ConnectionWrapper instance. NOTE: Caller
                        is responsible for calling the ConnectionWrapper
                        instance's release() method or use it in a context manager
                        expression (with ... as:) to release resources.
        """
        self._logger.debug("Acquiring connection")

        # Check connection and attempt to re-establish it if it died (this is
        # what PooledDB does)
        self._conn._ping_check()
        connWrap = ConnectionWrapper(dbConn=self._conn,
                                     cursor=self._conn.cursor(),
                                     releaser=self._releaseConnection,
                                     logger=self._logger)
        return connWrap

    def _releaseConnection(self, dbConn, cursor):
        """ Release database connection and cursor; passed as a callback to
        ConnectionWrapper
        """
        self._logger.debug("Releasing connection")

        # Close the cursor
        cursor.close()

        # NOTE: we don't release the connection, since this connection policy is
        # sharing a single connection instance
        return
class PooledConnectionPolicy(DatabaseConnectionPolicyIface):
    """This connection policy maintains a pool of connections that are doled out
    as needed for each transaction.  NOTE: Appropriate for multi-threaded
    applications.  NOTE: The connections are NOT shared concurrently between
    threads.
    """

    def __init__(self):
        """ Construct an instance.  The instance is ready for
        acquireConnection() calls immediately after construction.
        """
        self._logger = _getLogger(self.__class__)
        self._logger.debug("Opening")
        self._pool = PooledDB(**_getCommonSteadyDBArgsDict())
        self._logger.info("Created %s", self.__class__.__name__)
        return

    def close(self):
        """ Close the policy instance and its database connection pool. """
        self._logger.info("Closing")

        if self._pool is not None:
            self._pool.close()
            self._pool = None
        else:
            self._logger.warning(
                "close() called, but connection policy was already closed")
        return

    def acquireConnection(self):
        """ Get a connection from the pool.

        Parameters:
        ----------------------------------------------------------------
        retval:       A ConnectionWrapper instance. NOTE: Caller
                        is responsible for calling the ConnectionWrapper
                        instance's release() method or use it in a context manager
                        expression (with ... as:) to release resources.
        """
        self._logger.debug("Acquiring connection")

        # shareable=False gives the caller exclusive use of the underlying
        # connection until it is returned to the pool.
        dbConn = self._pool.connection(shareable=False)
        connWrap = ConnectionWrapper(dbConn=dbConn,
                                     cursor=dbConn.cursor(),
                                     releaser=self._releaseConnection,
                                     logger=self._logger)
        return connWrap

    def _releaseConnection(self, dbConn, cursor):
        """ Release database connection and cursor; passed as a callback to
        ConnectionWrapper
        """
        self._logger.debug("Releasing connection")

        # Close the cursor
        cursor.close()

        # ... then return db connection back to the pool
        dbConn.close()
        return
class PerTransactionConnectionPolicy(DatabaseConnectionPolicyIface):
    """This connection policy establishes/breaks a new connection for every
    high-level transaction (i.e., API call).

    NOTE: this policy is intended for debugging, as it is generally not performant
    to establish and tear down db connections for every API call.
    """

    def __init__(self):
        """ Construct an instance.  The instance is ready for
        acquireConnection() calls immediately after construction.
        """
        self._logger = _getLogger(self.__class__)
        self._opened = True
        self._logger.info("Created %s", self.__class__.__name__)
        return

    def close(self):
        """ Close the policy instance. """
        self._logger.info("Closing")

        if self._opened:
            self._opened = False
        else:
            self._logger.warning(
                "close() called, but connection policy was already closed")
        return

    def acquireConnection(self):
        """ Create a Connection instance.

        Parameters:
        ----------------------------------------------------------------
        retval:       A ConnectionWrapper instance. NOTE: Caller
                        is responsible for calling the ConnectionWrapper
                        instance's release() method or use it in a context manager
                        expression (with ... as:) to release resources.
        """
        self._logger.debug("Acquiring connection")

        # A fresh connection per acquisition; released in _releaseConnection.
        dbConn = SteadyDB.connect(**_getCommonSteadyDBArgsDict())
        connWrap = ConnectionWrapper(dbConn=dbConn,
                                     cursor=dbConn.cursor(),
                                     releaser=self._releaseConnection,
                                     logger=self._logger)
        return connWrap

    def _releaseConnection(self, dbConn, cursor):
        """ Release database connection and cursor; passed as a callback to
        ConnectionWrapper
        """
        self._logger.debug("Releasing connection")

        # Close the cursor
        cursor.close()

        # ... then close the database connection
        dbConn.close()
        return
def _getCommonSteadyDBArgsDict():
  """ Return the keyword arguments for DBUtils.SteadyDB.SteadyDBConnection,
  with connection parameters taken from nupic configuration.
  """
  return {
    'creator': pymysql,
    'host': Configuration.get('nupic.cluster.database.host'),
    'port': int(Configuration.get('nupic.cluster.database.port')),
    'user': Configuration.get('nupic.cluster.database.user'),
    'passwd': Configuration.get('nupic.cluster.database.passwd'),
    'charset': 'utf8',
    'use_unicode': True,
    'setsession': ['SET AUTOCOMMIT = 1'],
  }
def _getLogger(cls, logLevel=None):
  """ Get a logger for the given class in this module, optionally forcing
  its level to logLevel.
  """
  loggerName = ".".join(['com.numenta', _MODULE_NAME, cls.__name__])
  logger = logging.getLogger(loggerName)
  if logLevel is not None:
    logger.setLevel(logLevel)
  return logger
| agpl-3.0 |
tinkerinestudio/Tinkerine-Suite | TinkerineSuite/pypy/lib-python/2.7/plat-freebsd6/IN.py | 172 | 12416 | # Generated by h2py from /usr/include/netinet/in.h
# Included from sys/cdefs.h
__GNUCLIKE_ASM = 3
__GNUCLIKE_ASM = 2
__GNUCLIKE___TYPEOF = 1
__GNUCLIKE___OFFSETOF = 1
__GNUCLIKE___SECTION = 1
__GNUCLIKE_ATTRIBUTE_MODE_DI = 1
__GNUCLIKE_CTOR_SECTION_HANDLING = 1
__GNUCLIKE_BUILTIN_CONSTANT_P = 1
__GNUCLIKE_BUILTIN_VARARGS = 1
__GNUCLIKE_BUILTIN_STDARG = 1
__GNUCLIKE_BUILTIN_VAALIST = 1
__GNUC_VA_LIST_COMPATIBILITY = 1
__GNUCLIKE_BUILTIN_NEXT_ARG = 1
__GNUCLIKE_BUILTIN_MEMCPY = 1
__CC_SUPPORTS_INLINE = 1
__CC_SUPPORTS___INLINE = 1
__CC_SUPPORTS___INLINE__ = 1
__CC_SUPPORTS___FUNC__ = 1
__CC_SUPPORTS_WARNING = 1
__CC_SUPPORTS_VARADIC_XXX = 1
__CC_SUPPORTS_DYNAMIC_ARRAY_INIT = 1
__CC_INT_IS_32BIT = 1
def __P(protos): return protos
def __STRING(x): return #x
def __XSTRING(x): return __STRING(x)
def __P(protos): return ()
def __STRING(x): return "x"
def __aligned(x): return __attribute__((__aligned__(x)))
def __section(x): return __attribute__((__section__(x)))
def __aligned(x): return __attribute__((__aligned__(x)))
def __section(x): return __attribute__((__section__(x)))
def __nonnull(x): return __attribute__((__nonnull__(x)))
def __predict_true(exp): return __builtin_expect((exp), 1)
def __predict_false(exp): return __builtin_expect((exp), 0)
def __predict_true(exp): return (exp)
def __predict_false(exp): return (exp)
def __format_arg(fmtarg): return __attribute__((__format_arg__ (fmtarg)))
def __FBSDID(s): return __IDSTRING(__CONCAT(__rcsid_,__LINE__),s)
def __RCSID(s): return __IDSTRING(__CONCAT(__rcsid_,__LINE__),s)
def __RCSID_SOURCE(s): return __IDSTRING(__CONCAT(__rcsid_source_,__LINE__),s)
def __SCCSID(s): return __IDSTRING(__CONCAT(__sccsid_,__LINE__),s)
def __COPYRIGHT(s): return __IDSTRING(__CONCAT(__copyright_,__LINE__),s)
_POSIX_C_SOURCE = 199009
_POSIX_C_SOURCE = 199209
__XSI_VISIBLE = 600
_POSIX_C_SOURCE = 200112
__XSI_VISIBLE = 500
_POSIX_C_SOURCE = 199506
_POSIX_C_SOURCE = 198808
__POSIX_VISIBLE = 200112
__ISO_C_VISIBLE = 1999
__POSIX_VISIBLE = 199506
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 199309
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 199209
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 199009
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 198808
__ISO_C_VISIBLE = 0
__POSIX_VISIBLE = 0
__XSI_VISIBLE = 0
__BSD_VISIBLE = 0
__ISO_C_VISIBLE = 1990
__POSIX_VISIBLE = 0
__XSI_VISIBLE = 0
__BSD_VISIBLE = 0
__ISO_C_VISIBLE = 1999
__POSIX_VISIBLE = 200112
__XSI_VISIBLE = 600
__BSD_VISIBLE = 1
__ISO_C_VISIBLE = 1999
# Included from sys/_types.h
# Included from machine/_types.h
# Included from machine/endian.h
_QUAD_HIGHWORD = 1
_QUAD_LOWWORD = 0
_LITTLE_ENDIAN = 1234
_BIG_ENDIAN = 4321
_PDP_ENDIAN = 3412
_BYTE_ORDER = _LITTLE_ENDIAN
LITTLE_ENDIAN = _LITTLE_ENDIAN
BIG_ENDIAN = _BIG_ENDIAN
PDP_ENDIAN = _PDP_ENDIAN
BYTE_ORDER = _BYTE_ORDER
def __word_swap_int_var(x): return \
def __word_swap_int_const(x): return \
def __word_swap_int(x): return __word_swap_int_var(x)
def __byte_swap_int_var(x): return \
def __byte_swap_int_const(x): return \
def __byte_swap_int(x): return __byte_swap_int_var(x)
def __byte_swap_long_var(x): return \
def __byte_swap_long_const(x): return \
def __byte_swap_long(x): return __byte_swap_long_var(x)
def __byte_swap_word_var(x): return \
def __byte_swap_word_const(x): return \
def __byte_swap_word(x): return __byte_swap_word_var(x)
def __htonl(x): return __bswap32(x)
def __htons(x): return __bswap16(x)
def __ntohl(x): return __bswap32(x)
def __ntohs(x): return __bswap16(x)
IPPROTO_IP = 0
IPPROTO_ICMP = 1
IPPROTO_TCP = 6
IPPROTO_UDP = 17
def htonl(x): return __htonl(x)
def htons(x): return __htons(x)
def ntohl(x): return __ntohl(x)
def ntohs(x): return __ntohs(x)
IPPROTO_RAW = 255
INET_ADDRSTRLEN = 16
IPPROTO_HOPOPTS = 0
IPPROTO_IGMP = 2
IPPROTO_GGP = 3
IPPROTO_IPV4 = 4
IPPROTO_IPIP = IPPROTO_IPV4
IPPROTO_ST = 7
IPPROTO_EGP = 8
IPPROTO_PIGP = 9
IPPROTO_RCCMON = 10
IPPROTO_NVPII = 11
IPPROTO_PUP = 12
IPPROTO_ARGUS = 13
IPPROTO_EMCON = 14
IPPROTO_XNET = 15
IPPROTO_CHAOS = 16
IPPROTO_MUX = 18
IPPROTO_MEAS = 19
IPPROTO_HMP = 20
IPPROTO_PRM = 21
IPPROTO_IDP = 22
IPPROTO_TRUNK1 = 23
IPPROTO_TRUNK2 = 24
IPPROTO_LEAF1 = 25
IPPROTO_LEAF2 = 26
IPPROTO_RDP = 27
IPPROTO_IRTP = 28
IPPROTO_TP = 29
IPPROTO_BLT = 30
IPPROTO_NSP = 31
IPPROTO_INP = 32
IPPROTO_SEP = 33
IPPROTO_3PC = 34
IPPROTO_IDPR = 35
IPPROTO_XTP = 36
IPPROTO_DDP = 37
IPPROTO_CMTP = 38
IPPROTO_TPXX = 39
IPPROTO_IL = 40
IPPROTO_IPV6 = 41
IPPROTO_SDRP = 42
IPPROTO_ROUTING = 43
IPPROTO_FRAGMENT = 44
IPPROTO_IDRP = 45
IPPROTO_RSVP = 46
IPPROTO_GRE = 47
IPPROTO_MHRP = 48
IPPROTO_BHA = 49
IPPROTO_ESP = 50
IPPROTO_AH = 51
IPPROTO_INLSP = 52
IPPROTO_SWIPE = 53
IPPROTO_NHRP = 54
IPPROTO_MOBILE = 55
IPPROTO_TLSP = 56
IPPROTO_SKIP = 57
IPPROTO_ICMPV6 = 58
IPPROTO_NONE = 59
IPPROTO_DSTOPTS = 60
IPPROTO_AHIP = 61
IPPROTO_CFTP = 62
IPPROTO_HELLO = 63
IPPROTO_SATEXPAK = 64
IPPROTO_KRYPTOLAN = 65
IPPROTO_RVD = 66
IPPROTO_IPPC = 67
IPPROTO_ADFS = 68
IPPROTO_SATMON = 69
IPPROTO_VISA = 70
IPPROTO_IPCV = 71
IPPROTO_CPNX = 72
IPPROTO_CPHB = 73
IPPROTO_WSN = 74
IPPROTO_PVP = 75
IPPROTO_BRSATMON = 76
IPPROTO_ND = 77
IPPROTO_WBMON = 78
IPPROTO_WBEXPAK = 79
IPPROTO_EON = 80
IPPROTO_VMTP = 81
IPPROTO_SVMTP = 82
IPPROTO_VINES = 83
IPPROTO_TTP = 84
IPPROTO_IGP = 85
IPPROTO_DGP = 86
IPPROTO_TCF = 87
IPPROTO_IGRP = 88
IPPROTO_OSPFIGP = 89
IPPROTO_SRPC = 90
IPPROTO_LARP = 91
IPPROTO_MTP = 92
IPPROTO_AX25 = 93
IPPROTO_IPEIP = 94
IPPROTO_MICP = 95
IPPROTO_SCCSP = 96
IPPROTO_ETHERIP = 97
IPPROTO_ENCAP = 98
IPPROTO_APES = 99
IPPROTO_GMTP = 100
IPPROTO_IPCOMP = 108
IPPROTO_SCTP = 132
IPPROTO_PIM = 103
IPPROTO_CARP = 112
IPPROTO_PGM = 113
IPPROTO_PFSYNC = 240
IPPROTO_OLD_DIVERT = 254
IPPROTO_MAX = 256
IPPROTO_DONE = 257
IPPROTO_DIVERT = 258
IPPROTO_SPACER = 32767
IPPORT_RESERVED = 1024
IPPORT_HIFIRSTAUTO = 49152
IPPORT_HILASTAUTO = 65535
IPPORT_RESERVEDSTART = 600
IPPORT_MAX = 65535
def IN_CLASSA(i): return (((u_int32_t)(i) & 0x80000000) == 0)
IN_CLASSA_NET = 0xff000000
IN_CLASSA_NSHIFT = 24
IN_CLASSA_HOST = 0x00ffffff
IN_CLASSA_MAX = 128
def IN_CLASSB(i): return (((u_int32_t)(i) & 0xc0000000) == 0x80000000)
IN_CLASSB_NET = 0xffff0000
IN_CLASSB_NSHIFT = 16
IN_CLASSB_HOST = 0x0000ffff
IN_CLASSB_MAX = 65536
def IN_CLASSC(i): return (((u_int32_t)(i) & 0xe0000000) == 0xc0000000)
IN_CLASSC_NET = 0xffffff00
IN_CLASSC_NSHIFT = 8
IN_CLASSC_HOST = 0x000000ff
def IN_CLASSD(i): return (((u_int32_t)(i) & 0xf0000000) == 0xe0000000)
IN_CLASSD_NET = 0xf0000000
IN_CLASSD_NSHIFT = 28
IN_CLASSD_HOST = 0x0fffffff
def IN_MULTICAST(i): return IN_CLASSD(i)
def IN_EXPERIMENTAL(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
def IN_BADCLASS(i): return (((u_int32_t)(i) & 0xf0000000) == 0xf0000000)
INADDR_NONE = 0xffffffff
IN_LOOPBACKNET = 127
IP_OPTIONS = 1
IP_HDRINCL = 2
IP_TOS = 3
IP_TTL = 4
IP_RECVOPTS = 5
IP_RECVRETOPTS = 6
IP_RECVDSTADDR = 7
IP_SENDSRCADDR = IP_RECVDSTADDR
IP_RETOPTS = 8
IP_MULTICAST_IF = 9
IP_MULTICAST_TTL = 10
IP_MULTICAST_LOOP = 11
IP_ADD_MEMBERSHIP = 12
IP_DROP_MEMBERSHIP = 13
IP_MULTICAST_VIF = 14
IP_RSVP_ON = 15
IP_RSVP_OFF = 16
IP_RSVP_VIF_ON = 17
IP_RSVP_VIF_OFF = 18
IP_PORTRANGE = 19
IP_RECVIF = 20
IP_IPSEC_POLICY = 21
IP_FAITH = 22
IP_ONESBCAST = 23
IP_FW_TABLE_ADD = 40
IP_FW_TABLE_DEL = 41
IP_FW_TABLE_FLUSH = 42
IP_FW_TABLE_GETSIZE = 43
IP_FW_TABLE_LIST = 44
IP_FW_ADD = 50
IP_FW_DEL = 51
IP_FW_FLUSH = 52
IP_FW_ZERO = 53
IP_FW_GET = 54
IP_FW_RESETLOG = 55
IP_DUMMYNET_CONFIGURE = 60
IP_DUMMYNET_DEL = 61
IP_DUMMYNET_FLUSH = 62
IP_DUMMYNET_GET = 64
IP_RECVTTL = 65
IP_MINTTL = 66
IP_DONTFRAG = 67
IP_DEFAULT_MULTICAST_TTL = 1
IP_DEFAULT_MULTICAST_LOOP = 1
IP_MAX_MEMBERSHIPS = 20
IP_PORTRANGE_DEFAULT = 0
IP_PORTRANGE_HIGH = 1
IP_PORTRANGE_LOW = 2
IPPROTO_MAXID = (IPPROTO_AH + 1)
IPCTL_FORWARDING = 1
IPCTL_SENDREDIRECTS = 2
IPCTL_DEFTTL = 3
IPCTL_DEFMTU = 4
IPCTL_RTEXPIRE = 5
IPCTL_RTMINEXPIRE = 6
IPCTL_RTMAXCACHE = 7
IPCTL_SOURCEROUTE = 8
IPCTL_DIRECTEDBROADCAST = 9
IPCTL_INTRQMAXLEN = 10
IPCTL_INTRQDROPS = 11
IPCTL_STATS = 12
IPCTL_ACCEPTSOURCEROUTE = 13
IPCTL_FASTFORWARDING = 14
IPCTL_KEEPFAITH = 15
IPCTL_GIF_TTL = 16
IPCTL_MAXID = 17
def in_nullhost(x): return ((x).s_addr == INADDR_ANY)
# Included from netinet6/in6.h
__KAME_VERSION = "FreeBSD"
IPV6PORT_RESERVED = 1024
IPV6PORT_ANONMIN = 49152
IPV6PORT_ANONMAX = 65535
IPV6PORT_RESERVEDMIN = 600
IPV6PORT_RESERVEDMAX = (IPV6PORT_RESERVED-1)
INET6_ADDRSTRLEN = 46
IPV6_ADDR_INT32_ONE = 1
IPV6_ADDR_INT32_TWO = 2
IPV6_ADDR_INT32_MNL = 0xff010000
IPV6_ADDR_INT32_MLL = 0xff020000
IPV6_ADDR_INT32_SMP = 0x0000ffff
IPV6_ADDR_INT16_ULL = 0xfe80
IPV6_ADDR_INT16_USL = 0xfec0
IPV6_ADDR_INT16_MLL = 0xff02
IPV6_ADDR_INT32_ONE = 0x01000000
IPV6_ADDR_INT32_TWO = 0x02000000
IPV6_ADDR_INT32_MNL = 0x000001ff
IPV6_ADDR_INT32_MLL = 0x000002ff
IPV6_ADDR_INT32_SMP = 0xffff0000
IPV6_ADDR_INT16_ULL = 0x80fe
IPV6_ADDR_INT16_USL = 0xc0fe
IPV6_ADDR_INT16_MLL = 0x02ff
def IN6_IS_ADDR_UNSPECIFIED(a): return \
def IN6_IS_ADDR_LOOPBACK(a): return \
def IN6_IS_ADDR_V4COMPAT(a): return \
def IN6_IS_ADDR_V4MAPPED(a): return \
IPV6_ADDR_SCOPE_NODELOCAL = 0x01
IPV6_ADDR_SCOPE_INTFACELOCAL = 0x01
IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
IPV6_ADDR_SCOPE_SITELOCAL = 0x05
IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
IPV6_ADDR_SCOPE_GLOBAL = 0x0e
__IPV6_ADDR_SCOPE_NODELOCAL = 0x01
__IPV6_ADDR_SCOPE_INTFACELOCAL = 0x01
__IPV6_ADDR_SCOPE_LINKLOCAL = 0x02
__IPV6_ADDR_SCOPE_SITELOCAL = 0x05
__IPV6_ADDR_SCOPE_ORGLOCAL = 0x08
__IPV6_ADDR_SCOPE_GLOBAL = 0x0e
def IN6_IS_ADDR_LINKLOCAL(a): return \
def IN6_IS_ADDR_SITELOCAL(a): return \
def IN6_IS_ADDR_MC_NODELOCAL(a): return \
def IN6_IS_ADDR_MC_INTFACELOCAL(a): return \
def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
def IN6_IS_ADDR_MC_SITELOCAL(a): return \
def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
def IN6_IS_ADDR_MC_GLOBAL(a): return \
def IN6_IS_ADDR_MC_NODELOCAL(a): return \
def IN6_IS_ADDR_MC_LINKLOCAL(a): return \
def IN6_IS_ADDR_MC_SITELOCAL(a): return \
def IN6_IS_ADDR_MC_ORGLOCAL(a): return \
def IN6_IS_ADDR_MC_GLOBAL(a): return \
def IN6_IS_SCOPE_LINKLOCAL(a): return \
def IFA6_IS_DEPRECATED(a): return \
def IFA6_IS_INVALID(a): return \
IPV6_OPTIONS = 1
IPV6_RECVOPTS = 5
IPV6_RECVRETOPTS = 6
IPV6_RECVDSTADDR = 7
IPV6_RETOPTS = 8
IPV6_SOCKOPT_RESERVED1 = 3
IPV6_UNICAST_HOPS = 4
IPV6_MULTICAST_IF = 9
IPV6_MULTICAST_HOPS = 10
IPV6_MULTICAST_LOOP = 11
IPV6_JOIN_GROUP = 12
IPV6_LEAVE_GROUP = 13
IPV6_PORTRANGE = 14
ICMP6_FILTER = 18
IPV6_2292PKTINFO = 19
IPV6_2292HOPLIMIT = 20
IPV6_2292NEXTHOP = 21
IPV6_2292HOPOPTS = 22
IPV6_2292DSTOPTS = 23
IPV6_2292RTHDR = 24
IPV6_2292PKTOPTIONS = 25
IPV6_CHECKSUM = 26
IPV6_V6ONLY = 27
IPV6_BINDV6ONLY = IPV6_V6ONLY
IPV6_IPSEC_POLICY = 28
IPV6_FAITH = 29
IPV6_FW_ADD = 30
IPV6_FW_DEL = 31
IPV6_FW_FLUSH = 32
IPV6_FW_ZERO = 33
IPV6_FW_GET = 34
IPV6_RTHDRDSTOPTS = 35
IPV6_RECVPKTINFO = 36
IPV6_RECVHOPLIMIT = 37
IPV6_RECVRTHDR = 38
IPV6_RECVHOPOPTS = 39
IPV6_RECVDSTOPTS = 40
IPV6_RECVRTHDRDSTOPTS = 41
IPV6_USE_MIN_MTU = 42
IPV6_RECVPATHMTU = 43
IPV6_PATHMTU = 44
IPV6_REACHCONF = 45
IPV6_PKTINFO = 46
IPV6_HOPLIMIT = 47
IPV6_NEXTHOP = 48
IPV6_HOPOPTS = 49
IPV6_DSTOPTS = 50
IPV6_RTHDR = 51
IPV6_PKTOPTIONS = 52
IPV6_RECVTCLASS = 57
IPV6_AUTOFLOWLABEL = 59
IPV6_TCLASS = 61
IPV6_DONTFRAG = 62
IPV6_PREFER_TEMPADDR = 63
IPV6_RTHDR_LOOSE = 0
IPV6_RTHDR_STRICT = 1
IPV6_RTHDR_TYPE_0 = 0
IPV6_DEFAULT_MULTICAST_HOPS = 1
IPV6_DEFAULT_MULTICAST_LOOP = 1
IPV6_PORTRANGE_DEFAULT = 0
IPV6_PORTRANGE_HIGH = 1
IPV6_PORTRANGE_LOW = 2
IPV6PROTO_MAXID = (IPPROTO_PIM + 1)
IPV6CTL_FORWARDING = 1
IPV6CTL_SENDREDIRECTS = 2
IPV6CTL_DEFHLIM = 3
IPV6CTL_DEFMTU = 4
IPV6CTL_FORWSRCRT = 5
IPV6CTL_STATS = 6
IPV6CTL_MRTSTATS = 7
IPV6CTL_MRTPROTO = 8
IPV6CTL_MAXFRAGPACKETS = 9
IPV6CTL_SOURCECHECK = 10
IPV6CTL_SOURCECHECK_LOGINT = 11
IPV6CTL_ACCEPT_RTADV = 12
IPV6CTL_KEEPFAITH = 13
IPV6CTL_LOG_INTERVAL = 14
IPV6CTL_HDRNESTLIMIT = 15
IPV6CTL_DAD_COUNT = 16
IPV6CTL_AUTO_FLOWLABEL = 17
IPV6CTL_DEFMCASTHLIM = 18
IPV6CTL_GIF_HLIM = 19
IPV6CTL_KAME_VERSION = 20
IPV6CTL_USE_DEPRECATED = 21
IPV6CTL_RR_PRUNE = 22
IPV6CTL_MAPPED_ADDR = 23
IPV6CTL_V6ONLY = 24
IPV6CTL_RTEXPIRE = 25
IPV6CTL_RTMINEXPIRE = 26
IPV6CTL_RTMAXCACHE = 27
IPV6CTL_USETEMPADDR = 32
IPV6CTL_TEMPPLTIME = 33
IPV6CTL_TEMPVLTIME = 34
IPV6CTL_AUTO_LINKLOCAL = 35
IPV6CTL_RIP6STATS = 36
IPV6CTL_PREFER_TEMPADDR = 37
IPV6CTL_ADDRCTLPOLICY = 38
IPV6CTL_USE_DEFAULTZONE = 39
IPV6CTL_MAXFRAGS = 41
IPV6CTL_IFQ = 42
IPV6CTL_ISATAPRTR = 43
IPV6CTL_MCAST_PMTU = 44
IPV6CTL_STEALTH = 45
IPV6CTL_MAXID = 46
| agpl-3.0 |
wvengen/ndg_oauth_server | ndg/oauth/server/lib/authenticate/password_client_authenticator.py | 1 | 2157 | """OAuth 2.0 WSGI server middleware providing MyProxy certificates as access tokens
"""
__author__ = "W van Engen"
__date__ = "01/11/12"
__copyright__ = "(C) 2011 FOM / Nikhef"
__license__ = "BSD - see LICENSE file in top-level directory"
__contact__ = "wvengen+oauth2@nikhef.nl"
__revision__ = "$Id$"
from base64 import b64decode
from ndg.oauth.server.lib.authenticate.authenticator_interface import AuthenticatorInterface
from ndg.oauth.server.lib.oauth.oauth_exception import OauthException
class PasswordAuthenticator(AuthenticatorInterface):
    """
    Authenticator implementation that checks for a client/resource id+secret
    combination, either in the HTTP Authorization header, or in the request
    parameters, according to the OAuth 2 RFC, section 2.3.1
    @todo implement protection against brute force attacks (MUST)
    """
    def __init__(self, typ, register):
        """
        @type typ: str
        @param typ: label ('client' or 'resource') used in OAuth error codes
        @param register: registry whose .register attribute maps names to
            entries carrying id and secret attributes
        """
        AuthenticatorInterface.__init__(self, typ)
        self._register = register

    def authenticate(self, request):
        """
        Checks for id/secret pair in Authorization header, or else
        POSTed request parameters.
        @type request: webob.Request
        @param request: HTTP request object
        @rtype: str
        @return: id of authenticated client
        Raise OauthException if authentication fails.
        """
        cid = secret = None
        if 'Authorization' in request.headers and request.headers['Authorization'].startswith('Basic'):
            # Guard the decode/split: invalid base64 or a payload without a
            # ':' separator previously escaped as an unhandled
            # TypeError/ValueError instead of an OAuth error response.
            try:
                cid, secret = b64decode(request.headers['Authorization'][6:]).split(':', 1)
            except (TypeError, ValueError):
                raise OauthException('invalid_%s' % self.typ,
                                     'Malformed Basic authorization header')
        elif 'client_id' in request.POST and 'client_secret' in request.POST:
            cid = request.POST['client_id']
            secret = request.POST['client_secret']

        if not cid or not secret:
            raise OauthException('invalid_%s'%self.typ, 'No %s password authentication supplied'%self.typ)

        for authorization in self._register.register.itervalues():
            if authorization.id == cid and authorization.secret == secret:
                return authorization.id
        raise OauthException('invalid_%s'%self.typ, ('%s access denied: %s' % (cid, self.typ)))
| bsd-3-clause |
ruacon35/ns3-wireless-planning.ns-3 | bindings/python/apidefs/gcc-ILP32/ns3_module_onoff.py | 8 | 5346 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
def register_types(module):
    """Register the ns3 types exposed by the onoff module, plus one nested
    submodule per C++ namespace.

    NOTE(review): this file looks like generated pybindgen apidefs output --
    prefer regenerating over hand-editing code.
    """
    root_module = module.get_root()

    ## onoff-application.h: ns3::OnOffApplication [class]
    module.add_class('OnOffApplication', parent=root_module['ns3::Application'])

    ## Register a nested module for the namespace Config
    nested_module = module.add_cpp_namespace('Config')
    register_types_ns3_Config(nested_module)

    ## Register a nested module for the namespace TimeStepPrecision
    nested_module = module.add_cpp_namespace('TimeStepPrecision')
    register_types_ns3_TimeStepPrecision(nested_module)

    ## Register a nested module for the namespace addressUtils
    nested_module = module.add_cpp_namespace('addressUtils')
    register_types_ns3_addressUtils(nested_module)

    ## Register a nested module for the namespace aodv
    nested_module = module.add_cpp_namespace('aodv')
    register_types_ns3_aodv(nested_module)

    ## Register a nested module for the namespace dot11s
    nested_module = module.add_cpp_namespace('dot11s')
    register_types_ns3_dot11s(nested_module)

    ## Register a nested module for the namespace flame
    nested_module = module.add_cpp_namespace('flame')
    register_types_ns3_flame(nested_module)

    ## Register a nested module for the namespace internal
    nested_module = module.add_cpp_namespace('internal')
    register_types_ns3_internal(nested_module)

    ## Register a nested module for the namespace olsr
    nested_module = module.add_cpp_namespace('olsr')
    register_types_ns3_olsr(nested_module)


# The per-namespace hooks below are placeholders: none of these namespaces
# contributes onoff-specific types.
def register_types_ns3_Config(module):
    root_module = module.get_root()

def register_types_ns3_TimeStepPrecision(module):
    root_module = module.get_root()

def register_types_ns3_addressUtils(module):
    root_module = module.get_root()

def register_types_ns3_aodv(module):
    root_module = module.get_root()

def register_types_ns3_dot11s(module):
    root_module = module.get_root()

def register_types_ns3_flame(module):
    root_module = module.get_root()

def register_types_ns3_internal(module):
    root_module = module.get_root()

def register_types_ns3_olsr(module):
    root_module = module.get_root()
def register_methods(root_module):
    """Register method bindings for every class this module defines."""
    register_Ns3OnOffApplication_methods(root_module, root_module['ns3::OnOffApplication'])
    return

def register_Ns3OnOffApplication_methods(root_module, cls):
    """Add the constructors and member functions of ns3::OnOffApplication
    to the binding class ``cls``."""
    ## onoff-application.h: ns3::OnOffApplication::OnOffApplication(ns3::OnOffApplication const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::OnOffApplication const &', 'arg0')])
    ## onoff-application.h: ns3::OnOffApplication::OnOffApplication() [constructor]
    cls.add_constructor([])
    ## onoff-application.h: static ns3::TypeId ns3::OnOffApplication::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## onoff-application.h: void ns3::OnOffApplication::SetMaxBytes(uint32_t maxBytes) [member function]
    cls.add_method('SetMaxBytes',
                   'void',
                   [param('uint32_t', 'maxBytes')])
    ## onoff-application.h: void ns3::OnOffApplication::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## onoff-application.h: void ns3::OnOffApplication::StartApplication() [member function]
    cls.add_method('StartApplication',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    ## onoff-application.h: void ns3::OnOffApplication::StopApplication() [member function]
    cls.add_method('StopApplication',
                   'void',
                   [],
                   visibility='private', is_virtual=True)
    return
def register_functions(root_module):
    """Register free functions; delegates to the per-namespace hooks below."""
    module = root_module
    register_functions_ns3_Config(module.get_submodule('Config'), root_module)
    register_functions_ns3_TimeStepPrecision(module.get_submodule('TimeStepPrecision'), root_module)
    register_functions_ns3_addressUtils(module.get_submodule('addressUtils'), root_module)
    register_functions_ns3_aodv(module.get_submodule('aodv'), root_module)
    register_functions_ns3_dot11s(module.get_submodule('dot11s'), root_module)
    register_functions_ns3_flame(module.get_submodule('flame'), root_module)
    register_functions_ns3_internal(module.get_submodule('internal'), root_module)
    register_functions_ns3_olsr(module.get_submodule('olsr'), root_module)
    return

# Per-namespace free-function hooks; all empty for the onoff module.
def register_functions_ns3_Config(module, root_module):
    return

def register_functions_ns3_TimeStepPrecision(module, root_module):
    return

def register_functions_ns3_addressUtils(module, root_module):
    return

def register_functions_ns3_aodv(module, root_module):
    return

def register_functions_ns3_dot11s(module, root_module):
    return

def register_functions_ns3_flame(module, root_module):
    return

def register_functions_ns3_internal(module, root_module):
    return

def register_functions_ns3_olsr(module, root_module):
    return
| gpl-2.0 |
apache/incubator-airflow | tests/providers/google/cloud/operators/test_dataproc_system.py | 10 | 2378 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from airflow.providers.google.cloud.example_dags.example_dataproc import BUCKET, PYSPARK_MAIN, SPARKR_MAIN
from tests.providers.google.cloud.utils.gcp_authenticator import GCP_DATAPROC_KEY
from tests.test_utils.gcp_system_helpers import CLOUD_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
GCS_URI = f"gs://{BUCKET}"
pyspark_file = """
#!/usr/bin/python
import pyspark
sc = pyspark.SparkContext()
rdd = sc.parallelize(['Hello,', 'world!'])
words = sorted(rdd.collect())
print(words)
"""
sparkr_file = """
#!/usr/bin/r
if (nchar(Sys.getenv("SPARK_HOME")) < 1) {
Sys.setenv(SPARK_HOME = "/home/spark")
}
library(SparkR, lib.loc = c(file.path(Sys.getenv("SPARK_HOME"), "R", "lib")))
sparkR.session()
# Create the SparkDataFrame
df <- as.DataFrame(faithful)
head(summarize(groupBy(df, df$waiting), count = n(df$waiting)))
"""
@pytest.mark.backend("mysql", "postgres")
@pytest.mark.credential_file(GCP_DATAPROC_KEY)
class DataprocExampleDagsTest(GoogleSystemTest):
@provide_gcp_context(GCP_DATAPROC_KEY)
def setUp(self):
super().setUp()
self.create_gcs_bucket(BUCKET)
self.upload_content_to_gcs(lines=pyspark_file, bucket=GCS_URI, filename=PYSPARK_MAIN)
self.upload_content_to_gcs(lines=sparkr_file, bucket=GCS_URI, filename=SPARKR_MAIN)
@provide_gcp_context(GCP_DATAPROC_KEY)
def tearDown(self):
self.delete_gcs_bucket(BUCKET)
super().tearDown()
@provide_gcp_context(GCP_DATAPROC_KEY)
def test_run_example_dag(self):
self.run_dag(dag_id="example_gcp_dataproc", dag_folder=CLOUD_DAG_FOLDER)
| apache-2.0 |
wkennington/rethinkdb | external/v8_3.30.33.16/build/gyp/pylib/gyp/generator/ninja_test.py | 610 | 1611 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the ninja.py file. """
import gyp.generator.ninja as ninja
import unittest
import StringIO
import sys
import TestCommon
class TestPrefixesAndSuffixes(unittest.TestCase):
def test_BinaryNamesWindows(self):
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
'build.ninja', 'win')
spec = { 'target_name': 'wee' }
self.assertTrue(writer.ComputeOutputFileName(spec, 'executable').
endswith('.exe'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
endswith('.dll'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
endswith('.lib'))
def test_BinaryNamesLinux(self):
writer = ninja.NinjaWriter('foo', 'wee', '.', '.', 'build.ninja', '.',
'build.ninja', 'linux')
spec = { 'target_name': 'wee' }
self.assertTrue('.' not in writer.ComputeOutputFileName(spec,
'executable'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
startswith('lib'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
startswith('lib'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'shared_library').
endswith('.so'))
self.assertTrue(writer.ComputeOutputFileName(spec, 'static_library').
endswith('.a'))
if __name__ == '__main__':
unittest.main()
| agpl-3.0 |
ypwalter/evennia | evennia/server/session.py | 4 | 5214 | """
This module defines a generic session class. All connection instances
(both on Portal and Server side) should inherit from this class.
"""
import time
#------------------------------------------------------------
# Server Session
#------------------------------------------------------------
class Session(object):
    """
    Represents a player's session; template for both portal- and
    server-side sessions.

    Each connection is backed by two session instances:

    1. A Portal session, customized for the connection protocol that
       Evennia supports (Telnet, SSH etc.). It must call init_session()
       during initialization, and wire the hook methods below to the
       protocol's own mechanisms so Evennia sees a unified interface.
    2. A Server session, identical for all connected players regardless
       of how they connect.

    Portal and Server each keep their own sessionhandler; these are synced
    whenever new connections happen or the Server restarts etc., so much
    of the same information must be stored in both places (e.g. the portal
    can re-sync with the server when the server reboots).
    """

    # Names of attributes copied back and forth when syncing sessions
    # between Portal and Server.
    _attrs_to_sync = ('protocol_key', 'address', 'suid', 'sessid', 'uid',
                      'uname', 'logged_in', 'puid', 'encoding', 'screenreader',
                      'conn_time', 'cmd_last', 'cmd_last_visible', 'cmd_total',
                      'protocol_flags', 'server_data', "cmdset_storage_string")

    def init_session(self, protocol_key, address, sessionhandler):
        """
        Initialize the Session. This should be called by the protocol when
        a new session is established.

        Args:
            protocol_key (str): By default, one of 'telnet', 'ssh',
                'ssl' or 'web'.
            address (str): Client address.
            sessionhandler (SessionHandler): Reference to the
                main sessionhandler instance.
        """
        now = time.time()

        self.protocol_key = protocol_key    # 'telnet', 'ssh', 'ssl' or 'web'
        self.address = address              # protocol address of this session
        self.suid = None                    # hex key used by some protocols
        self.sessid = 0                     # unique session id; none assigned yet
        self.uid = None                     # database id of the connected user
        self.uname = None                   # user name, for easier tracking
        self.logged_in = False              # whether the user has authenticated
        self.puid = None                    # database id of puppeted object

        # session time statistics
        self.conn_time = now
        self.cmd_last_visible = now
        self.cmd_last = now
        self.cmd_total = 0

        self.encoding = "utf-8"
        self.screenreader = False
        self.protocol_flags = {}
        self.server_data = {}

        # back-reference to the sessionhandler this session is stored in
        self.sessionhandler = sessionhandler

    def get_sync_data(self):
        """
        Collect all data relevant to sync the session.

        Returns:
            dict: All sync values, keyed by the names in _attrs_to_sync
                that are actually set on this instance.
        """
        return {key: value for key, value in self.__dict__.items()
                if key in self._attrs_to_sync}

    def load_sync_data(self, sessdata):
        """
        Takes a session dictionary, as created by get_sync_data, and
        loads it into the corresponding properties of the session.

        Args:
            sessdata (dict): Session data dictionary.
        """
        for propname, value in sessdata.items():
            setattr(self, propname, value)

    def at_sync(self):
        """
        Called after a session has been fully synced (including
        secondary operations such as setting self.player based
        on uid etc).
        """
        pass

    # access hooks

    def disconnect(self, reason=None):
        """
        Generic hook called from the outside to disconnect this session;
        should be connected to the protocol's actual disconnect mechanism.

        Args:
            reason (str): Eventual text motivating the disconnect.
        """
        pass

    def data_out(self, text=None, **kwargs):
        """
        Generic hook for sending data out through the protocol. Server
        protocols can use this right away. Portal sessions should overload
        this to format/handle the outgoing data as needed.

        Kwargs:
            text (str): Text data.
            kwargs (any): Other data to the protocol.
        """
        pass

    def data_in(self, text=None, **kwargs):
        """
        Hook for protocols to send incoming data to the engine.

        Kwargs:
            text (str): Text data.
            kwargs (any): Other data from the protocol.
        """
        pass
| bsd-3-clause |
numenta/nupic | src/nupic/support/configuration_custom.py | 10 | 10011 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
This Configuration implementation allows for persistent configuration updates
stored in ``nupic-custom.xml`` in the site conf folder.
"""
from __future__ import with_statement
from copy import copy
import errno
import logging
import os
import sys
import traceback
from xml.etree import ElementTree
from nupic.support.fs_helpers import makeDirectoryFromAbsolutePath
from nupic.support.configuration_base import Configuration as ConfigurationBase
def _getLogger():
return logging.getLogger("com.numenta.nupic.tools.configuration_custom")
class Configuration(ConfigurationBase):
  """
  This class extends the
  :class:`nupic.support.configuration_base.ConfigurationBase` implementation
  with the ability to read and write custom, persistent parameters. The custom
  settings will be stored in the ``nupic-custom.xml`` file.

  If the environment variable ``NTA_CONF_PATH`` is defined, then the
  configuration files are expected to be in the ``NTA_CONF_PATH`` search path,
  which is a ``:`` separated list of directories (on Windows the separator is a
  ``;``). If ``NTA_CONF_PATH`` is not defined, then it is assumed to be
  ``$NTA/conf/default`` (typically ``~/nupic/current/conf/default``).
  """

  @classmethod
  def getCustomDict(cls):
    """
    :returns: (dict) containing all custom configuration properties.
    """
    return _CustomConfigurationFileWrapper.getCustomDict()

  @classmethod
  def setCustomProperty(cls, propertyName, value):
    """
    Set a single custom setting and persist it to the custom configuration
    store.

    :param propertyName: (string) name of the property to set
    :param value: (object) value to set the property to
    """
    cls.setCustomProperties({propertyName : value})

  @classmethod
  def setCustomProperties(cls, properties):
    """
    Set multiple custom properties and persist them to the custom configuration
    store.

    :param properties: (dict) of property name/value pairs to set
    """
    # Include the caller's stack in the log so unexpected configuration
    # changes can be traced back to their origin.
    _getLogger().info("Setting custom configuration properties=%r; caller=%r",
                      properties, traceback.format_stack())

    # Persist to the custom configuration file first, then update the
    # in-memory settings cache.
    _CustomConfigurationFileWrapper.edit(properties)

    # NOTE: iteritems() is Python 2 only.
    for propertyName, value in properties.iteritems():
      cls.set(propertyName, value)

  @classmethod
  def clear(cls):
    """
    Clear all configuration properties from in-memory cache, but do NOT alter
    the custom configuration file. Used in unit-testing.
    """
    # Clear the in-memory settings cache, forcing reload upon subsequent "get"
    # request.
    super(Configuration, cls).clear()

    # Reset in-memory custom configuration info.
    _CustomConfigurationFileWrapper.clear(persistent=False)

  @classmethod
  def resetCustomConfig(cls):
    """
    Clear all custom configuration settings and delete the persistent custom
    configuration store.
    """
    _getLogger().info("Resetting all custom configuration properties; "
                      "caller=%r", traceback.format_stack())

    # Clear the in-memory settings cache, forcing reload upon subsequent "get"
    # request.
    super(Configuration, cls).clear()

    # Delete the persistent custom configuration store and reset in-memory
    # custom configuration info
    _CustomConfigurationFileWrapper.clear(persistent=True)

  @classmethod
  def loadCustomConfig(cls):
    """
    Loads custom configuration settings from their persistent storage.

    .. warning :: DO NOT CALL THIS: It's typically not necessary to call this
       method directly. This method exists *solely* for the benefit of
       ``prepare_conf.py``, which needs to load configuration files selectively.
    """
    cls.readConfigFile(_CustomConfigurationFileWrapper.customFileName)

  @classmethod
  def _readStdConfigFiles(cls):
    """ Intercept the _readStdConfigFiles call from our base config class to
    read in base and custom configuration settings.
    """
    super(Configuration, cls)._readStdConfigFiles()

    # Layer the custom settings on top of the standard configuration.
    cls.loadCustomConfig()
class _CustomConfigurationFileWrapper(object):
  """
  Private class to handle creation, deletion and editing of the custom
  configuration file used by this implementation of Configuration.

  Supports persistent changes to nupic-custom.xml configuration file.
  This class only applies changes to the local instance.
  For cluster wide changes see nupic-services.py or nupic.cluster.NupicServices
  """
  # Name of the custom xml file to be created
  customFileName = 'nupic-custom.xml'
  # Stores the path to the file
  # If none, findConfigFile is used to find path to file; defaults to
  # NTA_CONF_PATH[0]
  _path = None
  @classmethod
  def clear(cls, persistent=False):
    """ If persistent is True, delete the temporary file

    Parameters:
    ----------------------------------------------------------------
    persistent: if True, custom configuration file is deleted
    """
    if persistent:
      try:
        os.unlink(cls.getPath())
      except OSError, e:
        # ENOENT (file already absent) is fine; anything else is re-raised.
        if e.errno != errno.ENOENT:
          _getLogger().exception("Error %s while trying to remove dynamic " \
            "configuration file: %s", e.errno, cls.getPath())
          raise
    # Force getPath() to re-resolve the file location on next use.
    cls._path = None
  @classmethod
  def getCustomDict(cls):
    """ Returns a dict of all temporary values in custom configuration file
    """
    if not os.path.exists(cls.getPath()):
      return dict()
    properties = Configuration._readConfigFile(os.path.basename(
      cls.getPath()), os.path.dirname(cls.getPath()))
    # Flatten the parsed property records down to simple name -> value pairs;
    # entries without a 'value' key are skipped.
    values = dict()
    for propName in properties:
      if 'value' in properties[propName]:
        values[propName] = properties[propName]['value']
    return values
  @classmethod
  def edit(cls, properties):
    """ Edits the XML configuration file with the parameters specified by
    properties

    Parameters:
    ----------------------------------------------------------------
    properties: dict of settings to be applied to the custom configuration store
                (key is property name, value is value)
    """
    copyOfProperties = copy(properties)
    configFilePath = cls.getPath()
    try:
      with open(configFilePath, 'r') as fp:
        contents = fp.read()
    except IOError, e:
      if e.errno != errno.ENOENT:
        _getLogger().exception("Error %s reading custom configuration store "
                               "from %s, while editing properties %s.",
                               e.errno, configFilePath, properties)
        raise
      # Missing file is not an error: start from an empty configuration doc.
      contents = '<configuration/>'
    try:
      elements = ElementTree.XML(contents)
      # Round-trip serialization; appears to serve as a sanity check that the
      # parsed tree is well-formed before we start mutating it.
      ElementTree.tostring(elements)
    except Exception, e:
      # Raising error as RuntimeError with custom message since ElementTree
      # exceptions aren't clear.
      msg = "File contents of custom configuration is corrupt. File " \
            "location: %s; Contents: '%s'. Original Error (%s): %s." % \
            (configFilePath, contents, type(e), e)
      _getLogger().exception(msg)
      # Python 2 three-argument raise: preserves the original traceback while
      # substituting the clearer RuntimeError.
      raise RuntimeError(msg), None, sys.exc_info()[2]
    if elements.tag != 'configuration':
      e = "Expected top-level element to be 'configuration' but got '%s'" % \
          (elements.tag)
      _getLogger().error(e)
      raise RuntimeError(e)
    # Apply new properties to matching settings in the custom config store;
    # pop matching properties from our copy of the properties dict
    for propertyItem in elements.findall('./property'):
      propInfo = dict((attr.tag, attr.text) for attr in propertyItem)
      name = propInfo['name']
      if name in copyOfProperties:
        foundValues = propertyItem.findall('./value')
        if len(foundValues) > 0:
          # Only the first <value> element is updated.
          foundValues[0].text = str(copyOfProperties.pop(name))
          if not copyOfProperties:
            break
        else:
          e = "Property %s missing value tag." % (name,)
          _getLogger().error(e)
          raise RuntimeError(e)
    # Add unmatched remaining properties to custom config store
    for propertyName, value in copyOfProperties.iteritems():
      newProp = ElementTree.Element('property')
      nameTag = ElementTree.Element('name')
      nameTag.text = propertyName
      newProp.append(nameTag)
      valueTag = ElementTree.Element('value')
      valueTag.text = str(value)
      newProp.append(valueTag)
      elements.append(newProp)
    try:
      # Ensure the target directory exists before writing the file back out.
      makeDirectoryFromAbsolutePath(os.path.dirname(configFilePath))
      with open(configFilePath,'w') as fp:
        fp.write(ElementTree.tostring(elements))
    except Exception, e:
      _getLogger().exception("Error while saving custom configuration "
                             "properties %s in %s.", properties, configFilePath)
      raise
  @classmethod
  def _setPath(cls):
    """ Sets the path of the custom configuration file
    """
    # NOTE(review): requires NTA_DYNAMIC_CONF_DIR to be set in the
    # environment; raises KeyError otherwise.
    cls._path = os.path.join(os.environ['NTA_DYNAMIC_CONF_DIR'],
                             cls.customFileName)
  @classmethod
  def getPath(cls):
    """ Get the path of the custom configuration file
    """
    # Lazily resolved and cached on the class until clear() resets it.
    if cls._path is None:
      cls._setPath()
    return cls._path
| agpl-3.0 |
mancoast/CPythonPyc_test | cpython/230_test_unpack.py | 9 | 2513 | from test.test_support import TestFailed, verbose
# Exercises Python's sequence-unpacking semantics: tuples, lists, strings,
# generic sequences, single-element unpacking, and the failure modes
# (wrong size, non-sequence, errors raised by the sequence itself).
# Python 2 test style: a failed check raises TestFailed.
t = (1, 2, 3)
l = [4, 5, 6]
class Seq:
    # Minimal sequence protocol: yields 0, 1, 2 then signals exhaustion.
    def __getitem__(self, i):
        if i >= 0 and i < 3: return i
        raise IndexError
a = -1
b = -1
c = -1
# unpack tuple
if verbose:
    print 'unpack tuple'
a, b, c = t
if a != 1 or b != 2 or c != 3:
    raise TestFailed
# unpack list
if verbose:
    print 'unpack list'
a, b, c = l
if a != 4 or b != 5 or c != 6:
    raise TestFailed
# unpack implied tuple
if verbose:
    print 'unpack implied tuple'
a, b, c = 7, 8, 9
if a != 7 or b != 8 or c != 9:
    raise TestFailed
# unpack string... fun!
if verbose:
    print 'unpack string'
a, b, c = 'one'
if a != 'o' or b != 'n' or c != 'e':
    raise TestFailed
# unpack generic sequence
if verbose:
    print 'unpack sequence'
a, b, c = Seq()
if a != 0 or b != 1 or c != 2:
    raise TestFailed
# single element unpacking, with extra syntax
if verbose:
    print 'unpack single tuple/list'
st = (99,)
sl = [100]
a, = st
if a != 99:
    raise TestFailed
b, = sl
if b != 100:
    raise TestFailed
# now for some failures
# unpacking non-sequence
if verbose:
    print 'unpack non-sequence'
try:
    a, b, c = 7
    raise TestFailed
except TypeError:
    pass
# unpacking tuple of wrong size
if verbose:
    print 'unpack tuple wrong size'
try:
    a, b = t
    raise TestFailed
except ValueError:
    pass
# unpacking list of wrong size
if verbose:
    print 'unpack list wrong size'
try:
    a, b = l
    raise TestFailed
except ValueError:
    pass
# unpacking sequence too short
if verbose:
    print 'unpack sequence too short'
try:
    a, b, c, d = Seq()
    raise TestFailed
except ValueError:
    pass
# unpacking sequence too long
if verbose:
    print 'unpack sequence too long'
try:
    a, b = Seq()
    raise TestFailed
except ValueError:
    pass
# unpacking a sequence where the test for too long raises a different
# kind of error
class BozoError(Exception):
    # Distinct exception type used to verify that non-IndexError errors
    # raised by the sequence propagate out of the unpacking machinery.
    pass
class BadSeq:
    # Yields 0, 1, 2, then raises BozoError at index 3 (instead of the
    # IndexError that normally terminates iteration), then IndexError after.
    def __getitem__(self, i):
        if i >= 0 and i < 3:
            return i
        elif i == 3:
            raise BozoError
        else:
            raise IndexError
# trigger code while not expecting an IndexError
if verbose:
    print 'unpack sequence too long, wrong error'
try:
    a, b, c, d, e = BadSeq()
    raise TestFailed
except BozoError:
    pass
# trigger code while expecting an IndexError
if verbose:
    print 'unpack sequence too short, wrong error'
try:
    a, b, c = BadSeq()
    raise TestFailed
except BozoError:
    pass
| gpl-3.0 |
huangwenjun06/libvpx_mips | third_party/googletest/src/test/gtest_throw_on_failure_test.py | 2917 | 5766 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests Google Test's throw-on-failure mode with exceptions disabled.
This script invokes gtest_throw_on_failure_test_ (a program written with
Google Test) with different environments and command line flags.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Constants.
# The command line flag for enabling/disabling the throw-on-failure mode.
THROW_ON_FAILURE = 'gtest_throw_on_failure'
# Path to the gtest_throw_on_failure_test_ program, compiled with
# exceptions disabled.
# Resolved once at import time via Google Test's Python test utilities.
EXE_PATH = gtest_test_utils.GetTestExecutablePath(
    'gtest_throw_on_failure_test_')
# Utilities.
def SetEnvVar(env_var, value):
  """Sets an environment variable to a given value; unsets it when the
  given value is None.

  The variable name is upper-cased before use.
  """
  name = env_var.upper()
  if value is None:
    # Remove the variable if present; a no-op when it is already unset.
    os.environ.pop(name, None)
  else:
    os.environ[name] = value
def Run(command):
  """Runs a command; returns True/False if its exit code is/isn't 0."""
  # Announce the command (Python 2 print statement) for test-log readability.
  print 'Running "%s". . .' % ' '.join(command)
  p = gtest_test_utils.Subprocess(command)
  # NOTE(review): `p.exited` presumably distinguishes a normal exit from a
  # signal-triggered termination -- confirm against gtest_test_utils.
  return p.exited and p.exit_code == 0
# The tests. TODO(wan@google.com): refactor the class to share common
# logic with code in gtest_break_on_failure_unittest.py.
class ThrowOnFailureTest(gtest_test_utils.TestCase):
  """Tests the throw-on-failure mode."""
  def RunAndVerify(self, env_var_value, flag_value, should_fail):
    """Runs gtest_throw_on_failure_test_ and verifies that it does
    (or does not) exit with a non-zero code.

    Args:
      env_var_value:    value of the GTEST_THROW_ON_FAILURE environment
                        variable; None if the variable should be unset.
      flag_value:       value of the --gtest_throw_on_failure flag;
                        None if the flag should not be present.
      should_fail:      True iff the program is expected to fail.
    """
    SetEnvVar(THROW_ON_FAILURE, env_var_value)
    # Build a human-readable description of the env-var state for the
    # assertion message below.
    if env_var_value is None:
      env_var_value_msg = ' is not set'
    else:
      env_var_value_msg = '=' + env_var_value
    # Translate the requested flag value into the actual command-line flag:
    # None -> no flag, '0' -> explicit disable, anything else -> enable.
    if flag_value is None:
      flag = ''
    elif flag_value == '0':
      flag = '--%s=0' % THROW_ON_FAILURE
    else:
      flag = '--%s' % THROW_ON_FAILURE
    command = [EXE_PATH]
    if flag:
      command.append(flag)
    if should_fail:
      should_or_not = 'should'
    else:
      should_or_not = 'should not'
    failed = not Run(command)
    # Always restore the environment so later test cases start clean.
    SetEnvVar(THROW_ON_FAILURE, None)
    msg = ('when %s%s, an assertion failure in "%s" %s cause a non-zero '
           'exit code.' %
           (THROW_ON_FAILURE, env_var_value_msg, ' '.join(command),
            should_or_not))
    self.assert_(failed == should_fail, msg)
  def testDefaultBehavior(self):
    """Tests the behavior of the default mode."""
    self.RunAndVerify(env_var_value=None, flag_value=None, should_fail=False)
  def testThrowOnFailureEnvVar(self):
    """Tests using the GTEST_THROW_ON_FAILURE environment variable."""
    self.RunAndVerify(env_var_value='0',
                      flag_value=None,
                      should_fail=False)
    self.RunAndVerify(env_var_value='1',
                      flag_value=None,
                      should_fail=True)
  def testThrowOnFailureFlag(self):
    """Tests using the --gtest_throw_on_failure flag."""
    self.RunAndVerify(env_var_value=None,
                      flag_value='0',
                      should_fail=False)
    self.RunAndVerify(env_var_value=None,
                      flag_value='1',
                      should_fail=True)
  def testThrowOnFailureFlagOverridesEnvVar(self):
    """Tests that --gtest_throw_on_failure overrides GTEST_THROW_ON_FAILURE."""
    # All four combinations of env var x flag; the flag always wins.
    self.RunAndVerify(env_var_value='0',
                      flag_value='0',
                      should_fail=False)
    self.RunAndVerify(env_var_value='0',
                      flag_value='1',
                      should_fail=True)
    self.RunAndVerify(env_var_value='1',
                      flag_value='0',
                      should_fail=False)
    self.RunAndVerify(env_var_value='1',
                      flag_value='1',
                      should_fail=True)
# Standard Google Test Python-script entry point.
if __name__ == '__main__':
  gtest_test_utils.Main()
| bsd-3-clause |
c4fcm/MediaCloud-API-Client | mediacloud/storage.py | 1 | 4358 | import copy
import logging
class StoryDatabase(object):
    """Abstract base class for story storage backends.

    Provides the shared add/update logic; subclasses implement the
    storage-specific primitives (`storyExists`, `_saveStory`, `_updateStory`,
    `getStory`, etc.), all of which raise NotImplementedError here.
    """

    # callbacks you can register listeners against
    EVENT_PRE_STORY_SAVE = "preStorySave"
    EVENT_POST_STORY_SAVE = "postStorySave"

    def __init__(self):
        self._logger = logging.getLogger(__name__)
        # Backend connection/handle; populated by subclasses.
        self._db = None

    def storyExists(self, story_id):
        """Return whether a story with `story_id` is already stored."""
        raise NotImplementedError("Subclasses should implement this!")

    def updateStory(self, story, extra_attributes=None):
        """Add the story if it is new, otherwise update the stored copy.

        :param story: (dict) story to persist; must contain 'stories_id'
        :param extra_attributes: (dict) extra key/value pairs merged into the
            stored document (defaults to none)
        :returns: result of `addStory` for new stories, or of `_updateStory`
            for existing ones
        """
        # Use None instead of a mutable {} default to avoid the shared
        # default-argument pitfall; callers see identical behavior.
        if extra_attributes is None:
            extra_attributes = {}
        # if it is a new story, just add it normally
        if not self.storyExists(story['stories_id']):
            return self.addStory(story, extra_attributes)
        story_to_save = copy.deepcopy(story)
        story_to_save.update(extra_attributes)
        story_to_save['stories_id'] = story['stories_id']
        if 'story_sentences' in story:
            story_to_save['story_sentences_count'] = len(story['story_sentences'])
        return self._updateStory(story_to_save)

    def addStory(self, story, extra_attributes=None):
        """Save a story (python object) to the database. This does NOT
        update existing stories.

        :returns: (bool) True on success, False if the story already exists.
        """
        if extra_attributes is None:
            extra_attributes = {}
        if self.storyExists(story['stories_id']):
            self._logger.info('Not saving {} - already exists'.format(story['stories_id']))
            return False
        story_to_save = copy.deepcopy(story)
        story_to_save.update(extra_attributes)
        # NOTE(review): addStory stores '_stories_id' while updateStory stores
        # 'stories_id'; the deep copy already carries 'stories_id', so this
        # adds a second key -- presumably intentional, worth confirming.
        story_to_save['_stories_id'] = story['stories_id']
        if 'story_sentences' in story:
            story_to_save['story_sentences_count'] = len(story['story_sentences'])
        self._saveStory(story_to_save)
        # Read-back check that the story was persisted (result discarded).
        self.getStory(story['stories_id'])
        self._logger.debug('Saved {}'.format(story['stories_id']))
        return True

    def _updateStory(self, story_attributes):
        raise NotImplementedError("Subclasses should implement this!")

    def _saveStory(self, story_attributes):
        raise NotImplementedError("Subclasses should implement this!")

    def getStory(self, story_id):
        raise NotImplementedError("Subclasses should implement this!")

    def storyCount(self):
        raise NotImplementedError("Subclasses should implement this!")

    def createDatabase(self, db_name):
        raise NotImplementedError("Subclasses should implement this!")

    def deleteDatabase(self, db_name):
        raise NotImplementedError("Subclasses should implement this!")

    def getMaxStoryId(self):
        raise NotImplementedError("Subclasses should implement this!")

    def initialize(self):
        raise NotImplementedError("Subclasses should implement this!")
class MongoStoryDatabase(StoryDatabase):
    """MongoDB-backed story storage; stories live in the 'stories' collection."""

    def __init__(self, db_name=None, host='127.0.0.1', port=27017):
        super(MongoStoryDatabase, self).__init__()
        # Imported lazily so the module can be loaded without pymongo when a
        # different backend is in use.
        import pymongo
        self._server = pymongo.MongoClient(host, port)
        if db_name is not None:
            self.selectDatabase(db_name)

    def createDatabase(self, db_name):
        # Mongo creates databases lazily on first write; selecting is enough.
        self.selectDatabase(db_name)

    def selectDatabase(self, db_name):
        self._db = self._server[db_name]

    def deleteDatabase(self, db_name):
        # NOTE(review): this drops only the 'stories' collection of the
        # currently selected database; the `db_name` argument is unused.
        self._db.drop_collection('stories')

    def storyExists(self, story_id):
        story = self.getStory(story_id)
        return story is not None

    def _updateStory(self, story_attributes):
        self._db.stories.update_one({'stories_id': story_attributes['stories_id']},
                                    {'$set': story_attributes})
        story = self.getStory(story_attributes['stories_id'])
        return story

    def _saveStory(self, story_attributes):
        self._db.stories.insert_one(story_attributes)
        story = self.getStory(story_attributes['stories_id'])
        return story

    def getStory(self, story_id):
        """Return the story document for `story_id`, or None if absent."""
        stories = self._db.stories.find({"stories_id": story_id}).limit(1)
        try:
            return stories.next()
        except StopIteration:
            # Empty cursor: no such story. Previously a bare `except:`, which
            # also silently hid genuine database/connection errors.
            return None

    def getMaxStoryId(self):
        max_story_id = self._db.stories.find().sort("stories_id", -1)[0]['stories_id']
        return int(max_story_id)

    def initialize(self):
        # nothing to init for mongo
        return

    def storyCount(self):
        return self._db['stories'].count_documents({})
| mit |
JordiCarreraVentura/spellchecker | lib/WordEmbeddings.py | 1 | 1457 | import gensim
from collections import (
Counter,
defaultdict as deft
)
from multiprocessing import cpu_count
from tqdm import tqdm
class WordEmbeddings:
    # Thin wrapper around gensim's Word2Vec: trains a model, tracks which
    # words were observed, and exposes similarity queries.
    def __init__(
        self,
        dimensions=100,       # size of each word vector
        window=5,             # context window passed to Word2Vec
        min_count=1,          # minimum corpus frequency for a word to be kept
        workers=1,
        # workers=cpu_count(),
        max_vocab_size=20000000,
        iter=5,               # training epochs (shadows builtin `iter`; kept
                              # for interface compatibility with callers)
        sg=0                  # 0 = CBOW, 1 = skip-gram (gensim convention)
    ):
        self.dimensions = dimensions
        self.window = window
        self.min_count = min_count
        self.workers = workers
        self.max_vocab_size = max_vocab_size
        self.iter = iter
        self.sg = sg
        # Set by train(); None until a model has been trained.
        self.model = None
        # word -> True once compile() has registered it; defaults to False.
        self.observed = deft(bool)
    def train(self, iterable):
        """Train a Word2Vec model on an iterable of tokenized sentences."""
        self.model = gensim.models.Word2Vec(
            iterable,
            size=self.dimensions,
            window=self.window,
            min_count=self.min_count,
            workers=self.workers,
            max_vocab_size=self.max_vocab_size,
            iter=self.iter,
            sg=self.sg
        )
    def compile(self):
        # Mark every vocabulary word as observed for fast membership checks.
        for w in self:
            self.observed[w] = True
    def __iter__(self):
        # NOTE(review): `model.vocab` is the pre-1.0 gensim API; newer gensim
        # moved the vocabulary to `model.wv` -- confirm the pinned version.
        for w in self.model.vocab.keys():
            yield w
    def similar(self, w, n=10, r=0.0):
        """Return up to `n` (word, similarity) pairs with similarity >= `r`."""
        return [
            (w, sim)
            for w, sim in self.model.most_similar(positive=[w])
            if sim >= r
        ][:n]
    def similarity(self, w, v):
        """Return the cosine similarity between words `w` and `v`."""
        return self.model.similarity(w, v)
| gpl-3.0 |
adelton/django | tests/contenttypes_tests/test_models.py | 249 | 12059 | from __future__ import unicode_literals
import warnings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.views import shortcut
from django.contrib.sites.shortcuts import get_current_site
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
from django.http import Http404, HttpRequest
from django.test import TestCase, mock, override_settings
from django.utils import six
from .models import (
ConcreteModel, FooWithBrokenAbsoluteUrl, FooWithoutUrl, FooWithUrl,
ProxyModel,
)
class ContentTypesTests(TestCase):
    # Tests for django.contrib.contenttypes: the ContentTypeManager cache,
    # proxy/deferred model resolution, the admin "view on site" shortcut
    # view, and error behavior for stale/missing content types.
    def setUp(self):
        # Start every test from a cold ContentType cache so query counts
        # below are deterministic.
        ContentType.objects.clear_cache()
    def tearDown(self):
        ContentType.objects.clear_cache()
    def test_lookup_cache(self):
        """
        Make sure that the content type cache (see ContentTypeManager)
        works correctly. Lookups for a particular content type -- by model, ID
        or natural key -- should hit the database only on the first lookup.
        """
        # At this point, a lookup for a ContentType should hit the DB
        with self.assertNumQueries(1):
            ContentType.objects.get_for_model(ContentType)
        # A second hit, though, won't hit the DB, nor will a lookup by ID
        # or natural key
        with self.assertNumQueries(0):
            ct = ContentType.objects.get_for_model(ContentType)
        with self.assertNumQueries(0):
            ContentType.objects.get_for_id(ct.id)
        with self.assertNumQueries(0):
            ContentType.objects.get_by_natural_key('contenttypes',
                                                   'contenttype')
        # Once we clear the cache, another lookup will again hit the DB
        ContentType.objects.clear_cache()
        with self.assertNumQueries(1):
            ContentType.objects.get_for_model(ContentType)
        # The same should happen with a lookup by natural key
        ContentType.objects.clear_cache()
        with self.assertNumQueries(1):
            ContentType.objects.get_by_natural_key('contenttypes',
                                                   'contenttype')
        # And a second hit shouldn't hit the DB
        with self.assertNumQueries(0):
            ContentType.objects.get_by_natural_key('contenttypes',
                                                   'contenttype')
    def test_get_for_models_empty_cache(self):
        # Empty cache: both models are fetched in a single query.
        with self.assertNumQueries(1):
            cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
        self.assertEqual(cts, {
            ContentType: ContentType.objects.get_for_model(ContentType),
            FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
        })
    def test_get_for_models_partial_cache(self):
        # Partial cache: only the uncached model triggers a query.
        ContentType.objects.get_for_model(ContentType)
        with self.assertNumQueries(1):
            cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
        self.assertEqual(cts, {
            ContentType: ContentType.objects.get_for_model(ContentType),
            FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
        })
    def test_get_for_models_full_cache(self):
        # Full cache: no queries at all.
        ContentType.objects.get_for_model(ContentType)
        ContentType.objects.get_for_model(FooWithUrl)
        with self.assertNumQueries(0):
            cts = ContentType.objects.get_for_models(ContentType, FooWithUrl)
        self.assertEqual(cts, {
            ContentType: ContentType.objects.get_for_model(ContentType),
            FooWithUrl: ContentType.objects.get_for_model(FooWithUrl),
        })
    def test_get_for_concrete_model(self):
        """
        Make sure the `for_concrete_model` kwarg correctly works
        with concrete, proxy and deferred models
        """
        concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
        # A proxy resolves to its concrete model's content type by default.
        self.assertEqual(concrete_model_ct,
                         ContentType.objects.get_for_model(ProxyModel))
        self.assertEqual(concrete_model_ct,
                         ContentType.objects.get_for_model(ConcreteModel,
                                                           for_concrete_model=False))
        # With for_concrete_model=False the proxy gets its own content type.
        proxy_model_ct = ContentType.objects.get_for_model(ProxyModel,
                                                           for_concrete_model=False)
        self.assertNotEqual(concrete_model_ct, proxy_model_ct)
        # Make sure deferred model are correctly handled
        ConcreteModel.objects.create(name="Concrete")
        # .only('pk') produces dynamically generated deferred subclasses.
        DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
        DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__
        self.assertEqual(concrete_model_ct,
                         ContentType.objects.get_for_model(DeferredConcreteModel))
        self.assertEqual(concrete_model_ct,
                         ContentType.objects.get_for_model(DeferredConcreteModel,
                                                           for_concrete_model=False))
        self.assertEqual(concrete_model_ct,
                         ContentType.objects.get_for_model(DeferredProxyModel))
        self.assertEqual(proxy_model_ct,
                         ContentType.objects.get_for_model(DeferredProxyModel,
                                                           for_concrete_model=False))
    def test_get_for_concrete_models(self):
        """
        Make sure the `for_concrete_models` kwarg correctly works
        with concrete, proxy and deferred models.
        """
        concrete_model_ct = ContentType.objects.get_for_model(ConcreteModel)
        cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel)
        self.assertEqual(cts, {
            ConcreteModel: concrete_model_ct,
            ProxyModel: concrete_model_ct,
        })
        proxy_model_ct = ContentType.objects.get_for_model(ProxyModel,
                                                           for_concrete_model=False)
        cts = ContentType.objects.get_for_models(ConcreteModel, ProxyModel,
                                                 for_concrete_models=False)
        self.assertEqual(cts, {
            ConcreteModel: concrete_model_ct,
            ProxyModel: proxy_model_ct,
        })
        # Make sure deferred model are correctly handled
        ConcreteModel.objects.create(name="Concrete")
        DeferredConcreteModel = ConcreteModel.objects.only('pk').get().__class__
        DeferredProxyModel = ProxyModel.objects.only('pk').get().__class__
        cts = ContentType.objects.get_for_models(DeferredConcreteModel,
                                                 DeferredProxyModel)
        self.assertEqual(cts, {
            DeferredConcreteModel: concrete_model_ct,
            DeferredProxyModel: concrete_model_ct,
        })
        cts = ContentType.objects.get_for_models(DeferredConcreteModel,
                                                 DeferredProxyModel,
                                                 for_concrete_models=False)
        self.assertEqual(cts, {
            DeferredConcreteModel: concrete_model_ct,
            DeferredProxyModel: proxy_model_ct,
        })
    @override_settings(ALLOWED_HOSTS=['example.com'])
    def test_shortcut_view(self):
        """
        Check that the shortcut view (used for the admin "view on site"
        functionality) returns a complete URL regardless of whether the sites
        framework is installed
        """
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "Example.com",
            "SERVER_PORT": "80",
        }
        user_ct = ContentType.objects.get_for_model(FooWithUrl)
        obj = FooWithUrl.objects.create(name="john")
        # With the sites framework: domain comes from the current Site.
        with self.modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'}):
            response = shortcut(request, user_ct.id, obj.id)
            self.assertEqual("http://%s/users/john/" % get_current_site(request).domain,
                             response._headers.get("location")[1])
        # Without it: domain falls back to the request's host header.
        with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
            response = shortcut(request, user_ct.id, obj.id)
            self.assertEqual("http://Example.com/users/john/",
                             response._headers.get("location")[1])
    def test_shortcut_view_without_get_absolute_url(self):
        """
        Check that the shortcut view (used for the admin "view on site"
        functionality) returns 404 when get_absolute_url is not defined.
        """
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "Example.com",
            "SERVER_PORT": "80",
        }
        user_ct = ContentType.objects.get_for_model(FooWithoutUrl)
        obj = FooWithoutUrl.objects.create(name="john")
        self.assertRaises(Http404, shortcut, request, user_ct.id, obj.id)
    def test_shortcut_view_with_broken_get_absolute_url(self):
        """
        Check that the shortcut view does not catch an AttributeError raised
        by the model's get_absolute_url method.
        Refs #8997.
        """
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "Example.com",
            "SERVER_PORT": "80",
        }
        user_ct = ContentType.objects.get_for_model(FooWithBrokenAbsoluteUrl)
        obj = FooWithBrokenAbsoluteUrl.objects.create(name="john")
        self.assertRaises(AttributeError, shortcut, request, user_ct.id, obj.id)
    def test_missing_model(self):
        """
        Ensures that displaying content types in admin (or anywhere) doesn't
        break on leftover content type records in the DB for which no model
        is defined anymore.
        """
        ct = ContentType.objects.create(
            app_label='contenttypes',
            model='OldModel',
        )
        self.assertEqual(six.text_type(ct), 'OldModel')
        self.assertIsNone(ct.model_class())
        # Make sure stale ContentTypes can be fetched like any other object.
        # Before Django 1.6 this caused a NoneType error in the caching mechanism.
        # Instead, just return the ContentType object and let the app detect stale states.
        ct_fetched = ContentType.objects.get_for_id(ct.pk)
        self.assertIsNone(ct_fetched.model_class())
    def test_name_deprecation(self):
        """
        ContentType.name has been removed. Test that a warning is emitted when
        creating a ContentType with a `name`, but the creation should not fail.
        """
        with warnings.catch_warnings(record=True) as warns:
            warnings.simplefilter('always')
            ContentType.objects.create(
                name='Name',
                app_label='contenttypes',
                model='OldModel',
            )
        self.assertEqual(len(warns), 1)
        self.assertEqual(
            str(warns[0].message),
            "ContentType.name field doesn't exist any longer. Please remove it from your code."
        )
        self.assertTrue(ContentType.objects.filter(model='OldModel').exists())
    @mock.patch('django.contrib.contenttypes.models.ContentTypeManager.get_or_create')
    @mock.patch('django.contrib.contenttypes.models.ContentTypeManager.get')
    def test_message_if_get_for_model_fails(self, mocked_get, mocked_get_or_create):
        """
        Check that `RuntimeError` with nice error message is raised if
        `get_for_model` fails because of database errors.
        """
        def _test_message(mocked_method):
            # Each database error class should produce the same friendly hint.
            for ExceptionClass in (IntegrityError, OperationalError, ProgrammingError):
                mocked_method.side_effect = ExceptionClass
                with self.assertRaisesMessage(
                        RuntimeError,
                        "Error creating new content types. Please make sure contenttypes "
                        "is migrated before trying to migrate apps individually."
                ):
                    ContentType.objects.get_for_model(ContentType)
        _test_message(mocked_get)
        # When .get() misses, the fallback .get_or_create() path must produce
        # the same message.
        mocked_get.side_effect = ContentType.DoesNotExist
        _test_message(mocked_get_or_create)
| bsd-3-clause |
DataFighter/Lasagne-tutorial | lasagne/objectives.py | 2 | 6976 | import theano
import theano.tensor as T
from theano.tensor.nnet import binary_crossentropy, categorical_crossentropy
def mse(x, t):
    """Computes the element-wise squared error between predictions and targets.

    Note: despite the historical summary ("mean across all dimensions"), no
    aggregation is performed here -- the result has the same shape as the
    inputs. The objective classes in this module apply the 'mean'/'sum'
    aggregation afterwards.

    :parameters:
        - x : predicted values
        - t : target values

    :returns:
        - output : the element-wise squared differences, ``(x - t) ** 2``
    """
    return (x - t) ** 2
class Objective(object):
    """
    Training objective

    The `get_loss` method returns cost expression useful for training or
    evaluating a network.
    """
    # NOTE: this docstring used to sit *after* `_valid_aggregation`, where
    # Python treats it as a discarded string expression rather than the class
    # docstring; it is now the first statement so `__doc__` is populated.

    # Aggregation modes accepted by the constructor and by `get_loss`.
    _valid_aggregation = {None, 'mean', 'sum'}

    def __init__(self, input_layer, loss_function=mse, aggregation='mean'):
        """
        Constructor

        :parameters:
            - input_layer : a `Layer` whose output is the networks prediction
                given its input
            - loss_function : a loss function of the form `f(x, t)` that
                returns a scalar loss given tensors that represent the
                predicted and true values as arguments.
            - aggregation : either:
                - `'mean'` or `None` : the mean of the elements of the
                    loss will be returned
                - `'sum'` : the sum of the elements of the loss will be
                    returned
        """
        self.input_layer = input_layer
        self.loss_function = loss_function
        # Default symbolic target used when `get_loss` is called without an
        # explicit `target` expression.
        self.target_var = T.matrix("target")
        if aggregation not in self._valid_aggregation:
            raise ValueError('aggregation must be \'mean\', \'sum\', '
                             'or None, not {0}'.format(aggregation))
        self.aggregation = aggregation

    def get_loss(self, input=None, target=None, aggregation=None, **kwargs):
        """
        Get loss scalar expression

        :parameters:
            - input : (default `None`) an expression that results in the
                input data that is passed to the network
            - target : (default `None`) an expression that results in the
                desired output that the network is being trained to generate
                given the input
            - aggregation : None to use the value passed to the
                constructor or a value to override it
            - kwargs : additional keyword arguments passed to `input_layer`'s
                `get_output` method

        :returns:
            - output : loss expressions
        """
        network_output = self.input_layer.get_output(input, **kwargs)
        if target is None:
            target = self.target_var
        # Validate the override before falling back to the constructor value.
        if aggregation not in self._valid_aggregation:
            raise ValueError('aggregation must be \'mean\', \'sum\', '
                             'or None, not {0}'.format(aggregation))
        if aggregation is None:
            aggregation = self.aggregation
        losses = self.loss_function(network_output, target)
        if aggregation is None or aggregation == 'mean':
            return losses.mean()
        elif aggregation == 'sum':
            return losses.sum()
        else:
            # Unreachable: every valid mode is handled above.
            raise RuntimeError('This should have been caught earlier')
class MaskedObjective(object):
    """
    Masked training objective

    The `get_loss` method returns an expression that can be used for
    training with a gradient descent approach, with masking applied to weight
    the contribution of samples to the final loss.
    """
    # NOTE: this docstring used to sit *after* `_valid_aggregation`, where
    # Python treats it as a discarded string expression rather than the class
    # docstring; it is now the first statement so `__doc__` is populated.

    # Aggregation modes accepted by the constructor and by `get_loss`.
    _valid_aggregation = {None, 'sum', 'mean', 'normalized_sum'}

    def __init__(self, input_layer, loss_function=mse, aggregation='mean'):
        """
        Constructor

        :parameters:
            - input_layer : a `Layer` whose output is the networks prediction
                given its input
            - loss_function : a loss function of the form `f(x, t, m)` that
                returns a scalar loss given tensors that represent the
                predicted values, true values and mask as arguments.
            - aggregation : either:
                - `None` or `'mean'` : the elements of the loss will be
                    multiplied by the mask and the mean returned
                - `'sum'` : the elements of the loss will be multiplied by
                    the mask and the sum returned
                - `'normalized_sum'` : the elements of the loss will be
                    multiplied by the mask, summed and divided by the sum of
                    the mask
        """
        self.input_layer = input_layer
        self.loss_function = loss_function
        # Default symbolic target and mask used when `get_loss` is called
        # without explicit expressions.
        self.target_var = T.matrix("target")
        self.mask_var = T.matrix("mask")
        if aggregation not in self._valid_aggregation:
            raise ValueError('aggregation must be \'mean\', \'sum\', '
                             '\'normalized_sum\' or None,'
                             ' not {0}'.format(aggregation))
        self.aggregation = aggregation

    def get_loss(self, input=None, target=None, mask=None,
                 aggregation=None, **kwargs):
        """
        Get loss scalar expression

        :parameters:
            - input : (default `None`) an expression that results in the
                input data that is passed to the network
            - target : (default `None`) an expression that results in the
                desired output that the network is being trained to generate
                given the input
            - mask : None for no mask, or a soft mask that is the same shape
                as - or broadcast-able to the shape of - the result of
                applying the loss function. It selects/weights the
                contributions of the resulting loss values
            - aggregation : None to use the value passed to the
                constructor or a value to override it
            - kwargs : additional keyword arguments passed to `input_layer`'s
                `get_output` method

        :returns:
            - output : loss expressions
        """
        network_output = self.input_layer.get_output(input, **kwargs)
        if target is None:
            target = self.target_var
        if mask is None:
            mask = self.mask_var
        # Validate the override before falling back to the constructor value.
        if aggregation not in self._valid_aggregation:
            raise ValueError('aggregation must be \'mean\', \'sum\', '
                             '\'normalized_sum\' or None, '
                             'not {0}'.format(aggregation))
        # Get aggregation value passed to constructor if None
        if aggregation is None:
            aggregation = self.aggregation
        masked_losses = self.loss_function(network_output, target) * mask
        if aggregation is None or aggregation == 'mean':
            return masked_losses.mean()
        elif aggregation == 'sum':
            return masked_losses.sum()
        elif aggregation == 'normalized_sum':
            return masked_losses.sum() / mask.sum()
        else:
            # Unreachable: every valid mode is handled above.
            raise RuntimeError('This should have been caught earlier')
| mit |
WSDC-NITWarangal/django | tests/template_tests/filter_tests/test_dictsort.py | 342 | 1477 | from django.template.defaultfilters import dictsort
from django.test import SimpleTestCase
class FunctionTests(SimpleTestCase):
    """Tests for the ``dictsort`` template filter."""

    def test_sort(self):
        sorted_dicts = dictsort(
            [{'age': 23, 'name': 'Barbara-Ann'},
             {'age': 63, 'name': 'Ra Ra Rasputin'},
             {'name': 'Jonny B Goode', 'age': 18}],
            'age',
        )
        # FIX: the loop variable was named ``dict``, shadowing the builtin;
        # renamed to ``d``.
        self.assertEqual(
            [sorted(d.items()) for d in sorted_dicts],
            [[('age', 18), ('name', 'Jonny B Goode')],
             [('age', 23), ('name', 'Barbara-Ann')],
             [('age', 63), ('name', 'Ra Ra Rasputin')]],
        )

    def test_dictsort_complex_sorting_key(self):
        """
        Since dictsort uses template.Variable under the hood, it can sort
        on keys like 'foo.bar'.
        """
        data = [
            {'foo': {'bar': 1, 'baz': 'c'}},
            {'foo': {'bar': 2, 'baz': 'b'}},
            {'foo': {'bar': 3, 'baz': 'a'}},
        ]
        sorted_data = dictsort(data, 'foo.baz')
        self.assertEqual([d['foo']['bar'] for d in sorted_data], [3, 2, 1])

    def test_invalid_values(self):
        """
        If dictsort is passed something other than a list of dictionaries,
        fail silently.
        """
        self.assertEqual(dictsort([1, 2, 3], 'age'), '')
        self.assertEqual(dictsort('Hello!', 'age'), '')
        self.assertEqual(dictsort({'a': 1}, 'age'), '')
        self.assertEqual(dictsort(1, 'age'), '')
| bsd-3-clause |
playm2mboy/edx-platform | cms/djangoapps/contentstore/features/problem-editor.py | 116 | 12757 | # disable missing docstring
# pylint: disable=missing-docstring
import json
from lettuce import world, step
from nose.tools import assert_equal, assert_true # pylint: disable=no-name-in-module
from common import type_in_codemirror, open_new_course
from advanced_settings import change_value, ADVANCED_MODULES_KEY
from course_import import import_file
# Labels of the settings fields in Studio's problem-settings editor; they are
# matched against the UI text by the ``world`` helpers below.
DISPLAY_NAME = "Display Name"
MAXIMUM_ATTEMPTS = "Maximum Attempts"
PROBLEM_WEIGHT = "Problem Weight"
RANDOMIZATION = 'Randomization'
SHOW_ANSWER = "Show Answer"
SHOW_RESET_BUTTON = "Show Reset Button"
TIMER_BETWEEN_ATTEMPTS = "Timer Between Attempts"
MATLAB_API_KEY = "Matlab API key"
# Step: create a fresh unit and add a Blank Common Problem to it.
@step('I have created a Blank Common Problem$')
def i_created_blank_common_problem(step):
    step.given('I am in Studio editing a new unit')
    step.given("I have created another Blank Common Problem")
# Step: enable an advanced module in course settings, then return to the unit.
@step('I have created a unit with advanced module "(.*)"$')
def i_created_unit_with_advanced_module(step, advanced_module):
    step.given('I am in Studio editing a new unit')
    url = world.browser.url
    step.given("I select the Advanced Settings")
    change_value(step, ADVANCED_MODULES_KEY, '["{}"]'.format(advanced_module))
    # Navigate back to the unit page we were editing before the detour.
    world.visit(url)
    world.wait_for_xmodule()
# Step: add an advanced (non-problem) component of the given type to the unit.
@step('I have created an advanced component "(.*)" of type "(.*)"')
def i_create_new_advanced_component(step, component_type, advanced_component):
    world.create_component_instance(
        step=step,
        category='advanced',
        component_type=component_type,
        advanced_component=advanced_component
    )
# Step: add a Blank Common Problem component to the current unit.
@step('I have created another Blank Common Problem$')
def i_create_new_common_problem(step):
    world.create_component_instance(
        step=step,
        category='problem',
        component_type='Blank Common Problem'
    )
# Step: fire a synthetic mouseover on the element with the given CSS class.
@step('when I mouseover on "(.*)"')
def i_mouseover_on_html_component(step, element_class):
    action_css = '.{}'.format(element_class)
    world.trigger_event(action_css, event='mouseover')
# Step: wait until the annotation "Reply" link is visible.
@step(u'I can see Reply to Annotation link$')
def i_see_reply_to_annotation_link(_step):
    css_selector = 'a.annotatable-reply'
    world.wait_for_visible(css_selector)
# Step: assert the page scrolled up/down after clicking the given link.
@step(u'I see that page has scrolled "(.*)" when I click on "(.*)" link$')
def i_see_annotation_problem_page_scrolls(_step, scroll_direction, link_css):
    # Sample window scroll offset (via jQuery) before and after the click.
    scroll_js = "$(window).scrollTop();"
    scroll_height_before = world.browser.evaluate_script(scroll_js)
    world.css_click("a.{}".format(link_css))
    scroll_height_after = world.browser.evaluate_script(scroll_js)
    if scroll_direction == "up":
        assert scroll_height_after < scroll_height_before
    elif scroll_direction == "down":
        assert scroll_height_after > scroll_height_before
# Step: add an advanced problem template of the given type to the unit.
@step('I have created an advanced problem of type "(.*)"$')
def i_create_new_advanced_problem(step, component_type):
    world.create_component_instance(
        step=step,
        category='problem',
        component_type=component_type,
        is_advanced=True
    )
# Step: open the component editor and switch to the Settings tab.
@step('I edit and select Settings$')
def i_edit_and_select_settings(_step):
    world.edit_component_and_select_settings()
# Step: verify all settings rows show their default values for a blank
# problem; each entry is [label, value, is_explicitly_set].
@step('I see the advanced settings and their expected values$')
def i_see_advanced_settings_with_values(step):
    world.verify_all_setting_entries(
        [
            [DISPLAY_NAME, "Blank Common Problem", True],
            [MATLAB_API_KEY, "", False],
            [MAXIMUM_ATTEMPTS, "", False],
            [PROBLEM_WEIGHT, "", False],
            [RANDOMIZATION, "Never", False],
            [SHOW_ANSWER, "Finished", False],
            [SHOW_RESET_BUTTON, "False", False],
            [TIMER_BETWEEN_ATTEMPTS, "0", False],
        ])
# Step: set Display Name to a float-like string and verify it is accepted.
@step('I can modify the display name')
def i_can_modify_the_display_name(_step):
    # Verifying that the display name can be a string containing a floating point value
    # (to confirm that we don't throw an error because it is of the wrong type).
    index = world.get_setting_entry_index(DISPLAY_NAME)
    world.set_field_value(index, '3.4')
    verify_modified_display_name()
# Step: save, reopen, and confirm the modified display name survived.
@step('my display name change is persisted on save')
def my_display_name_change_is_persisted_on_save(step):
    world.save_component_and_reopen(step)
    verify_modified_display_name()
# Step: assert the rendered problem header shows the given name.
# NOTE(review): compares against .upper() -- presumably the header is
# CSS-uppercased; confirm against the template before changing.
@step('the problem display name is "(.*)"$')
def verify_problem_display_name(step, name):
    assert_equal(name.upper(), world.browser.find_by_css('.problem-header').text)
# Step: set a display name containing quote/ampersand characters.
@step('I can specify special characters in the display name')
def i_can_modify_the_display_name_with_special_chars(_step):
    index = world.get_setting_entry_index(DISPLAY_NAME)
    world.set_field_value(index, "updated ' \" &")
    verify_modified_display_name_with_special_chars()
# Step: set a display name containing a <script> tag and save.
@step('I can specify html in the display name and save')
def i_can_modify_the_display_name_with_html(_step):
    """
    If alert appear on save then UnexpectedAlertPresentException
    will occur and test will fail.
    """
    index = world.get_setting_entry_index(DISPLAY_NAME)
    world.set_field_value(index, "<script>alert('test')</script>")
    verify_modified_display_name_with_html()
    world.save_component()
# Step: save, reopen, and confirm the special-character name survived.
@step('my special characters and persisted on save')
def special_chars_persisted_on_save(step):
    world.save_component_and_reopen(step)
    verify_modified_display_name_with_special_chars()
# Step: clear the Display Name override back to the unset default.
@step('I can revert the display name to unset')
def can_revert_display_name_to_unset(_step):
    world.revert_setting_entry(DISPLAY_NAME)
    verify_unset_display_name()
# Step: save, reopen, and confirm the display name remains unset.
@step('my display name is unset on save')
def my_display_name_is_persisted_on_save(step):
    world.save_component_and_reopen(step)
    verify_unset_display_name()
# Step: choose "Per Student" in the Randomization dropdown.
@step('I can select Per Student for Randomization')
def i_can_select_per_student_for_randomization(_step):
    world.browser.select(RANDOMIZATION, "Per Student")
    verify_modified_randomization()
# Step: save, reopen, and confirm the randomization choice survived.
@step('my change to randomization is persisted')
def my_change_to_randomization_is_persisted(step):
    world.save_component_and_reopen(step)
    verify_modified_randomization()
# Step: revert Randomization and confirm it returns to "Never" (unset).
@step('I can revert to the default value for randomization')
def i_can_revert_to_default_for_randomization(step):
    world.revert_setting_entry(RANDOMIZATION)
    world.save_component_and_reopen(step)
    world.verify_setting_entry(world.get_setting_entry(RANDOMIZATION), RANDOMIZATION, "Never", False)
# Step: type a weight value and verify the field shows it as set.
@step('I can set the weight to "(.*)"?')
def i_can_set_weight(_step, weight):
    set_weight(weight)
    verify_modified_weight()
# Step: save, reopen, and confirm the weight survived.
@step('my change to weight is persisted')
def my_change_to_weight_is_persisted(step):
    world.save_component_and_reopen(step)
    verify_modified_weight()
# Step: revert Problem Weight and confirm it is blank/unset after save.
@step('I can revert to the default value of unset for weight')
def i_can_revert_to_default_for_unset_weight(step):
    world.revert_setting_entry(PROBLEM_WEIGHT)
    world.save_component_and_reopen(step)
    world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "", False)
# Step: an invalid weight value is never sent to the model.
@step('if I set the weight to "(.*)", it remains unset')
def set_the_weight_to_abc(step, bad_weight):
    set_weight(bad_weight)
    # We show the clear button immediately on type, hence the "True" here.
    world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "", True)
    world.save_component_and_reopen(step)
    # But no change was actually ever sent to the model, so on reopen, explicitly_set is False
    world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "", False)
# Step: any accepted Maximum Attempts value persists as a non-negative int.
@step('if I set the max attempts to "(.*)", it will persist as a valid integer$')
def set_the_max_attempts(step, max_attempts_set):
    # on firefox with selenium, the behavior is different.
    # eg 2.34 displays as 2.34 and is persisted as 2
    index = world.get_setting_entry_index(MAXIMUM_ATTEMPTS)
    world.set_field_value(index, max_attempts_set)
    world.save_component_and_reopen(step)
    value = world.css_value('input.setting-input', index=index)
    assert value != "", "max attempts is blank"
    assert int(value) >= 0
# Step: the LaTeX "Edit High Level Source" button must be absent.
@step('Edit High Level Source is not visible')
def edit_high_level_source_not_visible(step):
    verify_high_level_source_links(step, False)
# Step: the LaTeX "Edit High Level Source" button must be present.
@step('Edit High Level Source is visible')
def edit_high_level_source_links_visible(step):
    verify_high_level_source_links(step, True)
# Step: cancelling the editor must discard all pending settings changes.
@step('If I press Cancel my changes are not persisted')
def cancel_does_not_save_changes(step):
    world.cancel_component(step)
    step.given("I edit and select Settings")
    step.given("I see the advanced settings and their expected values")
# Step: turn on the LaTeX compiler in Advanced Settings, then return here.
@step('I have enabled latex compiler')
def enable_latex_compiler(step):
    url = world.browser.url
    step.given("I select the Advanced Settings")
    change_value(step, 'Enable LaTeX Compiler', 'true')
    # Navigate back to the unit page we were editing before the detour.
    world.visit(url)
    world.wait_for_xmodule()
# Step: create a new unit with LaTeX enabled and add a LaTeX problem.
@step('I have created a LaTeX Problem')
def create_latex_problem(step):
    step.given('I am in Studio editing a new unit')
    step.given('I have enabled latex compiler')
    world.create_component_instance(
        step=step,
        category='problem',
        component_type='Problem Written in LaTeX',
        is_advanced=True
    )
# Step: open the high-level-source editor, type into it, and compile.
@step('I edit and compile the High Level Source')
def edit_latex_source(_step):
    open_high_level_source()
    type_in_codemirror(1, "hi")
    world.css_click('.hls-compile')
# Step: wait until the compiled problem renders the text typed above.
@step('my change to the High Level Source is persisted')
def high_level_source_persisted(_step):
    # Polled predicate for world.wait_for; ``driver`` is required by the
    # wait API but unused here.
    def verify_text(driver):
        css_sel = '.problem div>span'
        return world.css_text(css_sel) == 'hi'
    world.wait_for(verify_text, timeout=10)
# Step: reopen the high-level-source editor and confirm the change is there.
@step('I view the High Level Source I see my changes')
def high_level_source_in_editor(_step):
    open_high_level_source()
    assert_equal('hi', world.css_value('.source-edit-box'))
# Step: start from a brand-new, empty course.
@step(u'I have an empty course')
def i_have_empty_course(step):
    open_new_course()
# Step: import the given course export file into the current course.
@step(u'I import the file "([^"]*)"$')
def i_import_the_file(_step, filename):
    import_file(filename)
# Step: click through to the vertical with the given display name.
@step(u'I go to the vertical "([^"]*)"$')
def i_go_to_vertical(_step, vertical):
    world.css_click("span:contains('{0}')".format(vertical))
# Step: navigate to the unit with the given display name (via raw JS,
# since the span itself is not the click target).
@step(u'I go to the unit "([^"]*)"$')
def i_go_to_unit(_step, unit):
    loc = "window.location = $(\"span:contains('{0}')\").closest('a').attr('href')".format(unit)
    world.browser.execute_script(loc)
# Step: assert the page title shows the given message.
@step(u'I see a message that says "([^"]*)"$')
def i_can_see_message(_step, msg):
    msg = json.dumps(msg)    # escape quotes
    world.css_has_text("h2.title", msg)
# Step: the first problem component on the page can be opened for editing.
@step(u'I can edit the problem$')
def i_can_edit_problem(_step):
    world.edit_component()
# Step: fill the second component's editor with an annotation response body.
@step(u'I edit first blank advanced problem for annotation response$')
def i_edit_blank_problem_for_annotation_response(_step):
    world.edit_component(1)
    text = """
        <problem>
            <annotationresponse>
                <annotationinput><text>Text of annotation</text></annotationinput>
            </annotationresponse>
        </problem>"""
    type_in_codemirror(0, text)
    world.save_component()
# Step: toggle the markdown cheatsheet and wait for it to appear.
@step(u'I can see cheatsheet$')
def verify_cheat_sheet_displaying(_step):
    world.css_click("a.cheatsheet-toggle")
    css_selector = 'article.simple-editor-cheatsheet'
    world.wait_for_visible(css_selector)
# Helper: assert presence/absence of the LaTeX compile button, then close
# the editor so the page is left in a clean state.
def verify_high_level_source_links(step, visible):
    if visible:
        assert_true(world.is_css_present('.launch-latex-compiler'),
                    msg="Expected to find the latex button but it is not present.")
    else:
        assert_true(world.is_css_not_present('.launch-latex-compiler'),
                    msg="Expected not to find the latex button but it is present.")
    world.cancel_component(step)
# Helpers: each checks one settings row as [label, value, explicitly_set].
def verify_modified_weight():
    world.verify_setting_entry(world.get_setting_entry(PROBLEM_WEIGHT), PROBLEM_WEIGHT, "3.5", True)
def verify_modified_randomization():
    world.verify_setting_entry(world.get_setting_entry(RANDOMIZATION), RANDOMIZATION, "Per Student", True)
def verify_modified_display_name():
    world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, '3.4', True)
def verify_modified_display_name_with_special_chars():
    world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, "updated ' \" &", True)
def verify_modified_display_name_with_html():
    world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, "<script>alert('test')</script>", True)
def verify_unset_display_name():
    world.verify_setting_entry(world.get_setting_entry(DISPLAY_NAME), DISPLAY_NAME, 'Blank Advanced Problem', False)
# Helper: type a value into the Problem Weight field.
def set_weight(weight):
    index = world.get_setting_entry_index(PROBLEM_WEIGHT)
    world.set_field_value(index, weight)
# Helper: open the component editor and launch the LaTeX source view.
def open_high_level_source():
    world.edit_component()
    world.css_click('.launch-latex-compiler > a')
| agpl-3.0 |
kushalbhola/MyStuff | venv/Lib/site-packages/pip/_vendor/chardet/euckrprober.py | 290 | 1748 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCKRDistributionAnalysis
from .mbcssm import EUCKR_SM_MODEL
class EUCKRProber(MultiByteCharSetProber):
    """Multi-byte charset prober specialised for EUC-KR (Korean)."""

    def __init__(self):
        super(EUCKRProber, self).__init__()
        # Wire up the EUC-KR state machine and its matching byte-frequency
        # analyser, then put the prober into a known-clean state.
        self.coding_sm = CodingStateMachine(EUCKR_SM_MODEL)
        self.distribution_analyzer = EUCKRDistributionAnalysis()
        self.reset()

    @property
    def language(self):
        return "Korean"

    @property
    def charset_name(self):
        return "EUC-KR"
| apache-2.0 |
jonparrott/gcloud-python | dns/tests/unit/test_resource_record_set.py | 3 | 3290 | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
class TestResourceRecordSet(unittest.TestCase):
    """Unit tests for ``google.cloud.dns.resource_record_set.ResourceRecordSet``."""

    # Canonical fully-populated API resource; the "missing key" tests below
    # each drop one required field from a copy of this mapping.
    _BASE_RESOURCE = {
        'name': 'test.example.com',
        'type': 'CNAME',
        'ttl': 3600,
        'rrdatas': ['www.example.com'],
    }

    @staticmethod
    def _get_target_class():
        from google.cloud.dns.resource_record_set import ResourceRecordSet
        return ResourceRecordSet

    def _make_one(self, *args, **kw):
        return self._get_target_class()(*args, **kw)

    def _check_missing_key(self, missing):
        # Shared helper: ``from_api_repr`` must raise KeyError when the
        # named required field is absent from the resource mapping.
        resource = {key: value
                    for key, value in self._BASE_RESOURCE.items()
                    if key != missing}
        zone = _Zone()
        klass = self._get_target_class()
        with self.assertRaises(KeyError):
            klass.from_api_repr(resource, zone=zone)

    def test_ctor(self):
        zone = _Zone()
        rrs = self._make_one('test.example.com', 'CNAME', 3600,
                             ['www.example.com'], zone)
        self.assertEqual(rrs.name, 'test.example.com')
        self.assertEqual(rrs.record_type, 'CNAME')
        self.assertEqual(rrs.ttl, 3600)
        self.assertEqual(rrs.rrdatas, ['www.example.com'])
        self.assertIs(rrs.zone, zone)

    def test_from_api_repr_missing_rrdatas(self):
        self._check_missing_key('rrdatas')

    def test_from_api_repr_missing_ttl(self):
        self._check_missing_key('ttl')

    def test_from_api_repr_missing_type(self):
        self._check_missing_key('type')

    def test_from_api_repr_missing_name(self):
        self._check_missing_key('name')

    def test_from_api_repr_bare(self):
        zone = _Zone()
        RESOURCE = {
            'kind': 'dns#resourceRecordSet',
            'name': 'test.example.com',
            'type': 'CNAME',
            'ttl': '3600',  # the API returns TTL as a string; ctor coerces it
            'rrdatas': ['www.example.com'],
        }
        klass = self._get_target_class()
        rrs = klass.from_api_repr(RESOURCE, zone=zone)
        self.assertEqual(rrs.name, 'test.example.com')
        self.assertEqual(rrs.record_type, 'CNAME')
        self.assertEqual(rrs.ttl, 3600)
        self.assertEqual(rrs.rrdatas, ['www.example.com'])
        self.assertIs(rrs.zone, zone)
class _Zone(object):
    # Minimal stand-in for a managed zone: the record-set tests only need an
    # object identity to compare against (``assertIs(rrs.zone, zone)``).
    pass
| apache-2.0 |
mdehollander/bioconda-recipes | recipes/fgbio/fgbio.py | 27 | 3054 | #!/usr/bin/env python
#
# Wrapper script for invoking the jar.
#
# This script is written for use with the Conda package manager.
import subprocess
import sys
import os
from os import access, getenv, path, X_OK
# Expected name of the fgbio JAR file (the previous comment said "VarScan",
# a copy/paste leftover from a sibling wrapper script).
JAR_NAME = 'fgbio.jar'
# Conda package name; also the share/<PKG_NAME> directory holding the JAR.
PKG_NAME = 'fgbio'
# Default options passed to the `java` executable.
DEFAULT_JVM_MEM_OPTS = ['-Xms512m', '-Xmx1g']
def real_dirname(in_path):
"""Returns the path to the JAR file"""
realPath = os.path.dirname(os.path.realpath(in_path))
newPath = os.path.realpath(os.path.join(realPath, "..", "share", PKG_NAME))
return newPath
def java_executable():
"""Returns the name of the Java executable."""
java_home = getenv('JAVA_HOME')
java_bin = path.join('bin', 'java')
env_prefix = os.path.dirname(os.path.dirname(real_dirname(sys.argv[0])))
if java_home and access(os.path.join(java_home, java_bin), X_OK):
return os.path.join(java_home, java_bin)
else:
# Use Java installed with Anaconda to ensure correct version
return os.path.join(env_prefix, 'bin', 'java')
def jvm_opts(argv, default_mem_opts=DEFAULT_JVM_MEM_OPTS):
"""Constructs a list of Java arguments based on our argument list.
The argument list passed in argv must not include the script name.
The return value is a 3-tuple lists of strings of the form:
(memory_options, prop_options, passthrough_options)
"""
mem_opts, prop_opts, pass_args = [], [], []
for arg in argv:
if arg.startswith('-D') or arg.startswith('-XX'):
opts_list = prop_opts
elif arg.startswith('-Xm'):
opts_list = mem_opts
else:
opts_list = pass_args
opts_list.append(arg)
if mem_opts == [] and getenv('_JAVA_OPTIONS') is None:
mem_opts = default_mem_opts
return (mem_opts, prop_opts, pass_args)
def main():
    """Locate Java and the fgbio JAR, then launch the JAR with our arguments,
    propagating the JVM's exit status."""
    java = java_executable()
    jar_dir = real_dirname(sys.argv[0])
    (mem_opts, prop_opts, pass_args) = jvm_opts(sys.argv[1:])
    # A leading fully-qualified class name (e.g. "org.foo.Bar") means the
    # caller wants a specific entry point, so put the JAR on the classpath
    # instead of running it directly.
    if pass_args != [] and pass_args[0].startswith('org'):
        jar_arg = '-cp'
    else:
        jar_arg = '-jar'
    # If not already set to some value, set MALLOC_ARENA_MAX to constrain the number of memory pools (arenas)
    # used by glibc to a reasonable number. The default behaviour is to scale with the number of CPUs, which
    # can cause VIRTUAL memory usage to be ~0.5GB per cpu core in the system, e.g. 32GB of a 64-core machine
    # even when the heap and resident memory are only 1-4GB! See the following link for more discussion:
    # https://www.ibm.com/developerworks/community/blogs/kevgrig/entry/linux_glibc_2_10_rhel_6_malloc_may_show_excessive_virtual_memory_usage?lang=en
    if not os.environ.get("MALLOC_ARENA_MAX"):
        os.environ["MALLOC_ARENA_MAX"] = "4"
    jar_path = path.join(jar_dir, JAR_NAME)
    java_args = [java] + mem_opts + prop_opts + [jar_arg] + [jar_path] + pass_args
    # Exit with the JVM's return code so wrappers/pipelines see failures.
    sys.exit(subprocess.call(java_args))
if __name__ == "__main__":
    main()
| mit |
shiminsh/youtube_analytics | youtube/youtube/settings.py | 1 | 2602 | """
Django settings for youtube project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
from os.path import join
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to version control -- treat it as
# compromised; rotate it and load it from an environment variable before
# any production deployment.
SECRET_KEY = 'jgexy^5b)wvs1w-jbk0_ndlbm$l(3vyrj@&k(qnhomb8_t(ddi'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
# Empty list is fine while DEBUG=True; production must list served hosts.
ALLOWED_HOSTS = []
# Google OAuth2 client credentials for the YouTube Analytics API.
# NOTE(review): CLIENT_SECRET is also committed to the repository -- move it
# to environment configuration and revoke this value.
CLIENT_ID = '717506766598-bidh9afghbmtb43q4he3nf5ta51q7ejq.apps.googleusercontent.com'
CLIENT_SECRET = 'V9NEosAghz1b6NzC1qTrFNcA'
REDIRECT_URI = 'http://localhost/oauth2callback'
USE_UNIQUE_USER_ID = True
# 'offline' access yields a refresh token on the first authorization.
AUTH_EXTRA_ARGUMENTS = {'access_type': 'offline'}
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bootstrap3',
'analytics',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'youtube.urls'
WSGI_APPLICATION = 'youtube.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
'OPTIONS': {
'timeout': 500000,
}
}
}
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
| mit |
svn2github/django | tests/regressiontests/comment_tests/tests/comment_view_tests.py | 15 | 11790 | from __future__ import absolute_import
import re
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.comments import signals
from django.contrib.comments.models import Comment
from . import CommentTestCase
from ..models import Article, Book
post_redirect_re = re.compile(r'^http://testserver/posted/\?c=(?P<pk>\d+$)')
class CommentViewTests(CommentTestCase):
    # GET is not allowed on the comment-post view; 405 with Allow: POST.
    def testPostCommentHTTPMethods(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        response = self.client.get("/post/", data)
        self.assertEqual(response.status_code, 405)
        self.assertEqual(response["Allow"], "POST")
    # Missing content_type field -> 400.
    def testPostCommentMissingCtype(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        del data["content_type"]
        response = self.client.post("/post/", data)
        self.assertEqual(response.status_code, 400)
    # Unresolvable content_type value -> 400.
    def testPostCommentBadCtype(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        data["content_type"] = "Nobody expects the Spanish Inquisition!"
        response = self.client.post("/post/", data)
        self.assertEqual(response.status_code, 400)
    # Missing object_pk field -> 400.
    def testPostCommentMissingObjectPK(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        del data["object_pk"]
        response = self.client.post("/post/", data)
        self.assertEqual(response.status_code, 400)
    # object_pk that matches no Article -> 400.
    def testPostCommentBadObjectPK(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        data["object_pk"] = "14"
        response = self.client.post("/post/", data)
        self.assertEqual(response.status_code, 400)
    # Non-integer object_pk (replacement char) against an int-pk model -> 400.
    def testPostInvalidIntegerPK(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        data["comment"] = "This is another comment"
        data["object_pk"] = u'\ufffd'
        response = self.client.post("/post/", data)
        self.assertEqual(response.status_code, 400)
    # Non-decimal object_pk against the decimal-pk Book model -> 400.
    def testPostInvalidDecimalPK(self):
        b = Book.objects.get(pk='12.34')
        data = self.getValidData(b)
        data["comment"] = "This is another comment"
        data["object_pk"] = 'cookies'
        response = self.client.post("/post/", data)
        self.assertEqual(response.status_code, 400)
    # Submitting with the "preview" button renders the preview template
    # instead of saving the comment.
    def testCommentPreview(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        data["preview"] = "Preview"
        response = self.client.post("/post/", data)
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, "comments/preview.html")
    # A tampered security_hash must be rejected with 400.
    def testHashTampering(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        data["security_hash"] = "Nobody expects the Spanish Inquisition!"
        response = self.client.post("/post/", data)
        self.assertEqual(response.status_code, 400)
def testDebugCommentErrors(self):
"""The debug error template should be shown only if DEBUG is True"""
olddebug = settings.DEBUG
settings.DEBUG = True
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["security_hash"] = "Nobody expects the Spanish Inquisition!"
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
self.assertTemplateUsed(response, "comments/400-debug.html")
settings.DEBUG = False
response = self.client.post("/post/", data)
self.assertEqual(response.status_code, 400)
self.assertTemplateNotUsed(response, "comments/400-debug.html")
settings.DEBUG = olddebug
    # A fully valid POST redirects (302) and creates exactly one comment
    # recording the client IP.
    def testCreateValidComment(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        self.response = self.client.post("/post/", data, REMOTE_ADDR="1.2.3.4")
        self.assertEqual(self.response.status_code, 302)
        self.assertEqual(Comment.objects.count(), 1)
        c = Comment.objects.all()[0]
        self.assertEqual(c.ip_address, "1.2.3.4")
        self.assertEqual(c.comment, "This is my comment")
    # For a logged-in user, blank name/email are filled in from the account.
    def testPostAsAuthenticatedUser(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        data['name'] = data['email'] = ''
        self.client.login(username="normaluser", password="normaluser")
        self.response = self.client.post("/post/", data, REMOTE_ADDR="1.2.3.4")
        self.assertEqual(self.response.status_code, 302)
        self.assertEqual(Comment.objects.count(), 1)
        c = Comment.objects.all()[0]
        self.assertEqual(c.ip_address, "1.2.3.4")
        u = User.objects.get(username='normaluser')
        self.assertEqual(c.user, u)
        self.assertEqual(c.user_name, u.get_full_name())
        self.assertEqual(c.user_email, u.email)
    def testPostAsAuthenticatedUserWithoutFullname(self):
        """
        Check that the user's name in the comment is populated for
        authenticated users without first_name and last_name.
        """
        user = User.objects.create_user(username='jane_other',
                email='jane@example.com', password='jane_other')
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        data['name'] = data['email'] = ''
        self.client.login(username="jane_other", password="jane_other")
        self.response = self.client.post("/post/", data, REMOTE_ADDR="1.2.3.4")
        c = Comment.objects.get(user=user)
        self.assertEqual(c.ip_address, "1.2.3.4")
        # Falls back to the username when no full name is set.
        self.assertEqual(c.user_name, 'jane_other')
        user.delete()
    def testPreventDuplicateComments(self):
        """Prevent posting the exact same comment twice"""
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        self.client.post("/post/", data)
        self.client.post("/post/", data)
        self.assertEqual(Comment.objects.count(), 1)
        # This should not trigger the duplicate prevention
        self.client.post("/post/", dict(data, comment="My second comment."))
        self.assertEqual(Comment.objects.count(), 2)
    def testCommentSignals(self):
        """Test signals emitted by the comment posting view"""
        # callback
        def receive(sender, **kwargs):
            self.assertEqual(kwargs['comment'].comment, "This is my comment")
            self.assertTrue('request' in kwargs)
            received_signals.append(kwargs.get('signal'))
        # Connect signals and keep track of handled ones
        received_signals = []
        expected_signals = [
            signals.comment_will_be_posted, signals.comment_was_posted
        ]
        for signal in expected_signals:
            signal.connect(receive)
        # Post a comment and check the signals
        self.testCreateValidComment()
        self.assertEqual(received_signals, expected_signals)
        # Disconnect so the receiver does not leak into other tests.
        for signal in expected_signals:
            signal.disconnect(receive)
    def testWillBePostedSignal(self):
        """
        Test that the comment_will_be_posted signal can prevent the comment from
        actually getting saved
        """
        # Returning False from a receiver vetoes the post (-> 400, no save).
        def receive(sender, **kwargs): return False
        signals.comment_will_be_posted.connect(receive, dispatch_uid="comment-test")
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        response = self.client.post("/post/", data)
        self.assertEqual(response.status_code, 400)
        self.assertEqual(Comment.objects.count(), 0)
        signals.comment_will_be_posted.disconnect(dispatch_uid="comment-test")
    def testWillBePostedSignalModifyComment(self):
        """
        Test that the comment_will_be_posted signal can modify a comment before
        it gets posted
        """
        def receive(sender, **kwargs):
            # a bad but effective spam filter :)...
            kwargs['comment'].is_public = False
        signals.comment_will_be_posted.connect(receive)
        self.testCreateValidComment()
        c = Comment.objects.all()[0]
        self.assertFalse(c.is_public)
    # NOTE(review): these methods use ``assertTrue(match != None, ...)``;
    # ``assertIsNotNone(match, ...)`` (or ``is not None``) is the idiomatic
    # form -- worth changing consistently across all of them at once.
    def testCommentNext(self):
        """Test the different "next" actions the comment view can take"""
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        response = self.client.post("/post/", data)
        location = response["Location"]
        match = post_redirect_re.match(location)
        self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
        # An explicit "next" value redirects there, with ?c=<pk> appended.
        data["next"] = "/somewhere/else/"
        data["comment"] = "This is another comment"
        response = self.client.post("/post/", data)
        location = response["Location"]
        match = re.search(r"^http://testserver/somewhere/else/\?c=\d+$", location)
        self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
    # The "posted" page renders the comment identified by the ?c= pk.
    def testCommentDoneView(self):
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        response = self.client.post("/post/", data)
        location = response["Location"]
        match = post_redirect_re.match(location)
        self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
        pk = int(match.group('pk'))
        response = self.client.get(location)
        self.assertTemplateUsed(response, "comments/posted.html")
        self.assertEqual(response.context[0]["comment"], Comment.objects.get(pk=pk))
    def testCommentNextWithQueryString(self):
        """
        The `next` key needs to handle already having a query string (#10585)
        """
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        data["next"] = "/somewhere/else/?foo=bar"
        data["comment"] = "This is another comment"
        response = self.client.post("/post/", data)
        location = response["Location"]
        match = re.search(r"^http://testserver/somewhere/else/\?foo=bar&c=\d+$", location)
        self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
    def testCommentPostRedirectWithInvalidIntegerPK(self):
        """
        Tests that attempting to retrieve the location specified in the
        post redirect, after adding some invalid data to the expected
        querystring it ends with, doesn't cause a server error.
        """
        a = Article.objects.get(pk=1)
        data = self.getValidData(a)
        data["comment"] = "This is another comment"
        response = self.client.post("/post/", data)
        location = response["Location"]
        broken_location = location + u"\ufffd"
        response = self.client.get(broken_location)
        self.assertEqual(response.status_code, 200)
def testCommentNextWithQueryStringAndAnchor(self):
"""
The `next` key needs to handle already having an anchor. Refs #13411.
"""
# With a query string also.
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["next"] = "/somewhere/else/?foo=bar#baz"
data["comment"] = "This is another comment"
response = self.client.post("/post/", data)
location = response["Location"]
match = re.search(r"^http://testserver/somewhere/else/\?foo=bar&c=\d+#baz$", location)
self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
# Without a query string
a = Article.objects.get(pk=1)
data = self.getValidData(a)
data["next"] = "/somewhere/else/#baz"
data["comment"] = "This is another comment"
response = self.client.post("/post/", data)
location = response["Location"]
match = re.search(r"^http://testserver/somewhere/else/\?c=\d+#baz$", location)
self.assertTrue(match != None, "Unexpected redirect location: %s" % location)
| bsd-3-clause |
goodwinnk/intellij-community | python/lib/Lib/site-packages/django/core/files/utils.py | 901 | 1230 | class FileProxyMixin(object):
"""
A mixin class used to forward file methods to an underlaying file
object. The internal file object has to be called "file"::
class FileProxy(FileProxyMixin):
def __init__(self, file):
self.file = file
"""
encoding = property(lambda self: self.file.encoding)
fileno = property(lambda self: self.file.fileno)
flush = property(lambda self: self.file.flush)
isatty = property(lambda self: self.file.isatty)
newlines = property(lambda self: self.file.newlines)
read = property(lambda self: self.file.read)
readinto = property(lambda self: self.file.readinto)
readline = property(lambda self: self.file.readline)
readlines = property(lambda self: self.file.readlines)
seek = property(lambda self: self.file.seek)
softspace = property(lambda self: self.file.softspace)
tell = property(lambda self: self.file.tell)
truncate = property(lambda self: self.file.truncate)
write = property(lambda self: self.file.write)
writelines = property(lambda self: self.file.writelines)
xreadlines = property(lambda self: self.file.xreadlines)
def __iter__(self):
return iter(self.file)
| apache-2.0 |
ZachRiegel/scriptbin | pypyjs/modules/unittest/test/test_assertions.py | 52 | 11961 | import datetime
import unittest
class Test_Assertions(unittest.TestCase):
def test_AlmostEqual(self):
self.assertAlmostEqual(1.00000001, 1.0)
self.assertNotAlmostEqual(1.0000001, 1.0)
self.assertRaises(self.failureException,
self.assertAlmostEqual, 1.0000001, 1.0)
self.assertRaises(self.failureException,
self.assertNotAlmostEqual, 1.00000001, 1.0)
self.assertAlmostEqual(1.1, 1.0, places=0)
self.assertRaises(self.failureException,
self.assertAlmostEqual, 1.1, 1.0, places=1)
self.assertAlmostEqual(0, .1+.1j, places=0)
self.assertNotAlmostEqual(0, .1+.1j, places=1)
self.assertRaises(self.failureException,
self.assertAlmostEqual, 0, .1+.1j, places=1)
self.assertRaises(self.failureException,
self.assertNotAlmostEqual, 0, .1+.1j, places=0)
self.assertAlmostEqual(float('inf'), float('inf'))
self.assertRaises(self.failureException, self.assertNotAlmostEqual,
float('inf'), float('inf'))
def test_AmostEqualWithDelta(self):
self.assertAlmostEqual(1.1, 1.0, delta=0.5)
self.assertAlmostEqual(1.0, 1.1, delta=0.5)
self.assertNotAlmostEqual(1.1, 1.0, delta=0.05)
self.assertNotAlmostEqual(1.0, 1.1, delta=0.05)
self.assertAlmostEqual(1.0, 1.0, delta=0.5)
self.assertRaises(self.failureException, self.assertNotAlmostEqual,
1.0, 1.0, delta=0.5)
self.assertRaises(self.failureException, self.assertAlmostEqual,
1.1, 1.0, delta=0.05)
self.assertRaises(self.failureException, self.assertNotAlmostEqual,
1.1, 1.0, delta=0.5)
self.assertRaises(TypeError, self.assertAlmostEqual,
1.1, 1.0, places=2, delta=2)
self.assertRaises(TypeError, self.assertNotAlmostEqual,
1.1, 1.0, places=2, delta=2)
first = datetime.datetime.now()
second = first + datetime.timedelta(seconds=10)
self.assertAlmostEqual(first, second,
delta=datetime.timedelta(seconds=20))
self.assertNotAlmostEqual(first, second,
delta=datetime.timedelta(seconds=5))
def test_assertRaises(self):
def _raise(e):
raise e
self.assertRaises(KeyError, _raise, KeyError)
self.assertRaises(KeyError, _raise, KeyError("key"))
try:
self.assertRaises(KeyError, lambda: None)
except self.failureException as e:
self.assertIn("KeyError not raised", e.args)
else:
self.fail("assertRaises() didn't fail")
try:
self.assertRaises(KeyError, _raise, ValueError)
except ValueError:
pass
else:
self.fail("assertRaises() didn't let exception pass through")
with self.assertRaises(KeyError) as cm:
try:
raise KeyError
except Exception, e:
raise
self.assertIs(cm.exception, e)
with self.assertRaises(KeyError):
raise KeyError("key")
try:
with self.assertRaises(KeyError):
pass
except self.failureException as e:
self.assertIn("KeyError not raised", e.args)
else:
self.fail("assertRaises() didn't fail")
try:
with self.assertRaises(KeyError):
raise ValueError
except ValueError:
pass
else:
self.fail("assertRaises() didn't let exception pass through")
def testAssertNotRegexpMatches(self):
self.assertNotRegexpMatches('Ala ma kota', r'r+')
try:
self.assertNotRegexpMatches('Ala ma kota', r'k.t', 'Message')
except self.failureException, e:
self.assertIn("'kot'", e.args[0])
self.assertIn('Message', e.args[0])
else:
self.fail('assertNotRegexpMatches should have failed.')
class TestLongMessage(unittest.TestCase):
"""Test that the individual asserts honour longMessage.
This actually tests all the message behaviour for
asserts that use longMessage."""
def setUp(self):
class TestableTestFalse(unittest.TestCase):
longMessage = False
failureException = self.failureException
def testTest(self):
pass
class TestableTestTrue(unittest.TestCase):
longMessage = True
failureException = self.failureException
def testTest(self):
pass
self.testableTrue = TestableTestTrue('testTest')
self.testableFalse = TestableTestFalse('testTest')
def testDefault(self):
self.assertFalse(unittest.TestCase.longMessage)
def test_formatMsg(self):
self.assertEqual(self.testableFalse._formatMessage(None, "foo"), "foo")
self.assertEqual(self.testableFalse._formatMessage("foo", "bar"), "foo")
self.assertEqual(self.testableTrue._formatMessage(None, "foo"), "foo")
self.assertEqual(self.testableTrue._formatMessage("foo", "bar"), "bar : foo")
# This blows up if _formatMessage uses string concatenation
self.testableTrue._formatMessage(object(), 'foo')
def test_formatMessage_unicode_error(self):
one = ''.join(chr(i) for i in range(255))
# this used to cause a UnicodeDecodeError constructing msg
self.testableTrue._formatMessage(one, u'\uFFFD')
def assertMessages(self, methodName, args, errors):
def getMethod(i):
useTestableFalse = i < 2
if useTestableFalse:
test = self.testableFalse
else:
test = self.testableTrue
return getattr(test, methodName)
for i, expected_regexp in enumerate(errors):
testMethod = getMethod(i)
kwargs = {}
withMsg = i % 2
if withMsg:
kwargs = {"msg": "oops"}
with self.assertRaisesRegexp(self.failureException,
expected_regexp=expected_regexp):
testMethod(*args, **kwargs)
def testAssertTrue(self):
self.assertMessages('assertTrue', (False,),
["^False is not true$", "^oops$", "^False is not true$",
"^False is not true : oops$"])
def testAssertFalse(self):
self.assertMessages('assertFalse', (True,),
["^True is not false$", "^oops$", "^True is not false$",
"^True is not false : oops$"])
def testNotEqual(self):
self.assertMessages('assertNotEqual', (1, 1),
["^1 == 1$", "^oops$", "^1 == 1$",
"^1 == 1 : oops$"])
def testAlmostEqual(self):
self.assertMessages('assertAlmostEqual', (1, 2),
["^1 != 2 within 7 places$", "^oops$",
"^1 != 2 within 7 places$", "^1 != 2 within 7 places : oops$"])
def testNotAlmostEqual(self):
self.assertMessages('assertNotAlmostEqual', (1, 1),
["^1 == 1 within 7 places$", "^oops$",
"^1 == 1 within 7 places$", "^1 == 1 within 7 places : oops$"])
def test_baseAssertEqual(self):
self.assertMessages('_baseAssertEqual', (1, 2),
["^1 != 2$", "^oops$", "^1 != 2$", "^1 != 2 : oops$"])
def testAssertSequenceEqual(self):
# Error messages are multiline so not testing on full message
# assertTupleEqual and assertListEqual delegate to this method
self.assertMessages('assertSequenceEqual', ([], [None]),
["\+ \[None\]$", "^oops$", r"\+ \[None\]$",
r"\+ \[None\] : oops$"])
def testAssertSetEqual(self):
self.assertMessages('assertSetEqual', (set(), set([None])),
["None$", "^oops$", "None$",
"None : oops$"])
def testAssertIn(self):
self.assertMessages('assertIn', (None, []),
['^None not found in \[\]$', "^oops$",
'^None not found in \[\]$',
'^None not found in \[\] : oops$'])
def testAssertNotIn(self):
self.assertMessages('assertNotIn', (None, [None]),
['^None unexpectedly found in \[None\]$', "^oops$",
'^None unexpectedly found in \[None\]$',
'^None unexpectedly found in \[None\] : oops$'])
def testAssertDictEqual(self):
self.assertMessages('assertDictEqual', ({}, {'key': 'value'}),
[r"\+ \{'key': 'value'\}$", "^oops$",
"\+ \{'key': 'value'\}$",
"\+ \{'key': 'value'\} : oops$"])
def testAssertDictContainsSubset(self):
self.assertMessages('assertDictContainsSubset', ({'key': 'value'}, {}),
["^Missing: 'key'$", "^oops$",
"^Missing: 'key'$",
"^Missing: 'key' : oops$"])
def testAssertMultiLineEqual(self):
self.assertMessages('assertMultiLineEqual', ("", "foo"),
[r"\+ foo$", "^oops$",
r"\+ foo$",
r"\+ foo : oops$"])
def testAssertLess(self):
self.assertMessages('assertLess', (2, 1),
["^2 not less than 1$", "^oops$",
"^2 not less than 1$", "^2 not less than 1 : oops$"])
def testAssertLessEqual(self):
self.assertMessages('assertLessEqual', (2, 1),
["^2 not less than or equal to 1$", "^oops$",
"^2 not less than or equal to 1$",
"^2 not less than or equal to 1 : oops$"])
def testAssertGreater(self):
self.assertMessages('assertGreater', (1, 2),
["^1 not greater than 2$", "^oops$",
"^1 not greater than 2$",
"^1 not greater than 2 : oops$"])
def testAssertGreaterEqual(self):
self.assertMessages('assertGreaterEqual', (1, 2),
["^1 not greater than or equal to 2$", "^oops$",
"^1 not greater than or equal to 2$",
"^1 not greater than or equal to 2 : oops$"])
def testAssertIsNone(self):
self.assertMessages('assertIsNone', ('not None',),
["^'not None' is not None$", "^oops$",
"^'not None' is not None$",
"^'not None' is not None : oops$"])
def testAssertIsNotNone(self):
self.assertMessages('assertIsNotNone', (None,),
["^unexpectedly None$", "^oops$",
"^unexpectedly None$",
"^unexpectedly None : oops$"])
def testAssertIs(self):
self.assertMessages('assertIs', (None, 'foo'),
["^None is not 'foo'$", "^oops$",
"^None is not 'foo'$",
"^None is not 'foo' : oops$"])
def testAssertIsNot(self):
self.assertMessages('assertIsNot', (None, None),
["^unexpectedly identical: None$", "^oops$",
"^unexpectedly identical: None$",
"^unexpectedly identical: None : oops$"])
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| gpl-3.0 |
TeslaProject/external_chromium_org | tools/auto_bisect/math_utils.py | 27 | 3966 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""General statistical or mathematical functions."""
import math
def TruncatedMean(data_set, truncate_percent):
"""Calculates the truncated mean of a set of values.
Note that this isn't just the mean of the set of values with the highest
and lowest values discarded; the non-discarded values are also weighted
differently depending how many values are discarded.
Args:
data_set: Non-empty list of values.
truncate_percent: How much of the upper and lower portions of the data set
to discard, expressed as a value in [0, 1].
Returns:
The truncated mean as a float.
Raises:
TypeError: The data set was empty after discarding values.
"""
if len(data_set) > 2:
data_set = sorted(data_set)
discard_num_float = len(data_set) * truncate_percent
discard_num_int = int(math.floor(discard_num_float))
kept_weight = len(data_set) - discard_num_float * 2
data_set = data_set[discard_num_int:len(data_set)-discard_num_int]
weight_left = 1.0 - (discard_num_float - discard_num_int)
if weight_left < 1:
# If the % to discard leaves a fractional portion, need to weight those
# values.
unweighted_vals = data_set[1:len(data_set)-1]
weighted_vals = [data_set[0], data_set[len(data_set)-1]]
weighted_vals = [w * weight_left for w in weighted_vals]
data_set = weighted_vals + unweighted_vals
else:
kept_weight = len(data_set)
truncated_mean = reduce(lambda x, y: float(x) + float(y),
data_set) / kept_weight
return truncated_mean
def Mean(values):
"""Calculates the arithmetic mean of a list of values."""
return TruncatedMean(values, 0.0)
def Variance(values):
"""Calculates the sample variance."""
if len(values) == 1:
return 0.0
mean = Mean(values)
differences_from_mean = [float(x) - mean for x in values]
squared_differences = [float(x * x) for x in differences_from_mean]
variance = sum(squared_differences) / (len(values) - 1)
return variance
def StandardDeviation(values):
"""Calculates the sample standard deviation of the given list of values."""
return math.sqrt(Variance(values))
def RelativeChange(before, after):
"""Returns the relative change of before and after, relative to before.
There are several different ways to define relative difference between
two numbers; sometimes it is defined as relative to the smaller number,
or to the mean of the two numbers. This version returns the difference
relative to the first of the two numbers.
Args:
before: A number representing an earlier value.
after: Another number, representing a later value.
Returns:
A non-negative floating point number; 0.1 represents a 10% change.
"""
if before == after:
return 0.0
if before == 0:
return float('nan')
difference = after - before
return math.fabs(difference / before)
def PooledStandardError(work_sets):
"""Calculates the pooled sample standard error for a set of samples.
Args:
work_sets: A collection of collections of numbers.
Returns:
Pooled sample standard error.
"""
numerator = 0.0
denominator1 = 0.0
denominator2 = 0.0
for current_set in work_sets:
std_dev = StandardDeviation(current_set)
numerator += (len(current_set) - 1) * std_dev ** 2
denominator1 += len(current_set) - 1
if len(current_set) > 0:
denominator2 += 1.0 / len(current_set)
if denominator1 == 0:
return 0.0
return math.sqrt(numerator / denominator1) * math.sqrt(denominator2)
# Redefining built-in 'StandardError'
# pylint: disable=W0622
def StandardError(values):
"""Calculates the standard error of a list of values."""
if len(values) <= 1:
return 0.0
std_dev = StandardDeviation(values)
return std_dev / math.sqrt(len(values))
| bsd-3-clause |
mspark93/VTK | ThirdParty/Twisted/twisted/web/test/test_httpauth.py | 41 | 22445 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.web._auth}.
"""
from zope.interface import implements
from zope.interface.verify import verifyObject
from twisted.trial import unittest
from twisted.python.failure import Failure
from twisted.internet.error import ConnectionDone
from twisted.internet.address import IPv4Address
from twisted.cred import error, portal
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
from twisted.cred.checkers import ANONYMOUS, AllowAnonymousAccess
from twisted.cred.credentials import IUsernamePassword
from twisted.web.iweb import ICredentialFactory
from twisted.web.resource import IResource, Resource, getChildForRequest
from twisted.web._auth import basic, digest
from twisted.web._auth.wrapper import HTTPAuthSessionWrapper, UnauthorizedResource
from twisted.web._auth.basic import BasicCredentialFactory
from twisted.web.server import NOT_DONE_YET
from twisted.web.static import Data
from twisted.web.test.test_web import DummyRequest
def b64encode(s):
return s.encode('base64').strip()
class BasicAuthTestsMixin:
"""
L{TestCase} mixin class which defines a number of tests for
L{basic.BasicCredentialFactory}. Because this mixin defines C{setUp}, it
must be inherited before L{TestCase}.
"""
def setUp(self):
self.request = self.makeRequest()
self.realm = 'foo'
self.username = 'dreid'
self.password = 'S3CuR1Ty'
self.credentialFactory = basic.BasicCredentialFactory(self.realm)
def makeRequest(self, method='GET', clientAddress=None):
"""
Create a request object to be passed to
L{basic.BasicCredentialFactory.decode} along with a response value.
Override this in a subclass.
"""
raise NotImplementedError("%r did not implement makeRequest" % (
self.__class__,))
def test_interface(self):
"""
L{BasicCredentialFactory} implements L{ICredentialFactory}.
"""
self.assertTrue(
verifyObject(ICredentialFactory, self.credentialFactory))
def test_usernamePassword(self):
"""
L{basic.BasicCredentialFactory.decode} turns a base64-encoded response
into a L{UsernamePassword} object with a password which reflects the
one which was encoded in the response.
"""
response = b64encode('%s:%s' % (self.username, self.password))
creds = self.credentialFactory.decode(response, self.request)
self.assertTrue(IUsernamePassword.providedBy(creds))
self.assertTrue(creds.checkPassword(self.password))
self.assertFalse(creds.checkPassword(self.password + 'wrong'))
def test_incorrectPadding(self):
"""
L{basic.BasicCredentialFactory.decode} decodes a base64-encoded
response with incorrect padding.
"""
response = b64encode('%s:%s' % (self.username, self.password))
response = response.strip('=')
creds = self.credentialFactory.decode(response, self.request)
self.assertTrue(verifyObject(IUsernamePassword, creds))
self.assertTrue(creds.checkPassword(self.password))
def test_invalidEncoding(self):
"""
L{basic.BasicCredentialFactory.decode} raises L{LoginFailed} if passed
a response which is not base64-encoded.
"""
response = 'x' # one byte cannot be valid base64 text
self.assertRaises(
error.LoginFailed,
self.credentialFactory.decode, response, self.makeRequest())
def test_invalidCredentials(self):
"""
L{basic.BasicCredentialFactory.decode} raises L{LoginFailed} when
passed a response which is not valid base64-encoded text.
"""
response = b64encode('123abc+/')
self.assertRaises(
error.LoginFailed,
self.credentialFactory.decode,
response, self.makeRequest())
class RequestMixin:
def makeRequest(self, method='GET', clientAddress=None):
"""
Create a L{DummyRequest} (change me to create a
L{twisted.web.http.Request} instead).
"""
request = DummyRequest('/')
request.method = method
request.client = clientAddress
return request
class BasicAuthTestCase(RequestMixin, BasicAuthTestsMixin, unittest.TestCase):
"""
Basic authentication tests which use L{twisted.web.http.Request}.
"""
class DigestAuthTestCase(RequestMixin, unittest.TestCase):
"""
Digest authentication tests which use L{twisted.web.http.Request}.
"""
def setUp(self):
"""
Create a DigestCredentialFactory for testing
"""
self.realm = "test realm"
self.algorithm = "md5"
self.credentialFactory = digest.DigestCredentialFactory(
self.algorithm, self.realm)
self.request = self.makeRequest()
def test_decode(self):
"""
L{digest.DigestCredentialFactory.decode} calls the C{decode} method on
L{twisted.cred.digest.DigestCredentialFactory} with the HTTP method and
host of the request.
"""
host = '169.254.0.1'
method = 'GET'
done = [False]
response = object()
def check(_response, _method, _host):
self.assertEqual(response, _response)
self.assertEqual(method, _method)
self.assertEqual(host, _host)
done[0] = True
self.patch(self.credentialFactory.digest, 'decode', check)
req = self.makeRequest(method, IPv4Address('TCP', host, 81))
self.credentialFactory.decode(response, req)
self.assertTrue(done[0])
def test_interface(self):
"""
L{DigestCredentialFactory} implements L{ICredentialFactory}.
"""
self.assertTrue(
verifyObject(ICredentialFactory, self.credentialFactory))
def test_getChallenge(self):
"""
The challenge issued by L{DigestCredentialFactory.getChallenge} must
include C{'qop'}, C{'realm'}, C{'algorithm'}, C{'nonce'}, and
C{'opaque'} keys. The values for the C{'realm'} and C{'algorithm'}
keys must match the values supplied to the factory's initializer.
None of the values may have newlines in them.
"""
challenge = self.credentialFactory.getChallenge(self.request)
self.assertEqual(challenge['qop'], 'auth')
self.assertEqual(challenge['realm'], 'test realm')
self.assertEqual(challenge['algorithm'], 'md5')
self.assertIn('nonce', challenge)
self.assertIn('opaque', challenge)
for v in challenge.values():
self.assertNotIn('\n', v)
def test_getChallengeWithoutClientIP(self):
"""
L{DigestCredentialFactory.getChallenge} can issue a challenge even if
the L{Request} it is passed returns C{None} from C{getClientIP}.
"""
request = self.makeRequest('GET', None)
challenge = self.credentialFactory.getChallenge(request)
self.assertEqual(challenge['qop'], 'auth')
self.assertEqual(challenge['realm'], 'test realm')
self.assertEqual(challenge['algorithm'], 'md5')
self.assertIn('nonce', challenge)
self.assertIn('opaque', challenge)
class UnauthorizedResourceTests(unittest.TestCase):
"""
Tests for L{UnauthorizedResource}.
"""
def test_getChildWithDefault(self):
"""
An L{UnauthorizedResource} is every child of itself.
"""
resource = UnauthorizedResource([])
self.assertIdentical(
resource.getChildWithDefault("foo", None), resource)
self.assertIdentical(
resource.getChildWithDefault("bar", None), resource)
def _unauthorizedRenderTest(self, request):
"""
Render L{UnauthorizedResource} for the given request object and verify
that the response code is I{Unauthorized} and that a I{WWW-Authenticate}
header is set in the response containing a challenge.
"""
resource = UnauthorizedResource([
BasicCredentialFactory('example.com')])
request.render(resource)
self.assertEqual(request.responseCode, 401)
self.assertEqual(
request.responseHeaders.getRawHeaders('www-authenticate'),
['basic realm="example.com"'])
def test_render(self):
"""
L{UnauthorizedResource} renders with a 401 response code and a
I{WWW-Authenticate} header and puts a simple unauthorized message
into the response body.
"""
request = DummyRequest([''])
self._unauthorizedRenderTest(request)
self.assertEqual('Unauthorized', ''.join(request.written))
def test_renderHEAD(self):
"""
The rendering behavior of L{UnauthorizedResource} for a I{HEAD} request
is like its handling of a I{GET} request, but no response body is
written.
"""
request = DummyRequest([''])
request.method = 'HEAD'
self._unauthorizedRenderTest(request)
self.assertEqual('', ''.join(request.written))
def test_renderQuotesRealm(self):
"""
The realm value included in the I{WWW-Authenticate} header set in
the response when L{UnauthorizedResounrce} is rendered has quotes
and backslashes escaped.
"""
resource = UnauthorizedResource([
BasicCredentialFactory('example\\"foo')])
request = DummyRequest([''])
request.render(resource)
self.assertEqual(
request.responseHeaders.getRawHeaders('www-authenticate'),
['basic realm="example\\\\\\"foo"'])
class Realm(object):
"""
A simple L{IRealm} implementation which gives out L{WebAvatar} for any
avatarId.
@type loggedIn: C{int}
@ivar loggedIn: The number of times C{requestAvatar} has been invoked for
L{IResource}.
@type loggedOut: C{int}
@ivar loggedOut: The number of times the logout callback has been invoked.
"""
implements(portal.IRealm)
def __init__(self, avatarFactory):
self.loggedOut = 0
self.loggedIn = 0
self.avatarFactory = avatarFactory
def requestAvatar(self, avatarId, mind, *interfaces):
if IResource in interfaces:
self.loggedIn += 1
return IResource, self.avatarFactory(avatarId), self.logout
raise NotImplementedError()
def logout(self):
self.loggedOut += 1
class HTTPAuthHeaderTests(unittest.TestCase):
"""
Tests for L{HTTPAuthSessionWrapper}.
"""
makeRequest = DummyRequest
def setUp(self):
"""
Create a realm, portal, and L{HTTPAuthSessionWrapper} to use in the tests.
"""
self.username = 'foo bar'
self.password = 'bar baz'
self.avatarContent = "contents of the avatar resource itself"
self.childName = "foo-child"
self.childContent = "contents of the foo child of the avatar"
self.checker = InMemoryUsernamePasswordDatabaseDontUse()
self.checker.addUser(self.username, self.password)
self.avatar = Data(self.avatarContent, 'text/plain')
self.avatar.putChild(
self.childName, Data(self.childContent, 'text/plain'))
self.avatars = {self.username: self.avatar}
self.realm = Realm(self.avatars.get)
self.portal = portal.Portal(self.realm, [self.checker])
self.credentialFactories = []
self.wrapper = HTTPAuthSessionWrapper(
self.portal, self.credentialFactories)
def _authorizedBasicLogin(self, request):
"""
Add an I{basic authorization} header to the given request and then
dispatch it, starting from C{self.wrapper} and returning the resulting
L{IResource}.
"""
authorization = b64encode(self.username + ':' + self.password)
request.headers['authorization'] = 'Basic ' + authorization
return getChildForRequest(self.wrapper, request)
def test_getChildWithDefault(self):
"""
Resource traversal which encounters an L{HTTPAuthSessionWrapper}
results in an L{UnauthorizedResource} instance when the request does
not have the required I{Authorization} headers.
"""
request = self.makeRequest([self.childName])
child = getChildForRequest(self.wrapper, request)
d = request.notifyFinish()
def cbFinished(result):
self.assertEqual(request.responseCode, 401)
d.addCallback(cbFinished)
request.render(child)
return d
def _invalidAuthorizationTest(self, response):
"""
Create a request with the given value as the value of an
I{Authorization} header and perform resource traversal with it,
starting at C{self.wrapper}. Assert that the result is a 401 response
code. Return a L{Deferred} which fires when this is all done.
"""
self.credentialFactories.append(BasicCredentialFactory('example.com'))
request = self.makeRequest([self.childName])
request.headers['authorization'] = response
child = getChildForRequest(self.wrapper, request)
d = request.notifyFinish()
def cbFinished(result):
self.assertEqual(request.responseCode, 401)
d.addCallback(cbFinished)
request.render(child)
return d
def test_getChildWithDefaultUnauthorizedUser(self):
"""
Resource traversal which enouncters an L{HTTPAuthSessionWrapper}
results in an L{UnauthorizedResource} when the request has an
I{Authorization} header with a user which does not exist.
"""
return self._invalidAuthorizationTest('Basic ' + b64encode('foo:bar'))
def test_getChildWithDefaultUnauthorizedPassword(self):
"""
Resource traversal which enouncters an L{HTTPAuthSessionWrapper}
results in an L{UnauthorizedResource} when the request has an
I{Authorization} header with a user which exists and the wrong
password.
"""
return self._invalidAuthorizationTest(
'Basic ' + b64encode(self.username + ':bar'))
def test_getChildWithDefaultUnrecognizedScheme(self):
"""
Resource traversal which enouncters an L{HTTPAuthSessionWrapper}
results in an L{UnauthorizedResource} when the request has an
I{Authorization} header with an unrecognized scheme.
"""
return self._invalidAuthorizationTest('Quux foo bar baz')
def test_getChildWithDefaultAuthorized(self):
"""
Resource traversal which encounters an L{HTTPAuthSessionWrapper}
results in an L{IResource} which renders the L{IResource} avatar
retrieved from the portal when the request has a valid I{Authorization}
header.
"""
self.credentialFactories.append(BasicCredentialFactory('example.com'))
request = self.makeRequest([self.childName])
child = self._authorizedBasicLogin(request)
d = request.notifyFinish()
def cbFinished(ignored):
self.assertEqual(request.written, [self.childContent])
d.addCallback(cbFinished)
request.render(child)
return d
def test_renderAuthorized(self):
"""
Resource traversal which terminates at an L{HTTPAuthSessionWrapper}
and includes correct authentication headers results in the
L{IResource} avatar (not one of its children) retrieved from the
portal being rendered.
"""
self.credentialFactories.append(BasicCredentialFactory('example.com'))
# Request it exactly, not any of its children.
request = self.makeRequest([])
child = self._authorizedBasicLogin(request)
d = request.notifyFinish()
def cbFinished(ignored):
self.assertEqual(request.written, [self.avatarContent])
d.addCallback(cbFinished)
request.render(child)
return d
def test_getChallengeCalledWithRequest(self):
"""
When L{HTTPAuthSessionWrapper} finds an L{ICredentialFactory} to issue
a challenge, it calls the C{getChallenge} method with the request as an
argument.
"""
class DumbCredentialFactory(object):
implements(ICredentialFactory)
scheme = 'dumb'
def __init__(self):
self.requests = []
def getChallenge(self, request):
self.requests.append(request)
return {}
factory = DumbCredentialFactory()
self.credentialFactories.append(factory)
request = self.makeRequest([self.childName])
child = getChildForRequest(self.wrapper, request)
d = request.notifyFinish()
def cbFinished(ignored):
self.assertEqual(factory.requests, [request])
d.addCallback(cbFinished)
request.render(child)
return d
def _logoutTest(self):
"""
Issue a request for an authentication-protected resource using valid
credentials and then return the C{DummyRequest} instance which was
used.
This is a helper for tests about the behavior of the logout
callback.
"""
self.credentialFactories.append(BasicCredentialFactory('example.com'))
class SlowerResource(Resource):
def render(self, request):
return NOT_DONE_YET
self.avatar.putChild(self.childName, SlowerResource())
request = self.makeRequest([self.childName])
child = self._authorizedBasicLogin(request)
request.render(child)
self.assertEqual(self.realm.loggedOut, 0)
return request
def test_logout(self):
"""
The realm's logout callback is invoked after the resource is rendered.
"""
request = self._logoutTest()
request.finish()
self.assertEqual(self.realm.loggedOut, 1)
def test_logoutOnError(self):
"""
The realm's logout callback is also invoked if there is an error
generating the response (for example, if the client disconnects
early).
"""
request = self._logoutTest()
request.processingFailed(
Failure(ConnectionDone("Simulated disconnect")))
self.assertEqual(self.realm.loggedOut, 1)
def test_decodeRaises(self):
"""
Resource traversal which enouncters an L{HTTPAuthSessionWrapper}
results in an L{UnauthorizedResource} when the request has a I{Basic
Authorization} header which cannot be decoded using base64.
"""
self.credentialFactories.append(BasicCredentialFactory('example.com'))
request = self.makeRequest([self.childName])
request.headers['authorization'] = 'Basic decode should fail'
child = getChildForRequest(self.wrapper, request)
self.assertIsInstance(child, UnauthorizedResource)
    def test_selectParseResponse(self):
        """
        L{HTTPAuthSessionWrapper._selectParseHeader} returns a two-tuple giving
        the L{ICredentialFactory} to use to parse the header and a string
        containing the portion of the header which remains to be parsed.
        """
        basicAuthorization = 'Basic abcdef123456'
        # With no credential factories registered, no scheme can match.
        self.assertEqual(
            self.wrapper._selectParseHeader(basicAuthorization),
            (None, None))
        factory = BasicCredentialFactory('example.com')
        self.credentialFactories.append(factory)
        # Once a Basic factory is registered it is selected and the scheme
        # prefix is stripped, leaving only the encoded credentials.
        self.assertEqual(
            self.wrapper._selectParseHeader(basicAuthorization),
            (factory, 'abcdef123456'))
    def test_unexpectedDecodeError(self):
        """
        Any unexpected exception raised by the credential factory's C{decode}
        method results in a 500 response code and causes the exception to be
        logged.
        """
        class UnexpectedException(Exception):
            pass
        # Minimal credential factory whose decode() always blows up.
        class BadFactory(object):
            scheme = 'bad'
            def getChallenge(self, client):
                return {}
            def decode(self, response, request):
                raise UnexpectedException()
        self.credentialFactories.append(BadFactory())
        request = self.makeRequest([self.childName])
        request.headers['authorization'] = 'Bad abc'
        child = getChildForRequest(self.wrapper, request)
        request.render(child)
        self.assertEqual(request.responseCode, 500)
        # The wrapper must log (not swallow) the unexpected error.
        self.assertEqual(len(self.flushLoggedErrors(UnexpectedException)), 1)
    def test_unexpectedLoginError(self):
        """
        Any unexpected failure from L{Portal.login} results in a 500 response
        code and causes the failure to be logged.
        """
        class UnexpectedException(Exception):
            pass
        # Checker whose requestAvatarId always fails in an unexpected way.
        class BrokenChecker(object):
            credentialInterfaces = (IUsernamePassword,)
            def requestAvatarId(self, credentials):
                raise UnexpectedException()
        self.portal.registerChecker(BrokenChecker())
        self.credentialFactories.append(BasicCredentialFactory('example.com'))
        request = self.makeRequest([self.childName])
        child = self._authorizedBasicLogin(request)
        request.render(child)
        self.assertEqual(request.responseCode, 500)
        self.assertEqual(len(self.flushLoggedErrors(UnexpectedException)), 1)
    def test_anonymousAccess(self):
        """
        Anonymous requests are allowed if a L{Portal} has an anonymous checker
        registered.
        """
        unprotectedContents = "contents of the unprotected child resource"
        self.avatars[ANONYMOUS] = Resource()
        self.avatars[ANONYMOUS].putChild(
            self.childName, Data(unprotectedContents, 'text/plain'))
        self.portal.registerChecker(AllowAnonymousAccess())
        self.credentialFactories.append(BasicCredentialFactory('example.com'))
        # No Authorization header: traversal should fall through to the
        # anonymous avatar's resource.
        request = self.makeRequest([self.childName])
        child = getChildForRequest(self.wrapper, request)
        d = request.notifyFinish()
        def cbFinished(ignored):
            self.assertEqual(request.written, [unprotectedContents])
        d.addCallback(cbFinished)
        request.render(child)
        return d
| bsd-3-clause |
TwilioDevEd/api-snippets | quickstart/python/autopilot/create-joke-samples/create_joke_samples.6.x.py | 1 | 1102 | # Download the helper library from https://www.twilio.com/docs/python/install
import os
from twilio.rest import Client

# Your Account Sid and Auth Token from twilio.com/console
# To set up environmental variables, see http://twil.io/secure
account_sid = os.environ['TWILIO_ACCOUNT_SID']
auth_token = os.environ['TWILIO_AUTH_TOKEN']
client = Client(account_sid, auth_token)

# Training phrases for the joke task.  Each entry should be unique: the
# original list contained 'Tell me a joke' twice, which would create a
# redundant duplicate sample for the same task and language.
phrases = [
    'Tell me a joke',
    'I\'d like to hear a joke',
    'Do you know any good jokes?',
    'Joke',
    'Tell joke',
    'Tell me something funny',
    'Make me laugh',
    'I want to hear a joke',
    'Can I hear a joke?',
    'I like jokes',
    'I\'d like to hear a punny joke'
]

# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
# The task is addressed by its unique name ('tell-a-joke'); a task SID
# ('UDXXX...') works here as well.
for phrase in phrases:
    sample = client.autopilot \
        .assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
        .tasks('tell-a-joke') \
        .samples \
        .create(language='en-us', tagged_text=phrase)

    print(sample.sid)
| mit |
sdoran35/hate-to-hugs | venv/lib/python3.6/site-packages/pip/utils/glibc.py | 350 | 2939 | from __future__ import absolute_import
import re
import ctypes
import platform
import warnings
def glibc_version_string():
    "Returns glibc version string, or None if not using glibc."
    # ctypes.CDLL(None) maps to dlopen(NULL); per the dlopen manpage that
    # returns a handle for the main program itself, so the dynamic linker
    # resolves symbols against whichever libc this process actually uses.
    libc = ctypes.CDLL(None)
    version_fn = getattr(libc, "gnu_get_libc_version", None)
    if version_fn is None:
        # Symbol absent -> we are not linked against glibc.
        return None
    # gnu_get_libc_version() returns a C string such as "2.5".
    version_fn.restype = ctypes.c_char_p
    version = version_fn()
    # py2 / py3 compatibility: decode the bytes object Python 3 hands back.
    if not isinstance(version, str):
        version = version.decode("ascii")
    return version
# Separated out from have_compatible_glibc for easier unit testing
def check_glibc_version(version_str, required_major, minimum_minor):
    """Return True if version_str names glibc of exactly required_major
    and at least minimum_minor."""
    # A regexp (rather than str.split) discards random junk trailing the
    # minor version, as seen in patched/forked glibcs -- e.g. Linaro's
    # "2.20-2014.11" style strings.  See gh-3588.
    parsed = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if parsed is None:
        warnings.warn("Expected glibc version with 2 components major.minor,"
                      " got: %s" % version_str, RuntimeWarning)
        return False
    major = int(parsed.group("major"))
    minor = int(parsed.group("minor"))
    # Same major series, and at least the requested minor revision.
    return major == required_major and minor >= minimum_minor
def have_compatible_glibc(required_major, minimum_minor):
    """True when the running libc is glibc of the given major series and
    at least the given minor version."""
    version = glibc_version_string()
    return (version is not None and
            check_glibc_version(version, required_major, minimum_minor))
# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on my computer, platform says:
#
# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
# ('glibc', '2.7')
# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
# ('glibc', '2.9')
#
# But the truth is:
#
# ~$ ldd --version
# ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate, because it means that the linehaul data on libc
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver():
    """Return a (libname, version) pair for the running libc.
    Prefers our own ctypes-based glibc probe, since platform.libc_ver()
    is unreliable for glibc (see the comment block above).
    """
    version = glibc_version_string()
    if version is not None:
        return ("glibc", version)
    # Not glibc: defer to platform.libc_ver for other libcs.
    return platform.libc_ver()
| mit |
felixma/nova | nova/virt/xenapi/fake.py | 47 | 38557 | # Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Parts of this file are based upon xmlrpclib.py, the XML-RPC client
# interface included in the Python distribution.
#
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
"""
A fake XenAPI SDK.
"""
import base64
import pickle
import random
import uuid
from xml.sax import saxutils
import zlib
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslo_utils import units
import six
from nova import exception
from nova.i18n import _
from nova.virt.xenapi.client import session as xenapi_session
_CLASSES = ['host', 'network', 'session', 'pool', 'SR', 'VBD',
'PBD', 'VDI', 'VIF', 'PIF', 'VM', 'VLAN', 'task']
_db_content = {}
LOG = logging.getLogger(__name__)
def reset():
    """Wipe every fake table, then recreate the baseline topology: one
    fake host carrying a running control domain (dom0)."""
    for table_name in _CLASSES:
        _db_content[table_name] = {}
    host = create_host('fake')
    create_vm('fake dom 0',
              'Running',
              is_a_template=False,
              is_control_domain=True,
              resident_on=host)
def reset_table(table):
    """Clear a single fake table; unknown table names are ignored."""
    if table in _CLASSES:
        _db_content[table] = {}
def _create_pool(name_label):
    """Insert a pool record with the given label; returns its ref."""
    pool_rec = {'name_label': name_label}
    return _create_object('pool', pool_rec)
def create_host(name_label, hostname='fake_name', address='fake_addr'):
    """Create a fake host with local SRs and a management PIF, wiring it
    into a pool (created on first call), and return the host ref.
    Previously this returned None even though reset() assigns the result
    and forwards it as the dom0's resident_on.
    """
    host_ref = _create_object('host',
                              {'name_label': name_label,
                               'hostname': hostname,
                               'address': address})
    host_default_sr_ref = _create_local_srs(host_ref)
    _create_local_pif(host_ref)
    # Create a pool if we don't have one already
    if len(_db_content['pool']) == 0:
        pool_ref = _create_pool('')
        _db_content['pool'][pool_ref]['master'] = host_ref
        _db_content['pool'][pool_ref]['default-SR'] = host_default_sr_ref
        _db_content['pool'][pool_ref]['suspend-image-SR'] = host_default_sr_ref
    return host_ref
def create_network(name_label, bridge):
    """Insert a fake network record; returns its ref."""
    net_rec = {'name_label': name_label,
               'bridge': bridge}
    return _create_object('network', net_rec)
def create_vm(name_label, status, **kwargs):
    """Create a fake VM record and return its ref.
    A 'Running' VM gets a random domid and is placed on the first fake
    host; any other power state leaves it unplaced with domid -1.  Extra
    kwargs are merged into the record before after_VM_create() fills in
    the read-only defaults.
    """
    if status == 'Running':
        domid = random.randrange(1, 1 << 16)
        # list() so this also works on Python 3, where dict.keys() is a
        # non-indexable view (the file already targets py2/py3 via six).
        resident_on = list(_db_content['host'])[0]
    else:
        domid = -1
        resident_on = ''
    vm_rec = kwargs.copy()
    vm_rec.update({'name_label': name_label,
                   'domid': domid,
                   'power_state': status,
                   'blocked_operations': {},
                   'resident_on': resident_on})
    vm_ref = _create_object('VM', vm_rec)
    after_VM_create(vm_ref, vm_rec)
    return vm_ref
def destroy_vm(vm_ref):
    """Delete a VM record, tearing down all of its VBDs first."""
    vbd_refs = _db_content['VM'][vm_ref]['VBDs']
    # destroy_vbd() removes itself from this list, so iterate a snapshot.
    for vbd_ref in list(vbd_refs):
        destroy_vbd(vbd_ref)
    del _db_content['VM'][vm_ref]
def destroy_vbd(vbd_ref):
    """Delete a VBD record and unhook it from its VM and VDI backrefs."""
    vbd_rec = _db_content['VBD'][vbd_ref]
    _db_content['VM'][vbd_rec['VM']]['VBDs'].remove(vbd_ref)
    _db_content['VDI'][vbd_rec['VDI']]['VBDs'].remove(vbd_ref)
    del _db_content['VBD'][vbd_ref]
def destroy_vdi(vdi_ref):
    """Delete a VDI record, tearing down every VBD that references it."""
    vbd_refs = _db_content['VDI'][vdi_ref]['VBDs']
    # destroy_vbd() removes itself from this list, so iterate a snapshot.
    for vbd_ref in list(vbd_refs):
        destroy_vbd(vbd_ref)
    del _db_content['VDI'][vdi_ref]
def create_vdi(name_label, sr_ref, **kwargs):
    """Create a fake VDI record in the given SR and return its ref.
    kwargs override the defaults below; after_VDI_create() then adds the
    read-only VBDs backref list.
    """
    vdi_rec = {
        'SR': sr_ref,
        'read_only': False,
        'type': '',
        'name_label': name_label,
        'name_description': '',
        'sharable': False,
        'other_config': {},
        'location': '',
        'xenstore_data': {},
        'sm_config': {'vhd-parent': None},
        'physical_utilisation': '123',
        'managed': True,
    }
    vdi_rec.update(kwargs)
    vdi_ref = _create_object('VDI', vdi_rec)
    after_VDI_create(vdi_ref, vdi_rec)
    return vdi_ref
def after_VDI_create(vdi_ref, vdi_rec):
    """Add the read-only VBDs backref list to a newly created VDI record."""
    if 'VBDs' not in vdi_rec:
        vdi_rec['VBDs'] = []
def create_vbd(vm_ref, vdi_ref, userdevice=0, other_config=None):
    """Create a fake VBD linking vm_ref and vdi_ref; returns its ref."""
    vbd_rec = {
        'VM': vm_ref,
        'VDI': vdi_ref,
        'userdevice': str(userdevice),
        'currently_attached': False,
        # Avoid a shared mutable default for other_config.
        'other_config': {} if other_config is None else other_config,
    }
    vbd_ref = _create_object('VBD', vbd_rec)
    after_VBD_create(vbd_ref, vbd_rec)
    return vbd_ref
def after_VBD_create(vbd_ref, vbd_rec):
    """Create read-only fields and backref from VM and VDI to VBD when VBD
    is created.
    """
    vbd_rec['currently_attached'] = False
    vbd_rec['device'] = ''
    if 'other_config' not in vbd_rec:
        vbd_rec['other_config'] = {}
    vm_rec = _db_content['VM'][vbd_rec['VM']]
    vm_rec['VBDs'].append(vbd_ref)
    vbd_rec['vm_name_label'] = vm_rec['name_label']
    vdi_ref = vbd_rec['VDI']
    # NULL VDI refs (e.g. empty CD drives) have no record to back-reference.
    if vdi_ref and vdi_ref != "OpaqueRef:NULL":
        _db_content['VDI'][vdi_ref]['VBDs'].append(vbd_ref)
def after_VIF_create(vif_ref, vif_rec):
    """Create backref from VM to VIF when VIF is created.
    """
    owner_vm = _db_content['VM'][vif_rec['VM']]
    owner_vm['VIFs'].append(vif_ref)
def after_VM_create(vm_ref, vm_rec):
    """Create read-only fields in the VM record."""
    # The tuple is rebuilt per call, so every record gets distinct list
    # objects for the VBDs/VIFs backrefs.
    for key, default in (
            ('domid', -1),
            ('is_control_domain', False),
            ('is_a_template', False),
            ('memory_static_max', str(8 * units.Gi)),
            ('memory_dynamic_max', str(8 * units.Gi)),
            ('VCPUs_max', str(4)),
            ('VBDs', []),
            ('VIFs', []),
            ('resident_on', '')):
        vm_rec.setdefault(key, default)
def create_pbd(host_ref, sr_ref, attached):
    """Create a fake PBD joining host_ref and sr_ref; returns its ref."""
    device_config = {'path': '/var/run/sr-mount/%s' % sr_ref}
    pbd_rec = {'device_config': device_config,
               'host': host_ref,
               'SR': sr_ref,
               'currently_attached': attached}
    return _create_object('PBD', pbd_rec)
def create_task(name_label):
    """Create a fake task in the 'pending' state; returns its ref."""
    task_rec = {'name_label': name_label,
                'status': 'pending'}
    return _create_object('task', task_rec)
def _create_local_srs(host_ref):
    """Create an SR that looks like the one created on the local disk by
    default by the XenServer installer. Also, fake the installation of
    an ISO SR.
    Returns the ref of the 'Local storage' (ext) SR, which the caller
    uses as the pool's default SR.
    """
    create_sr(name_label='Local storage ISO',
              type='iso',
              other_config={'i18n-original-value-name_label':
                            'Local storage ISO',
                            'i18n-key': 'local-storage-iso'},
              physical_size=80000,
              physical_utilisation=40000,
              virtual_allocation=80000,
              host_ref=host_ref)
    return create_sr(name_label='Local storage',
                     type='ext',
                     other_config={'i18n-original-value-name_label':
                                   'Local storage',
                                   'i18n-key': 'local-storage'},
                     physical_size=40000,
                     physical_utilisation=20000,
                     virtual_allocation=10000,
                     host_ref=host_ref)
def create_sr(**kwargs):
    """Create a fake SR record plus one attached PBD on kwargs['host_ref'];
    returns the SR ref.  Sizes default to 1 GiB physical and 0 utilised.
    """
    sr_ref = _create_object(
             'SR',
             {'name_label': kwargs.get('name_label'),
              'type': kwargs.get('type'),
              'content_type': kwargs.get('type', 'user'),
              'shared': kwargs.get('shared', False),
              'physical_size': kwargs.get('physical_size', str(1 << 30)),
              'physical_utilisation': str(
                                        kwargs.get('physical_utilisation', 0)),
              'virtual_allocation': str(kwargs.get('virtual_allocation', 0)),
              'other_config': kwargs.get('other_config', {}),
              'VDIs': kwargs.get('VDIs', [])})
    pbd_ref = create_pbd(kwargs.get('host_ref'), sr_ref, True)
    _db_content['SR'][sr_ref]['PBDs'] = [pbd_ref]
    return sr_ref
def _create_local_pif(host_ref):
    """Create the fake management PIF for a host; returns its ref."""
    pif_rec = {'name-label': 'Fake PIF',
               'MAC': '00:11:22:33:44:55',
               'physical': True,
               'VLAN': -1,
               'device': 'fake0',
               'host_uuid': host_ref,
               'network': '',
               'IP': '10.1.1.1',
               'IPv6': '',
               'uuid': '',
               'management': 'true'}
    pif_ref = _create_object('PIF', pif_rec)
    # The fake PIF reuses its own ref as its uuid.
    _db_content['PIF'][pif_ref]['uuid'] = pif_ref
    return pif_ref
def _create_object(table, obj):
    """Store obj in the named table under a fresh opaque ref, assigning
    the record a fresh uuid; returns the ref."""
    ref = str(uuid.uuid4())
    obj['uuid'] = str(uuid.uuid4())
    _db_content[table][ref] = obj
    return ref
def _create_sr(table, obj):
    """Fake XenAPI SR.create.
    obj is the positional argument list of SR.create; obj[2] looks like
    the record/device_config dict and obj[6] the SR type -- TODO confirm
    against the XenAPI SR.create signature.  Only iscsi and nfs types
    are accepted; iscsi SRs get a single VDI so they are usable at once.
    """
    sr_type = obj[6]
    # Forces fake to support iscsi only
    if sr_type != 'iscsi' and sr_type != 'nfs':
        raise Failure(['SR_UNKNOWN_DRIVER', sr_type])
    # list() so this also works on Python 3, where dict.keys() is a
    # non-indexable view (the file already targets py2/py3 via six).
    host_ref = list(_db_content['host'])[0]
    sr_ref = _create_object(table, obj[2])
    if sr_type == 'iscsi':
        vdi_ref = create_vdi('', sr_ref)
        pbd_ref = create_pbd(host_ref, sr_ref, True)
        _db_content['SR'][sr_ref]['VDIs'] = [vdi_ref]
        _db_content['SR'][sr_ref]['PBDs'] = [pbd_ref]
        _db_content['VDI'][vdi_ref]['SR'] = sr_ref
        _db_content['PBD'][pbd_ref]['SR'] = sr_ref
    return sr_ref
def _create_vlan(pif_ref, vlan_num, network_ref):
    """Create a VLAN PIF on top of pif_ref plus a VLAN record linking the
    tagged and untagged PIFs; returns the VLAN ref.
    (network_ref is accepted for signature compatibility but unused.)
    """
    parent_rec = get_record('PIF', pif_ref)
    vlan_pif_rec = {'name-label': 'Fake VLAN PIF',
                    'MAC': '00:11:22:33:44:55',
                    'physical': True,
                    'VLAN': vlan_num,
                    'device': parent_rec['device'],
                    'host_uuid': parent_rec['host_uuid']}
    vlan_pif_ref = _create_object('PIF', vlan_pif_rec)
    vlan_rec = {'tagged-pif': pif_ref,
                'untagged-pif': vlan_pif_ref,
                'tag': vlan_num}
    return _create_object('VLAN', vlan_rec)
def get_all(table):
    """Return the refs of every record in the given fake table."""
    return _db_content[table].keys()
def get_all_records(table):
    """Return the full {ref: record} mapping for the given fake table."""
    return _db_content[table]
def _query_matches(record, query):
# Simple support for the XenServer query language:
# 'field "host"="<uuid>" and field "SR"="<sr uuid>"'
# Tested through existing tests (e.g. calls to find_network_with_bridge)
and_clauses = query.split(" and ")
if len(and_clauses) > 1:
matches = True
for clause in and_clauses:
matches = matches and _query_matches(record, clause)
return matches
or_clauses = query.split(" or ")
if len(or_clauses) > 1:
matches = False
for clause in or_clauses:
matches = matches or _query_matches(record, clause)
return matches
if query[:4] == 'not ':
return not _query_matches(record, query[4:])
# Now it must be a single field - bad queries never match
if query[:5] != 'field':
return False
(field, value) = query[6:].split('=', 1)
# Some fields (e.g. name_label, memory_overhead) have double
# underscores in the DB, but only single underscores when querying
field = field.replace("__", "_").strip(" \"'")
value = value.strip(" \"'")
# Strings should be directly compared
if isinstance(record[field], str):
return record[field] == value
# But for all other value-checks, convert to a string first
# (Notably used for booleans - which can be lower or camel
# case and are interpreted/sanitised by XAPI)
return str(record[field]).lower() == value.lower()
def get_all_records_where(table_name, query):
    """Return {ref: record} for every record in the table matching query
    (see _query_matches for the supported query language)."""
    table = _db_content[table_name]
    return dict((ref, rec) for ref, rec in table.items()
                if _query_matches(rec, query))
def get_record(table, ref):
    """Return the record stored under ref in table, raising a XenAPI-style
    HANDLE_INVALID Failure for unknown refs."""
    records = _db_content[table]
    if ref not in records:
        raise Failure(['HANDLE_INVALID', table, ref])
    return records[ref]
def check_for_session_leaks():
    """Raise NovaException if any fake sessions were left logged in."""
    leaked = _db_content['session']
    if leaked:
        raise exception.NovaException('Sessions have leaked: %s' % leaked)
def as_value(s):
    """Helper function for simulating XenAPI plugin responses. It
    escapes and wraps the given argument.
    """
    escaped = saxutils.escape(s)
    return '<value>' + escaped + '</value>'
def as_json(*args, **kwargs):
    """Helper for simulating XenAPI plugin responses that return JSON.
    Positional arguments render as a JSON list; keyword arguments render
    as a JSON dict.
    """
    payload = args if args else kwargs
    return jsonutils.dumps(payload)
class Failure(Exception):
    """XenAPI-style failure carrying a list of error detail strings."""

    def __init__(self, details):
        self.details = details

    def __str__(self):
        try:
            return str(self.details)
        except Exception:
            return "XenAPI Fake Failure: %s" % str(self.details)

    def _details_map(self):
        # XenAPI exposes details as an {index-as-string: detail} mapping.
        return dict((str(idx), detail)
                    for idx, detail in enumerate(self.details))
class SessionBase(object):
"""Base class for Fake Sessions."""
    def __init__(self, uri):
        """Create a fake session; the XenAPI URI argument is ignored."""
        self._session = None
        xenapi_session.apply_session_helpers(self)
def pool_get_default_SR(self, _1, pool_ref):
return _db_content['pool'].values()[0]['default-SR']
    def VBD_insert(self, _1, vbd_ref, vdi_ref):
        """Insert a VDI into a (CD-style) VBD, marking it non-empty."""
        vbd_rec = get_record('VBD', vbd_ref)
        # Validates the VDI ref (raises HANDLE_INVALID if unknown).
        get_record('VDI', vdi_ref)
        vbd_rec['empty'] = False
        vbd_rec['VDI'] = vdi_ref
    def VBD_plug(self, _1, ref):
        """Attach a VBD; raises if it is already attached."""
        rec = get_record('VBD', ref)
        if rec['currently_attached']:
            raise Failure(['DEVICE_ALREADY_ATTACHED', ref])
        rec['currently_attached'] = True
        rec['device'] = rec['userdevice']
    def VBD_unplug(self, _1, ref):
        """Detach a VBD; raises if it is already detached."""
        rec = get_record('VBD', ref)
        if not rec['currently_attached']:
            raise Failure(['DEVICE_ALREADY_DETACHED', ref])
        rec['currently_attached'] = False
        rec['device'] = ''
    def VBD_add_to_other_config(self, _1, vbd_ref, key, value):
        """Add key=value to a VBD's other_config; duplicate keys raise
        MAP_DUPLICATE_KEY, as the real API does for add_to_ calls."""
        db_ref = _db_content['VBD'][vbd_ref]
        if 'other_config' not in db_ref:
            db_ref['other_config'] = {}
        if key in db_ref['other_config']:
            raise Failure(['MAP_DUPLICATE_KEY', 'VBD', 'other_config',
                           vbd_ref, key])
        db_ref['other_config'][key] = value
def VBD_get_other_config(self, _1, vbd_ref):
db_ref = _db_content['VBD'][vbd_ref]
if 'other_config' not in db_ref:
return {}
return db_ref['other_config']
    def PBD_create(self, _1, pbd_rec):
        """Create a PBD record, initially unattached; returns its ref."""
        pbd_ref = _create_object('PBD', pbd_rec)
        _db_content['PBD'][pbd_ref]['currently_attached'] = False
        return pbd_ref
    def PBD_plug(self, _1, pbd_ref):
        """Attach a PBD; raises if it is already attached."""
        rec = get_record('PBD', pbd_ref)
        if rec['currently_attached']:
            raise Failure(['DEVICE_ALREADY_ATTACHED', rec])
        rec['currently_attached'] = True
        sr_ref = rec['SR']
        # NOTE(review): replaces (rather than extends) the SR's PBD list.
        _db_content['SR'][sr_ref]['PBDs'] = [pbd_ref]
    def PBD_unplug(self, _1, pbd_ref):
        """Detach a PBD; raises if it is already detached."""
        rec = get_record('PBD', pbd_ref)
        if not rec['currently_attached']:
            raise Failure(['DEVICE_ALREADY_DETACHED', rec])
        rec['currently_attached'] = False
        sr_ref = rec['SR']
        _db_content['SR'][sr_ref]['PBDs'].remove(pbd_ref)
    def SR_introduce(self, _1, sr_uuid, label, desc, type, content_type,
                     shared, sm_config):
        """(Re)introduce an SR identified by sr_uuid.
        An existing record is simply un-forgotten and its ref returned;
        otherwise a fresh SR record is created.  For iscsi a single VDI
        is also created, since such an introduce targets one LUN.
        """
        for ref, rec in six.iteritems(_db_content['SR']):
            if rec.get('uuid') == sr_uuid:
                # make forgotten = 0 and return ref
                _db_content['SR'][ref]['forgotten'] = 0
                return ref
        # SR not found in db, so we create one
        params = {'sr_uuid': sr_uuid,
                  'label': label,
                  'desc': desc,
                  'type': type,
                  'content_type': content_type,
                  'shared': shared,
                  'sm_config': sm_config}
        sr_ref = _create_object('SR', params)
        _db_content['SR'][sr_ref]['uuid'] = sr_uuid
        _db_content['SR'][sr_ref]['forgotten'] = 0
        vdi_per_lun = False
        if type == 'iscsi':
            # Just to be clear
            vdi_per_lun = True
        if vdi_per_lun:
            # we need to create a vdi because this introduce
            # is likely meant for a single vdi
            vdi_ref = create_vdi('', sr_ref)
            _db_content['SR'][sr_ref]['VDIs'] = [vdi_ref]
            _db_content['VDI'][vdi_ref]['SR'] = sr_ref
        return sr_ref
    def SR_forget(self, _1, sr_ref):
        """Mark an SR as forgotten (kept in the DB for re-introduce)."""
        _db_content['SR'][sr_ref]['forgotten'] = 1
    def SR_scan(self, _1, sr_ref):
        """No-op: the fake has nothing to rescan."""
        return
    def VM_get_xenstore_data(self, _1, vm_ref):
        """Return the VM's xenstore_data map (empty if never set)."""
        return _db_content['VM'][vm_ref].get('xenstore_data', {})
def VM_remove_from_xenstore_data(self, _1, vm_ref, key):
db_ref = _db_content['VM'][vm_ref]
if 'xenstore_data' not in db_ref:
return
if key in db_ref['xenstore_data']:
del db_ref['xenstore_data'][key]
def VM_add_to_xenstore_data(self, _1, vm_ref, key, value):
db_ref = _db_content['VM'][vm_ref]
if 'xenstore_data' not in db_ref:
db_ref['xenstore_data'] = {}
db_ref['xenstore_data'][key] = value
    def VM_pool_migrate(self, _1, vm_ref, host_ref, options):
        """No-op: the fake accepts any intra-pool migration."""
        pass
def VDI_remove_from_other_config(self, _1, vdi_ref, key):
db_ref = _db_content['VDI'][vdi_ref]
if 'other_config' not in db_ref:
return
if key in db_ref['other_config']:
del db_ref['other_config'][key]
    def VDI_add_to_other_config(self, _1, vdi_ref, key, value):
        """Add key=value to a VDI's other_config; duplicate keys raise
        MAP_DUPLICATE_KEY, mirroring VBD_add_to_other_config."""
        db_ref = _db_content['VDI'][vdi_ref]
        if 'other_config' not in db_ref:
            db_ref['other_config'] = {}
        if key in db_ref['other_config']:
            raise Failure(['MAP_DUPLICATE_KEY', 'VDI', 'other_config',
                           vdi_ref, key])
        db_ref['other_config'][key] = value
    def VDI_copy(self, _1, vdi_to_copy_ref, sr_ref):
        """Copy a VDI into sr_ref, carrying over label, flags and a copy
        of other_config; returns the new VDI's ref."""
        db_ref = _db_content['VDI'][vdi_to_copy_ref]
        name_label = db_ref['name_label']
        read_only = db_ref['read_only']
        sharable = db_ref['sharable']
        other_config = db_ref['other_config'].copy()
        return create_vdi(name_label, sr_ref, sharable=sharable,
                          read_only=read_only, other_config=other_config)
def VDI_clone(self, _1, vdi_to_clone_ref):
db_ref = _db_content['VDI'][vdi_to_clone_ref]
sr_ref = db_ref['SR']
return self.VDI_copy(_1, vdi_to_clone_ref, sr_ref)
    def host_compute_free_memory(self, _1, ref):
        """Report host free memory; the fake always has 12 GiB free."""
        # Always return 12GB available
        return 12 * units.Gi
    def _plugin_agent_version(self, method, args):
        """Fake agent 'version' plugin call: report guest agent 1.0."""
        return as_json(returncode='0', message='1.0\\r\\n')
    def _plugin_agent_key_init(self, method, args):
        # NOTE(review): 'D0' appears to be the agent key-exchange return
        # code -- confirm against the real guest-agent protocol.
        return as_json(returncode='D0', message='1')
    def _plugin_agent_password(self, method, args):
        """Fake agent password-set call: always succeeds."""
        return as_json(returncode='0', message='success')
    def _plugin_agent_inject_file(self, method, args):
        """Fake agent file-injection call: always succeeds."""
        return as_json(returncode='0', message='success')
    def _plugin_agent_resetnetwork(self, method, args):
        """Fake agent network-reset call: always succeeds."""
        return as_json(returncode='0', message='success')
def _plugin_agent_agentupdate(self, method, args):
url = args["url"]
md5 = args["md5sum"]
message = "success with %(url)s and hash:%(md5)s" % dict(url=url,
md5=md5)
return as_json(returncode='0', message=message)
    def _plugin_noop(self, method, args):
        """Plugin stub returning an empty string."""
        return ''
    def _plugin_pickle_noop(self, method, args):
        """Plugin stub returning a pickled None, for pickle-speaking
        callers."""
        return pickle.dumps(None)
    def _plugin_migration_transfer_vhd(self, method, args):
        """Fake VHD transfer: only validates that the VDI uuid embedded in
        the pickled params resolves to a real VDI."""
        kwargs = pickle.loads(args['params'])['kwargs']
        vdi_ref = self.xenapi_request('VDI.get_by_uuid',
                (kwargs['vdi_uuid'], ))
        assert vdi_ref
        return pickle.dumps(None)
_plugin_glance_upload_vhd = _plugin_pickle_noop
_plugin_kernel_copy_vdi = _plugin_noop
_plugin_kernel_create_kernel_ramdisk = _plugin_noop
_plugin_kernel_remove_kernel_ramdisk = _plugin_noop
_plugin_migration_move_vhds_into_sr = _plugin_noop
    def _plugin_xenhost_host_data(self, method, args):
        """Return canned xenhost host_data JSON (memory, capabilities and
        CPU info) for host-stats tests."""
        return jsonutils.dumps({
            'host_memory': {'total': 10,
                            'overhead': 20,
                            'free': 30,
                            'free-computed': 40},
            'host_uuid': 'fb97583b-baa1-452d-850e-819d95285def',
            'host_name-label': 'fake-xenhost',
            'host_name-description': 'Default install of XenServer',
            'host_hostname': 'fake-xenhost',
            'host_ip_address': '10.219.10.24',
            'enabled': 'true',
            'host_capabilities': ['xen-3.0-x86_64',
                                  'xen-3.0-x86_32p',
                                  'hvm-3.0-x86_32',
                                  'hvm-3.0-x86_32p',
                                  'hvm-3.0-x86_64'],
            'host_other-config': {
                'agent_start_time': '1412774967.',
                'iscsi_iqn': 'iqn.2014-10.org.example:39fa9ee3',
                'boot_time': '1412774885.',
            },
            'host_cpu_info': {
                'physical_features': '0098e3fd-bfebfbff-00000001-28100800',
                'modelname': 'Intel(R) Xeon(R) CPU X3430 @ 2.40GHz',
                'vendor': 'GenuineIntel',
                'features': '0098e3fd-bfebfbff-00000001-28100800',
                'family': 6,
                'maskable': 'full',
                'cpu_count': 4,
                'socket_count': '1',
                'flags': 'fpu de tsc msr pae mce cx8 apic sep mtrr mca '
                         'cmov pat clflush acpi mmx fxsr sse sse2 ss ht '
                         'nx constant_tsc nonstop_tsc aperfmperf pni vmx '
                         'est ssse3 sse4_1 sse4_2 popcnt hypervisor ida '
                         'tpr_shadow vnmi flexpriority ept vpid',
                'stepping': 5,
                'model': 30,
                'features_after_reboot': '0098e3fd-bfebfbff-00000001-28100800',
                'speed': '2394.086'
            },
        })
    def _plugin_poweraction(self, method, args):
        """Answer xenhost power calls.  host_call_plugin passes method
        names like 'host_reboot', so method[5:] yields the action."""
        return jsonutils.dumps({"power_action": method[5:]})
    _plugin_xenhost_host_reboot = _plugin_poweraction
    _plugin_xenhost_host_startup = _plugin_poweraction
    _plugin_xenhost_host_shutdown = _plugin_poweraction
    def _plugin_xenhost_set_host_enabled(self, method, args):
        """Echo back the requested enabled/disabled host status."""
        enabled = 'enabled' if args.get('enabled') == 'true' else 'disabled'
        return jsonutils.dumps({"status": enabled})
    def _plugin_xenhost_host_uptime(self, method, args):
        """Return a canned host uptime string."""
        return jsonutils.dumps({"uptime": "fake uptime"})
    def _plugin_xenhost_get_pci_device_details(self, method, args):
        """Simulate the output of three pci devices.
        Both of those devices are available for pci passthrough but
        only one will match with the pci whitelist used in the
        method test_pci_passthrough_devices_*().
        Returns the pickled lspci-style output string.
        """
        # Driver is not pciback
        dev_bad1 = ["Slot:\t0000:86:10.0", "Class:\t0604", "Vendor:\t10b5",
                    "Device:\t8747", "Rev:\tba", "Driver:\tpcieport", "\n"]
        # Driver is pciback but vendor and device are bad
        dev_bad2 = ["Slot:\t0000:88:00.0", "Class:\t0300", "Vendor:\t0bad",
                    "Device:\tcafe", "SVendor:\t10de", "SDevice:\t100d",
                    "Rev:\ta1", "Driver:\tpciback", "\n"]
        # Driver is pciback and vendor, device are used for matching
        dev_good = ["Slot:\t0000:87:00.0", "Class:\t0300", "Vendor:\t10de",
                    "Device:\t11bf", "SVendor:\t10de", "SDevice:\t100d",
                    "Rev:\ta1", "Driver:\tpciback", "\n"]
        lspci_output = "\n".join(dev_bad1 + dev_bad2 + dev_good)
        return pickle.dumps(lspci_output)
    def _plugin_xenhost_get_pci_type(self, method, args):
        """Every fake PCI device is a plain (non-SR-IOV) type-PCI device."""
        return pickle.dumps("type-PCI")
    def _plugin_console_get_console_log(self, method, args):
        """Return a base64-ed, zlib-compressed fake console log.
        dom0 (dom_id == 0) has no console and raises instead.
        """
        dom_id = args["dom_id"]
        # NOTE(review): assumes dom_id arrives as an int; a string '0'
        # would not trigger this guard -- confirm against callers.
        if dom_id == 0:
            raise Failure('Guest does not have a console')
        return base64.b64encode(zlib.compress("dom_id: %s" % dom_id))
    def _plugin_nova_plugin_version_get_version(self, method, args):
        """Report the fake nova plugin version (pickled, like the real
        plugin protocol)."""
        return pickle.dumps("1.2")
    def _plugin_xenhost_query_gc(self, method, args):
        """Report that no SR garbage collection is in progress."""
        return pickle.dumps("False")
def host_call_plugin(self, _1, _2, plugin, method, args):
func = getattr(self, '_plugin_%s_%s' % (plugin, method), None)
if not func:
raise Exception('No simulation in host_call_plugin for %s,%s' %
(plugin, method))
return func(method, args)
    def VDI_get_virtual_size(self, *args):
        """Every fake VDI reports a 1 GiB virtual size."""
        return 1 * units.Gi
    def VDI_resize_online(self, *args):
        """No-op resize; returns a dummy value."""
        return 'derp'
    # The fake does not distinguish online from offline resize.
    VDI_resize = VDI_resize_online
    def _VM_reboot(self, session, vm_ref):
        """Common reboot path: only a running VM may reboot; on success it
        gets a fresh random domid, as a real reboot would."""
        db_ref = _db_content['VM'][vm_ref]
        if db_ref['power_state'] != 'Running':
            raise Failure(['VM_BAD_POWER_STATE',
                'fake-opaque-ref', db_ref['power_state'].lower(), 'halted'])
        db_ref['power_state'] = 'Running'
        db_ref['domid'] = random.randrange(1, 1 << 16)
    def VM_clean_reboot(self, session, vm_ref):
        """Clean reboot: same as hard reboot in the fake."""
        return self._VM_reboot(session, vm_ref)
    def VM_hard_reboot(self, session, vm_ref):
        """Hard reboot: same as clean reboot in the fake."""
        return self._VM_reboot(session, vm_ref)
    def VM_hard_shutdown(self, session, vm_ref):
        """Power a VM off: mark it Halted and drop its domid."""
        db_ref = _db_content['VM'][vm_ref]
        db_ref['power_state'] = 'Halted'
        db_ref['domid'] = -1
    # The fake makes no distinction between clean and hard shutdown.
    VM_clean_shutdown = VM_hard_shutdown
    def VM_suspend(self, session, vm_ref):
        """Mark a VM as Suspended."""
        db_ref = _db_content['VM'][vm_ref]
        db_ref['power_state'] = 'Suspended'
    def VM_pause(self, session, vm_ref):
        """Mark a VM as Paused."""
        db_ref = _db_content['VM'][vm_ref]
        db_ref['power_state'] = 'Paused'
    def pool_eject(self, session, host_ref):
        """No-op: the fake accepts any pool eject."""
        pass
    def pool_join(self, session, hostname, username, password):
        """No-op: the fake accepts any pool join."""
        pass
    def pool_set_name_label(self, session, pool_ref, name):
        """No-op: the fake ignores pool renames."""
        pass
    def host_migrate_receive(self, session, destref, nwref, options):
        """Return a dummy migrate-data token for live migration tests."""
        return "fake_migrate_data"
    def VM_assert_can_migrate(self, session, vmref, migrate_data, live,
                              vdi_map, vif_map, options):
        """No-op: any migration is considered possible in the fake."""
        pass
    def VM_migrate_send(self, session, mref, migrate_data, live, vdi_map,
                        vif_map, options):
        """No-op: the fake pretends the migration succeeded."""
        pass
    def VM_remove_from_blocked_operations(self, session, vm_ref, key):
        """Remove key from the VM's blocked_operations map."""
        # operation is idempotent, XenServer doesn't care if the key exists
        _db_content['VM'][vm_ref]['blocked_operations'].pop(key, None)
    def xenapi_request(self, methodname, params):
        """Entry point mirroring XenAPI's XML-RPC dispatch.
        login*/logout calls are handled locally (returning None, as the
        real API does); everything else is forwarded to the same-named
        method on self with the session handle prepended to params.
        """
        if methodname.startswith('login'):
            self._login(methodname, params)
            return None
        elif methodname == 'logout' or methodname == 'session.logout':
            self._logout()
            return None
        else:
            full_params = (self._session,) + params
            meth = getattr(self, methodname, None)
            if meth is None:
                LOG.debug('Raising NotImplemented')
                raise NotImplementedError(
                    _('xenapi.fake does not have an implementation for %s') %
                    methodname)
            return meth(*full_params)
def _login(self, method, params):
self._session = str(uuid.uuid4())
_session_info = {'uuid': str(uuid.uuid4()),
'this_host': _db_content['host'].keys()[0]}
_db_content['session'][self._session] = _session_info
    def _logout(self):
        """End the current session, failing loudly on a double logout or
        an unknown session handle."""
        s = self._session
        self._session = None
        if s not in _db_content['session']:
            raise exception.NovaException(
                "Logging out a session that is invalid or already logged "
                "out: %s" % s)
        del _db_content['session'][s]
    def __getattr__(self, name):
        """Resolve unknown attribute access into fake XenAPI behavior.
        Dispatch order: the session handle, the xenapi proxy object,
        login/slave_local and Async wrappers, dotted methods explicitly
        defined on this class, then generic getter/setter/create/destroy
        emulation for the classes in _CLASSES.
        """
        if name == 'handle':
            return self._session
        elif name == 'xenapi':
            return _Dispatcher(self.xenapi_request, None)
        elif name.startswith('login') or name.startswith('slave_local'):
            return lambda *params: self._login(name, params)
        elif name.startswith('Async'):
            return lambda *params: self._async(name, params)
        elif '.' in name:
            # A dotted name like 'VM.clean_reboot' maps to VM_clean_reboot.
            impl = getattr(self, name.replace('.', '_'))
            if impl is not None:
                def callit(*params):
                    LOG.debug('Calling %(name)s %(impl)s',
                              {'name': name, 'impl': impl})
                    self._check_session(params)
                    return impl(*params)
                return callit
        if self._is_gettersetter(name, True):
            LOG.debug('Calling getter %s', name)
            return lambda *params: self._getter(name, params)
        elif self._is_gettersetter(name, False):
            LOG.debug('Calling setter %s', name)
            return lambda *params: self._setter(name, params)
        elif self._is_create(name):
            return lambda *params: self._create(name, params)
        elif self._is_destroy(name):
            return lambda *params: self._destroy(name, params)
        elif name == 'XenAPI':
            return FakeXenAPI()
        else:
            return None
def _is_gettersetter(self, name, getter):
bits = name.split('.')
return (len(bits) == 2 and
bits[0] in _CLASSES and
bits[1].startswith(getter and 'get_' or 'set_'))
    def _is_create(self, name):
        """Return True when *name* is a ``<known class>.create`` call."""
        return self._is_method(name, 'create')
    def _is_destroy(self, name):
        """Return True when *name* is a ``<known class>.destroy`` call."""
        return self._is_method(name, 'destroy')
def _is_method(self, name, meth):
bits = name.split('.')
return (len(bits) == 2 and
bits[0] in _CLASSES and
bits[1] == meth)
    def _getter(self, name, params):
        """Serve a ``<class>.get_*`` call against the fake database.

        Well-known accessors (get_all, get_record, get_by_uuid, ...) are
        handled explicitly; any other ``get_<field>`` falls through to a
        direct field lookup on the record.
        """
        self._check_session(params)
        (cls, func) = name.split('.')
        if func == 'get_all':
            self._check_arg_count(params, 1)
            return get_all(cls)
        if func == 'get_all_records':
            self._check_arg_count(params, 1)
            return get_all_records(cls)
        if func == 'get_all_records_where':
            self._check_arg_count(params, 2)
            return get_all_records_where(cls, params[1])
        if func == 'get_record':
            self._check_arg_count(params, 2)
            return get_record(cls, params[1])
        if func in ('get_by_name_label', 'get_by_uuid'):
            self._check_arg_count(params, 2)
            # get_by_uuid returns a single ref; get_by_name_label a list.
            return_singleton = (func == 'get_by_uuid')
            return self._get_by_field(
                _db_content[cls], func[len('get_by_'):], params[1],
                return_singleton=return_singleton)
        if len(params) == 2:
            field = func[len('get_'):]
            ref = params[1]
            # NOTE: a known ref with an unknown field falls through to the
            # NotImplementedError below rather than raising HANDLE_INVALID.
            if (ref in _db_content[cls]):
                if (field in _db_content[cls][ref]):
                    return _db_content[cls][ref][field]
            else:
                raise Failure(['HANDLE_INVALID', cls, ref])
        LOG.debug('Raising NotImplemented')
        raise NotImplementedError(
            _('xenapi.fake does not have an implementation for %s or it has '
              'been called with the wrong number of arguments') % name)
def _setter(self, name, params):
self._check_session(params)
(cls, func) = name.split('.')
if len(params) == 3:
field = func[len('set_'):]
ref = params[1]
val = params[2]
if (ref in _db_content[cls] and
field in _db_content[cls][ref]):
_db_content[cls][ref][field] = val
return
LOG.debug('Raising NotImplemented')
raise NotImplementedError(
'xenapi.fake does not have an implementation for %s or it has '
'been called with the wrong number of arguments or the database '
'is missing that field' % name)
def _create(self, name, params):
self._check_session(params)
is_sr_create = name == 'SR.create'
is_vlan_create = name == 'VLAN.create'
# Storage Repositories have a different API
expected = is_sr_create and 10 or is_vlan_create and 4 or 2
self._check_arg_count(params, expected)
(cls, _) = name.split('.')
ref = (is_sr_create and
_create_sr(cls, params) or
is_vlan_create and
_create_vlan(params[1], params[2], params[3]) or
_create_object(cls, params[1]))
# Call hook to provide any fixups needed (ex. creating backrefs)
after_hook = 'after_%s_create' % cls
if after_hook in globals():
globals()[after_hook](ref, params[1])
obj = get_record(cls, ref)
# Add RO fields
if cls == 'VM':
obj['power_state'] = 'Halted'
return ref
def _destroy(self, name, params):
self._check_session(params)
self._check_arg_count(params, 2)
table = name.split('.')[0]
ref = params[1]
if ref not in _db_content[table]:
raise Failure(['HANDLE_INVALID', table, ref])
# Call destroy function (if exists)
destroy_func = globals().get('destroy_%s' % table.lower())
if destroy_func:
destroy_func(ref)
else:
del _db_content[table][ref]
    def _async(self, name, params):
        """Run an ``Async.<call>`` synchronously, recording a fake task.

        The underlying call executes immediately; success or a XenAPI
        Failure is captured in the task record, and the task ref is
        returned so callers can poll it as they would a real async task.
        """
        task_ref = create_task(name)
        task = _db_content['task'][task_ref]
        func = name[len('Async.'):]
        try:
            result = self.xenapi_request(func, params[1:])
            if result:
                result = as_value(result)
            task['result'] = result
            task['status'] = 'success'
        except Failure as exc:
            task['error_info'] = exc.details
            task['status'] = 'failed'
        # 'finished' is stamped on both the success and failure paths.
        task['finished'] = timeutils.utcnow()
        return task_ref
def _check_session(self, params):
if (self._session is None or
self._session not in _db_content['session']):
raise Failure(['HANDLE_INVALID', 'session', self._session])
if len(params) == 0 or params[0] != self._session:
LOG.debug('Raising NotImplemented')
raise NotImplementedError('Call to XenAPI without using .xenapi')
def _check_arg_count(self, params, expected):
actual = len(params)
if actual != expected:
raise Failure(['MESSAGE_PARAMETER_COUNT_MISMATCH',
expected, actual])
def _get_by_field(self, recs, k, v, return_singleton):
result = []
for ref, rec in six.iteritems(recs):
if rec.get(k) == v:
result.append(ref)
if return_singleton:
try:
return result[0]
except IndexError:
raise Failure(['UUID_INVALID', v, result, recs, k])
return result
class FakeXenAPI(object):
    """Minimal stand-in for the XenAPI module; exposes only Failure."""
    def __init__(self):
        # Mirror the real module's attribute so `session.XenAPI.Failure`
        # resolves for callers catching XenAPI errors.
        self.Failure = Failure
# Based upon _Method from xmlrpclib.
class _Dispatcher(object):
    """Proxy turning attribute chains into dotted XenAPI call names.

    Accessing ``dispatcher.VM.get_record`` builds the name
    ``'VM.get_record'``; calling the result forwards ``(name, args)``
    to the *send* callable.  Modeled on _Method from xmlrpclib.
    """
    def __init__(self, send, name):
        self.__send = send
        self.__name = name
    def __repr__(self):
        base = '<xenapi.fake._Dispatcher'
        if self.__name:
            return '%s for %s>' % (base, self.__name)
        return base + '>'
    def __getattr__(self, name):
        # Grow the dotted call name one attribute at a time.
        if self.__name is None:
            dotted = name
        else:
            dotted = "%s.%s" % (self.__name, name)
        return _Dispatcher(self.__send, dotted)
    def __call__(self, *args):
        return self.__send(self.__name, args)
| apache-2.0 |
jorge2703/scikit-learn | sklearn/utils/tests/test_random.py | 230 | 7344 | from __future__ import division
import numpy as np
import scipy.sparse as sp
from scipy.misc import comb as combinations
from numpy.testing import assert_array_almost_equal
from sklearn.utils.random import sample_without_replacement
from sklearn.utils.random import random_choice_csc
from sklearn.utils.testing import (
assert_raises,
assert_equal,
assert_true)
###############################################################################
# test custom sampling without replacement algorithm
###############################################################################
def test_invalid_sample_without_replacement_algorithm():
    """An unknown sampling method name must raise ValueError."""
    assert_raises(ValueError, sample_without_replacement, 5, 4, "unknown")
def test_sample_without_replacement_algorithms():
    """Run the shared sampler checks against every supported method."""
    for method in ("auto", "tracking_selection", "reservoir_sampling", "pool"):
        # Bind the current method as a default argument so the closure is
        # not affected by late binding of the loop variable.
        def sample_method(n_population, n_samples, random_state=None,
                          _method=method):
            return sample_without_replacement(n_population, n_samples,
                                              method=_method,
                                              random_state=random_state)
        check_edge_case_of_sample_int(sample_method)
        check_sample_int(sample_method)
        check_sample_int_distribution(sample_method)
def check_edge_case_of_sample_int(sample_without_replacement):
    """Exercise boundary values of (n_population, n_samples)."""
    # n_population < n_samples is invalid
    for n_population, n_samples in ((0, 1), (1, 2)):
        assert_raises(ValueError, sample_without_replacement,
                      n_population, n_samples)
    # legal corner cases return an array of the requested length
    for n_population, n_samples in ((0, 0), (1, 1), (5, 0), (5, 1)):
        assert_equal(sample_without_replacement(n_population, n_samples).shape,
                     (n_samples, ))
    # negative sizes are rejected
    for n_population, n_samples in ((-1, 5), (5, -1)):
        assert_raises(ValueError, sample_without_replacement,
                      n_population, n_samples)
def check_sample_int(sample_without_replacement):
    """Check sample length and uniqueness for every legal sample size."""
    # This test is heavily inspired from test_random.py of python-core.
    #
    # For the entire allowable range of 0 <= k <= N, validate that
    # the sample is of the correct length and contains only unique items
    n_population = 100
    for n_samples in range(n_population + 1):
        s = sample_without_replacement(n_population, n_samples)
        assert_equal(len(s), n_samples)
        unique = np.unique(s)
        assert_equal(np.size(unique), n_samples)
        # every sampled value must be a valid population index
        assert_true(np.all(unique < n_population))
    # test edge case n_population == n_samples == 0
    assert_equal(np.size(sample_without_replacement(0, 0)), 0)
def check_sample_int_distribution(sample_without_replacement):
    """Check that repeated sampling eventually hits every combination."""
    # This test is heavily inspired from test_random.py of python-core.
    #
    # For the entire allowable range of 0 <= k <= N, validate that
    # sample generates all possible permutations
    n_population = 10
    # a large number of trials prevents false negatives without slowing normal
    # case
    n_trials = 10000
    for n_samples in range(n_population):
        # Counting the number of combinations is not as good as counting
        # the number of permutations. However, it works with sampling
        # algorithms that do not provide a random permutation of the
        # subset of integers.
        n_expected = combinations(n_population, n_samples, exact=True)
        output = {}
        for i in range(n_trials):
            output[frozenset(sample_without_replacement(n_population,
                                                        n_samples))] = None
            if len(output) == n_expected:
                break
        else:
            # for/else: only reached when the loop exhausted all trials
            raise AssertionError(
                "number of combinations != number of expected (%s != %s)" %
                (len(output), n_expected))
def test_random_choice_csc(n_samples=10000, random_state=24):
    """Check empirical column distributions of random_choice_csc output."""
    # Explicit class probabilities
    classes = [np.array([0, 1]), np.array([0, 1, 2])]
    class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
    got = random_choice_csc(n_samples, classes, class_probabilites,
                            random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        # empirical frequency of each class in column k
        p = np.bincount(got.getcol(k).toarray().ravel()) / float(n_samples)
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # Implicit (uniform/inferred) class probabilities
    classes = [[0, 1], [1, 2]] # test for array-like support
    class_probabilites = [np.array([0.5, 0.5]), np.array([0, 1/2, 1/2])]
    got = random_choice_csc(n_samples=n_samples,
                            classes=classes,
                            random_state=random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        p = np.bincount(got.getcol(k).toarray().ravel()) / float(n_samples)
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # Edge case probabilities 1.0 and 0.0
    classes = [np.array([0, 1]), np.array([0, 1, 2])]
    class_probabilites = [np.array([1.0, 0.0]), np.array([0.0, 1.0, 0.0])]
    got = random_choice_csc(n_samples, classes, class_probabilites,
                            random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        p = np.bincount(got.getcol(k).toarray().ravel(),
                        minlength=len(class_probabilites[k])) / n_samples
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
    # One class target data
    classes = [[1], [0]] # test for array-like support
    class_probabilites = [np.array([0.0, 1.0]), np.array([1.0])]
    got = random_choice_csc(n_samples=n_samples,
                            classes=classes,
                            random_state=random_state)
    assert_true(sp.issparse(got))
    for k in range(len(classes)):
        p = np.bincount(got.getcol(k).toarray().ravel()) / n_samples
        assert_array_almost_equal(class_probabilites[k], p, decimal=1)
def test_random_choice_csc_errors():
    """Check that invalid classes/probability inputs raise ValueError."""
    # the length of an array in classes and class_probabilites is mismatched
    classes = [np.array([0, 1]), np.array([0, 1, 2, 3])]
    class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
    assert_raises(ValueError, random_choice_csc, 4, classes,
                  class_probabilites, 1)
    # the class dtype is not supported (strings)
    classes = [np.array(["a", "1"]), np.array(["z", "1", "2"])]
    class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
    assert_raises(ValueError, random_choice_csc, 4, classes,
                  class_probabilites, 1)
    # the class dtype is not supported (floats)
    classes = [np.array([4.2, 0.1]), np.array([0.1, 0.2, 9.4])]
    class_probabilites = [np.array([0.5, 0.5]), np.array([0.6, 0.1, 0.3])]
    assert_raises(ValueError, random_choice_csc, 4, classes,
                  class_probabilites, 1)
    # Given probabilities don't sum to 1
    classes = [np.array([0, 1]), np.array([0, 1, 2])]
    class_probabilites = [np.array([0.5, 0.6]), np.array([0.6, 0.1, 0.3])]
    assert_raises(ValueError, random_choice_csc, 4, classes,
                  class_probabilites, 1)
| bsd-3-clause |
sadad111/leetcodebox | Add Two Numbers II.py | 1 | 1969 | # /**
# * Definition for singly-linked list.
# * public class ListNode {
# * int val;
# * ListNode next;
# * ListNode(int x) { val = x; }
# * }
# */
# public class Solution {
# public ListNode addTwoNumbers(ListNode l1, ListNode l2) {
# Stack<Integer> s1 = new Stack<Integer>();
# Stack<Integer> s2 = new Stack<Integer>();
#
# while(l1 != null) {
# s1.push(l1.val);
# l1 = l1.next;
# };
# while(l2 != null) {
# s2.push(l2.val);
# l2 = l2.next;
# }
#
# int sum = 0;
# ListNode list = new ListNode(0);
# while (!s1.empty() || !s2.empty()) {
# if (!s1.empty()) sum += s1.pop();
# if (!s2.empty()) sum += s2.pop();
# list.val = sum % 10;
# ListNode head = new ListNode(sum / 10);
# head.next = list;
# list = head;
# sum /= 10;
# }
#
# return list.val == 0 ? list.next : list;
# }
# }
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def addTwoNumbers(self, l1, l2):
        """
        Add two numbers whose digits are stored most-significant-first.

        Digits are pushed onto stacks so they can be consumed least-
        significant-first, building the result list front-to-back.

        :type l1: ListNode
        :type l2: ListNode
        :rtype: ListNode
        """
        stack1, stack2 = [], []
        while l1:
            stack1.append(l1.val)
            l1 = l1.next
        while l2:
            stack2.append(l2.val)
            l2 = l2.next
        sum_num = 0
        list_node = ListNode(0)
        while stack1 or stack2:
            if stack1:
                sum_num += stack1.pop()
            if stack2:
                sum_num += stack2.pop()
            list_node.val = sum_num % 10
            # Prepend a node holding the carry; it becomes a real digit
            # on the next iteration or the leading digit at the end.
            head = ListNode(sum_num // 10)
            head.next = list_node
            list_node = head
            sum_num //= 10
        # BUG FIX: the original returned `head`, which is undefined when
        # both inputs are empty.  `list_node` is always defined; drop the
        # leading node when it is a zero placeholder.
        return list_node if list_node.val != 0 else list_node.next
| gpl-3.0 |
117111302/iris-panel | iris/packagedb/tests/test_apiviews.py | 7 | 12682 | # -*- coding: utf-8 -*-
#This file is part of IRIS: Infrastructure and Release Information System
#
# Copyright (C) 2013 Intel Corporation
#
# IRIS is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# version 2.0 as published by the Free Software Foundation.
"""
This is the REST framework test class for the iris-packagedb project REST API.
"""
#pylint: disable=no-member,missing-docstring,invalid-name
#E:397,18: Instance of 'HttpResponse' has no 'data' member (no-member)
#C: 36, 0: Missing function docstring (missing-docstring)
#C: 96, 8: Invalid variable name "d" (invalid-name)
import base64
import urllib
from django.test import TestCase
from django.contrib.auth.models import User
from iris.core.models import (
Domain, SubDomain, GitTree, Package, Product, License, DomainRole,
SubDomainRole, GitTreeRole)
def sort_data(data):
    """Recursively sort nested lists in-place so two structures can be
    compared without regard to list ordering.

    Children are canonicalized before the containing list is sorted, and
    ``repr`` is used as the sort key: on Python 3, ``dict.itervalues()``
    does not exist and lists of dicts are unorderable, so the original
    implementation crashed on both counts.  Any deterministic total order
    works here because both sides of a comparison pass through this
    function.
    """
    if isinstance(data, list):
        for item in data:
            sort_data(item)
        data.sort(key=repr)
    elif isinstance(data, dict):
        # dict.values() works on both Python 2 and 3.
        for value in data.values():
            sort_data(value)
class ProductsTests(TestCase):
    """
    The REST framework test case class of the Product APIView.
    """
    def setUp(self):
        """
        Create 2 Product instances: one includes 2 gittrees, the other
        includes 1 gittree.  Also create 1 test user.
        """
        # NOTE(review): the user is created but never referenced by the
        # read-only GET tests below.
        user = User.objects.create_user(username='nemo', password='password')
        # 'doamin'/'subdoamin' are deliberate fixture strings; do not "fix".
        d = Domain.objects.create(name='doamin')
        sd = SubDomain.objects.create(name='subdoamin', domain=d)
        gt1 = GitTree.objects.create(gitpath='a/b', subdomain=sd)
        gt2 = GitTree.objects.create(gitpath='c/d', subdomain=sd)
        p1 = Product.objects.create(name='product', description='product1')
        p2 = Product.objects.create(name='a:b', description='product2')
        p1.gittrees.add(gt1, gt2)
        p2.gittrees.add(gt2)
    def test_get_info(self):
        """
        GET requests to APIView should return list of objects.
        """
        url = '/api/packagedb/products/'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = [{
            'name': 'a:b',
            'description': 'product2',
            'gittrees': ['c/d']
        }, {
            'name': 'product',
            'description': 'product1',
            'gittrees': ['a/b', 'c/d']
        }]
        # Sort both sides so comparison is order-insensitive.
        sort_data(data)
        sort_data(response.data)
        self.assertEqual(response.data, data)
    def test_get_detail(self):
        """
        GET requests to APIView should return a single object.
        """
        url = "/api/packagedb/products/a:b/"
        # quote the ':' in the product name for the URL
        url = urllib.quote(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {
            'name': 'a:b',
            'description': 'product2',
            'gittrees': ['c/d'],
        }
        sort_data(data)
        sort_data(response.data)
        self.assertEqual(response.data, data)
    def test_get_not_deleted_detail(self):
        """
        GET requests to APIView should raise 404
        if the object does not currently exist.
        """
        url = "/api/packagedb/products/999/"
        response = self.client.get(url)
        self.assertEqual(response.status_code, 404)
class DomainsTests(TestCase):
    """
    The REST framework test case class of the Domain APIView.
    """
    def setUp(self):
        """
        Create 2 SubDomain instances (one of them 'Uncategorized'),
        one DomainRole and one SubDomainRole.
        Create 2 test users and attach both to each role.
        """
        user = User.objects.create_user(
            username='nemo', password='password', email='nemo@a.com')
        user2 = User.objects.create_user(
            username='lucy', password='lucy',
            first_name='jaeho81', last_name='lucy',
            email='jaeho81.lucy@a.com')
        d1 = Domain.objects.create(name='domain1')
        d2 = Domain.objects.create(name='domain2')
        sd1 = SubDomain.objects.create(name='subdomain', domain=d1)
        SubDomain.objects.create(name='Uncategorized', domain=d2)
        dr = DomainRole.objects.create(
            role='Architect', domain=d2,
            name="%s: %s" % ('Architect', d2.name))
        user.groups.add(dr)
        user2.groups.add(dr)
        sdr = SubDomainRole.objects.create(
            role='Maintainer', subdomain=sd1,
            name="%s: %s" % ('Maintainer', sd1.name))
        user.groups.add(sdr)
        user2.groups.add(sdr)
    def test_get_info(self):
        """
        GET requests to APIView should return list of objects.
        """
        url = '/api/packagedb/domains/'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        # Domains are rendered as "<domain> / <subdomain>" with their
        # role memberships expanded to user records.
        data = [{
            'name': 'domain1 / subdomain',
            'roles': {
                'Maintainer': [{
                    'first_name': '',
                    'last_name': '',
                    'email': 'nemo@a.com',
                }, {
                    'first_name': 'jaeho81',
                    'last_name': 'lucy',
                    'email': 'jaeho81.lucy@a.com'
                }]
            },
        }, {
            'name': 'domain2 / Uncategorized',
            'roles': {
                'Architect': [{
                    'first_name': '',
                    'last_name': '',
                    'email': 'nemo@a.com'
                }, {
                    'first_name': 'jaeho81',
                    'last_name': 'lucy',
                    'email': 'jaeho81.lucy@a.com'
                }],
            },
        }]
        sort_data(data)
        sort_data(response.data)
        self.assertEqual(response.data, data)
    def test_get_detail(self):
        """
        GET requests to APIView should return single objects.
        """
        url = '/api/packagedb/domains/domain2 / Uncategorized/'
        # quote spaces in the composite domain name for the URL
        url = urllib.quote(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {
            'name': 'domain2 / Uncategorized',
            'roles': {
                'Architect': [{
                    'first_name': '',
                    'last_name': '',
                    'email': 'nemo@a.com'
                }, {
                    'first_name': 'jaeho81',
                    'last_name': 'lucy',
                    'email': 'jaeho81.lucy@a.com'
                }],
            }
        }
        sort_data(data)
        sort_data(response.data)
        self.assertEqual(response.data, data)
class GitTreesTests(TestCase):
    """
    The REST framework test case class of the GitTree APIView.
    """
    def setUp(self):
        """
        Create 2 GitTree instances: one related to a subdomain and two
        packages, the other related to the 'Uncategorized' subdomain,
        one package and two licenses.
        Create 2 test users and attach both to each GitTreeRole.
        """
        user1 = User.objects.create_user(
            username='nemo', password='password', email='nemo@a.com')
        user2 = User.objects.create_user(
            username='lucy', password='password',
            first_name='jaeho81', last_name='lucy',
            email='jaeho81.lucy@a.com')
        domain = Domain.objects.create(name='domain')
        sd1 = SubDomain.objects.create(name='subdomain', domain=domain)
        sd2 = SubDomain.objects.create(name='Uncategorized', domain=domain)
        gt1 = GitTree.objects.create(gitpath='d/f', subdomain=sd1)
        gt2 = GitTree.objects.create(gitpath='a/b/c', subdomain=sd2)
        p1 = Package.objects.create(name='xap1')
        p2 = Package.objects.create(name='p2')
        gt1.packages.add(p1, p2)
        gt2.packages.add(p2)
        l1 = License.objects.create(shortname='license1',
                                    fullname='labc def',
                                    text='helo')
        l2 = License.objects.create(shortname='abc',
                                    fullname='weldome sdfs',
                                    text='helo world')
        gt2.licenses.add(l1, l2)
        gr1 = GitTreeRole.objects.create(
            role='Integrator', gittree=gt1,
            name='Integrator: %s' % gt1.gitpath)
        user1.groups.add(gr1)
        user2.groups.add(gr1)
        gr2 = GitTreeRole.objects.create(
            role='Maintainer', gittree=gt2,
            name='Integrator: %s' % gt2.gitpath)
        user1.groups.add(gr2)
        user2.groups.add(gr2)
    def test_get_info(self):
        """
        GET requests to APIView should return list of objects.
        """
        url = '/api/packagedb/gittrees/'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = [{
            'gitpath': 'a/b/c',
            'domain': 'domain / Uncategorized',
            'roles': {
                'Maintainer': [{
                    'first_name': '',
                    'last_name': '',
                    'email': 'nemo@a.com'
                }, {
                    'first_name': 'jaeho81',
                    'last_name': 'lucy',
                    'email': 'jaeho81.lucy@a.com'
                }],
            },
            'packages': ['p2'],
            'licenses': ['license1', 'abc'],
        }, {
            'gitpath': 'd/f',
            'domain': 'domain / subdomain',
            'roles': {
                'Integrator': [{
                    'first_name': '',
                    'last_name': '',
                    'email': 'nemo@a.com'
                }, {
                    'first_name': 'jaeho81',
                    'last_name': 'lucy',
                    'email': 'jaeho81.lucy@a.com'
                }]
            },
            'packages': ['xap1', 'p2'],
            'licenses': [],
        }]
        # Sort both sides so comparison is order-insensitive.
        sort_data(data)
        sort_data(response.data)
        self.assertEqual(response.data, data)
    def test_get_detail(self):
        """
        GET requests to APIView should return single objects.
        """
        url = '/api/packagedb/gittrees/d/f/'
        url = urllib.quote(url)
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {
            'gitpath': 'd/f',
            'domain': 'domain / subdomain',
            'roles': {
                'Integrator': [{
                    'first_name': '',
                    'last_name': '',
                    'email': 'nemo@a.com'
                }, {
                    'first_name': 'jaeho81',
                    'last_name': 'lucy',
                    'email': 'jaeho81.lucy@a.com'
                }],
            },
            'packages': ['xap1', 'p2'],
            'licenses': [],
        }
        sort_data(data)
        sort_data(response.data)
        self.assertEqual(response.data, data)
class PackagesTests(TestCase):
    """
    The REST framework test case class of the Package APIView.
    """
    def setUp(self):
        """
        Create 2 Package instances: one related to two gittrees, the other
        related to 1 gittree.
        Create 1 test user.
        """
        # NOTE(review): the user is created but never referenced by the
        # read-only GET tests below.
        user = User.objects.create_user(username='nemo', password='password')
        domain = Domain.objects.create(name='domain')
        subdomain = SubDomain.objects.create(name='subdomain', domain=domain)
        gt1 = GitTree.objects.create(gitpath='agitpath1', subdomain=subdomain)
        gt2 = GitTree.objects.create(gitpath='gitpath2', subdomain=subdomain)
        pack1 = Package.objects.create(name='package1')
        pack2 = Package.objects.create(name='package2')
        gt1.packages.add(pack1, pack2)
        gt2.packages.add(pack2)
    def test_get_info(self):
        """
        GET requests to APIView should return list of objects.
        """
        url = '/api/packagedb/packages/'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = [{
            'name': 'package1',
            'gittrees': ['agitpath1']
        }, {
            'name': 'package2',
            'gittrees': ['gitpath2', 'agitpath1']
        }]
        # Sort both sides so comparison is order-insensitive.
        sort_data(data)
        sort_data(response.data)
        self.assertEqual(response.data, data)
    def test_get_detail(self):
        """
        GET requests to APIView should return a single object.
        """
        url = '/api/packagedb/packages/package2/'
        response = self.client.get(url)
        self.assertEqual(response.status_code, 200)
        data = {'name': 'package2', 'gittrees': ['agitpath1', 'gitpath2']}
        sort_data(data)
        sort_data(response.data)
        self.assertEqual(response.data, data)
| gpl-2.0 |
ryancoleman/autodock-vina | boost_1_54_0/tools/build/v2/test/build_file.py | 44 | 5117 | #!/usr/bin/python
# Copyright (C) 2006. Vladimir Prus
# Copyright (C) 2008. Jurko Gospodnetic
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Tests that we can explicitly request a file (not a target) to be built by
# specifying its name on the command line.
import BoostBuild
###############################################################################
#
# test_building_file_from_specific_project()
# ------------------------------------------
#
###############################################################################
def test_building_file_from_specific_project():
    """Request an object file qualified by its project directory; only that
    project's copy is built even though another project has a same-named
    target."""
    t = BoostBuild.Tester(use_test_config=False)
    t.write("jamroot.jam", """\
exe hello : hello.cpp ;
exe hello2 : hello.cpp ;
build-project sub ;
""")
    t.write("hello.cpp", "int main() {}\n")
    t.write("sub/jamfile.jam", """
exe hello : hello.cpp ;
exe hello2 : hello.cpp ;
exe sub : hello.cpp ;
""")
    t.write("sub/hello.cpp", "int main() {}\n")
    t.run_build_system(["sub", t.adjust_suffix("hello.obj")])
    t.expect_output_lines("*depends on itself*", False)
    t.expect_addition("sub/bin/$toolset/debug/hello.obj")
    t.expect_nothing_more()
    t.cleanup()
###############################################################################
#
# test_building_file_from_specific_target()
# -----------------------------------------
#
###############################################################################
def test_building_file_from_specific_target():
    """Request an object file scoped to one named target; only that
    target's object is produced."""
    t = BoostBuild.Tester(use_test_config=False)
    t.write("jamroot.jam", """\
exe hello1 : hello1.cpp ;
exe hello2 : hello2.cpp ;
exe hello3 : hello3.cpp ;
""")
    t.write("hello1.cpp", "int main() {}\n")
    t.write("hello2.cpp", "int main() {}\n")
    t.write("hello3.cpp", "int main() {}\n")
    t.run_build_system(["hello1", t.adjust_suffix("hello1.obj")])
    t.expect_addition("bin/$toolset/debug/hello1.obj")
    t.expect_nothing_more()
    t.cleanup()
###############################################################################
#
# test_building_missing_file_from_specific_target()
# -------------------------------------------------
#
###############################################################################
def test_building_missing_file_from_specific_target():
    """Requesting a file that the named target cannot produce must fail
    with a 'don't know how to make' error."""
    t = BoostBuild.Tester(use_test_config=False)
    t.write("jamroot.jam", """\
exe hello1 : hello1.cpp ;
exe hello2 : hello2.cpp ;
exe hello3 : hello3.cpp ;
""")
    t.write("hello1.cpp", "int main() {}\n")
    t.write("hello2.cpp", "int main() {}\n")
    t.write("hello3.cpp", "int main() {}\n")
    # hello2.obj belongs to target hello2, not hello1
    obj = t.adjust_suffix("hello2.obj")
    t.run_build_system(["hello1", obj], status=1)
    t.expect_output_lines("don't know how to make*" + obj)
    t.expect_nothing_more()
    t.cleanup()
###############################################################################
#
# test_building_multiple_files_with_different_names()
# ---------------------------------------------------
#
###############################################################################
def test_building_multiple_files_with_different_names():
    """Request two distinct object files on one command line; both are
    built and nothing else."""
    t = BoostBuild.Tester(use_test_config=False)
    t.write("jamroot.jam", """\
exe hello1 : hello1.cpp ;
exe hello2 : hello2.cpp ;
exe hello3 : hello3.cpp ;
""")
    t.write("hello1.cpp", "int main() {}\n")
    t.write("hello2.cpp", "int main() {}\n")
    t.write("hello3.cpp", "int main() {}\n")
    t.run_build_system([t.adjust_suffix("hello1.obj"), t.adjust_suffix(
        "hello2.obj")])
    t.expect_addition("bin/$toolset/debug/hello1.obj")
    t.expect_addition("bin/$toolset/debug/hello2.obj")
    t.expect_nothing_more()
    t.cleanup()
###############################################################################
#
# test_building_multiple_files_with_the_same_name()
# -------------------------------------------------
#
###############################################################################
def test_building_multiple_files_with_the_same_name():
    """Request an unqualified object-file name that exists in two
    projects; both copies are built without a self-dependency warning."""
    t = BoostBuild.Tester(use_test_config=False)
    t.write("jamroot.jam", """\
exe hello : hello.cpp ;
exe hello2 : hello.cpp ;
build-project sub ;
""")
    t.write("hello.cpp", "int main() {}\n")
    t.write("sub/jamfile.jam", """
exe hello : hello.cpp ;
exe hello2 : hello.cpp ;
exe sub : hello.cpp ;
""")
    t.write("sub/hello.cpp", "int main() {}\n")
    t.run_build_system([t.adjust_suffix("hello.obj")])
    t.expect_output_lines("*depends on itself*", False)
    t.expect_addition("bin/$toolset/debug/hello.obj")
    t.expect_addition("sub/bin/$toolset/debug/hello.obj")
    t.expect_nothing_more()
    t.cleanup()
###############################################################################
#
# main()
# ------
#
###############################################################################
# Execute every scenario when this file is run as a test script.
test_building_file_from_specific_project()
test_building_file_from_specific_target()
test_building_missing_file_from_specific_target()
test_building_multiple_files_with_different_names()
test_building_multiple_files_with_the_same_name()
| apache-2.0 |
calico/basenji | bin/sonnet_sat_bed.py | 1 | 9499 | #!/usr/bin/env python
# Copyright 2017 Calico LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from __future__ import print_function
from optparse import OptionParser
import gc
import json
import os
import pdb
import pickle
from queue import Queue
import random
import sys
from threading import Thread
import h5py
import numpy as np
import pandas as pd
import pysam
import tensorflow as tf
# Under TF1 enable eager execution so the model code below can run
# imperatively, as it would under TF2.
if tf.__version__[0] == '1':
  tf.compat.v1.enable_eager_execution()
from basenji import bed
from basenji import dna_io
from basenji import seqnn
from basenji import stream
from basenji_sat_bed import satmut_gen, ScoreWorker
'''
sonnet_sat_bed.py
Perform an in silico saturation mutagenesis of sequences in a BED file.
'''
################################################################################
# main
################################################################################
def main():
  """Run an in silico saturation mutagenesis of BED-file sequences.

  Loads a sonnet saved model, mutates every position of the central
  mutation window of each BED sequence, streams model predictions, and
  writes per-mutation statistics (sum/center/scd) to <out_dir>/scores.h5.

  Invocation modes (by positional argument count):
    2 args: <model> <bed_file>                      -- single worker
    3 args: <options_pkl> <model> <bed_file>        -- master script
    4 args: <options_pkl> <model> <bed_file> <i>    -- multi worker i
  """
  usage = 'usage: %prog [options] <model> <bed_file>'
  parser = OptionParser(usage)
  parser.add_option('-d', dest='mut_down',
      default=0, type='int',
      help='Nucleotides downstream of center sequence to mutate [Default: %default]')
  parser.add_option('-f', dest='genome_fasta',
      default=None,
      help='Genome FASTA for sequences [Default: %default]')
  parser.add_option('-l', dest='mut_len',
      default=0, type='int',
      help='Length of center sequence to mutate [Default: %default]')
  parser.add_option('-o', dest='out_dir',
      default='sat_mut', help='Output directory [Default: %default]')
  parser.add_option('--plots', dest='plots',
      default=False, action='store_true',
      help='Make heatmap plots [Default: %default]')
  parser.add_option('-p', dest='processes',
      default=None, type='int',
      help='Number of processes, passed by multi script')
  parser.add_option('--rc', dest='rc',
      default=False, action='store_true',
      help='Ensemble forward and reverse complement predictions [Default: %default]')
  parser.add_option('--shifts', dest='shifts',
      default='0',
      help='Ensemble prediction shifts [Default: %default]')
  parser.add_option('--species', dest='species',
      default='human')
  parser.add_option('--stats', dest='sad_stats',
      default='sum',
      help='Comma-separated list of stats to save (sum/center/scd). [Default: %default]')
  parser.add_option('-t', dest='targets_file',
      default=None, type='str',
      help='File specifying target indexes and labels in table format')
  parser.add_option('-u', dest='mut_up',
      default=0, type='int',
      help='Nucleotides upstream of center sequence to mutate [Default: %default]')
  (options, args) = parser.parse_args()

  # determine invocation mode from the positional arguments
  if len(args) == 2:
    # single worker
    model_file = args[0]
    bed_file = args[1]

  elif len(args) == 3:
    # master script
    options_pkl_file = args[0]
    model_file = args[1]
    bed_file = args[2]

    # load options
    options_pkl = open(options_pkl_file, 'rb')
    options = pickle.load(options_pkl)
    options_pkl.close()

  elif len(args) == 4:
    # multi worker
    options_pkl_file = args[0]
    model_file = args[1]
    bed_file = args[2]
    worker_index = int(args[3])

    # load options
    options_pkl = open(options_pkl_file, 'rb')
    options = pickle.load(options_pkl)
    options_pkl.close()

    # update output directory
    options.out_dir = '%s/job%d' % (options.out_dir, worker_index)

  else:
    parser.error('Must provide parameter and model files and BED file')

  if not os.path.isdir(options.out_dir):
    os.mkdir(options.out_dir)

  options.shifts = [int(shift) for shift in options.shifts.split(',')]
  options.sad_stats = [sad_stat.lower() for sad_stat in options.sad_stats.split(',')]

  if options.plots:
    # BUGFIX: no plotting worker or queue is ever created in this script, so
    # honoring --plots used to raise NameError ('plot_queue') on the first
    # sequence, mid-run. Warn and disable instead of crashing.
    print('Warning: --plots is not supported by sonnet_sat_bed.py; ignoring.',
          file=sys.stderr)
    options.plots = False

  # resolve the mutation window: explicit up/down bounds win over -l
  if options.mut_up > 0 or options.mut_down > 0:
    options.mut_len = options.mut_up + options.mut_down
  else:
    assert(options.mut_len > 0)
    options.mut_up = options.mut_len // 2
    options.mut_down = options.mut_len - options.mut_up

  # read targets
  if options.targets_file is None:
    target_slice = None
  else:
    targets_df = pd.read_table(options.targets_file, index_col=0)
    # NOTE(review): target_slice is computed but never used below -- the
    # sonnet model output is not sliced per-target here.
    target_slice = targets_df.index

  #################################################################
  # setup model

  seqnn_model = tf.saved_model.load(model_file).model

  # query num model targets by pushing a null sequence through the model
  seq_length = seqnn_model.predict_on_batch.input_signature[0].shape[1]
  null_1hot = np.zeros((1,seq_length,4))
  null_preds = seqnn_model.predict_on_batch(null_1hot)
  null_preds = null_preds[options.species].numpy()
  _, preds_length, num_targets = null_preds.shape

  #################################################################
  # sequence dataset

  # read sequences from BED
  seqs_dna, seqs_coords = bed.make_bed_seqs(
    bed_file, options.genome_fasta, seq_length, stranded=True)

  # filter for worker SNPs
  if options.processes is not None:
    # NOTE(review): worker_index is only bound in the 4-argument (multi
    # worker) invocation; a 2- or 3-argument invocation with processes set
    # would raise NameError here -- presumably only workers set processes.
    worker_bounds = np.linspace(0, len(seqs_dna), options.processes+1, dtype='int')
    seqs_dna = seqs_dna[worker_bounds[worker_index]:worker_bounds[worker_index+1]]
    seqs_coords = seqs_coords[worker_bounds[worker_index]:worker_bounds[worker_index+1]]

  num_seqs = len(seqs_dna)

  # determine mutation region limits
  seq_mid = seq_length // 2
  mut_start = seq_mid - options.mut_up
  mut_end = mut_start + options.mut_len

  # make sequence generator
  seqs_gen = satmut_gen(seqs_dna, mut_start, mut_end)

  #################################################################
  # setup output

  scores_h5_file = '%s/scores.h5' % options.out_dir
  if os.path.isfile(scores_h5_file):
    os.remove(scores_h5_file)
  scores_h5 = h5py.File(scores_h5_file, 'w')
  scores_h5.create_dataset('seqs', dtype='bool',
      shape=(num_seqs, options.mut_len, 4))
  for sad_stat in options.sad_stats:
    scores_h5.create_dataset(sad_stat, dtype='float16',
        shape=(num_seqs, options.mut_len, 4, num_targets))

  # store mutagenesis sequence coordinates, mapped back to genome strand
  scores_chr = []
  scores_start = []
  scores_end = []
  scores_strand = []
  for seq_chr, seq_start, seq_end, seq_strand in seqs_coords:
    scores_chr.append(seq_chr)
    scores_strand.append(seq_strand)
    if seq_strand == '+':
      score_start = seq_start + mut_start
      score_end = score_start + options.mut_len
    else:
      # minus strand: the mutation window counts back from the sequence end
      score_end = seq_end - mut_start
      score_start = score_end - options.mut_len
    scores_start.append(score_start)
    scores_end.append(score_end)

  scores_h5.create_dataset('chr', data=np.array(scores_chr, dtype='S'))
  scores_h5.create_dataset('start', data=np.array(scores_start))
  scores_h5.create_dataset('end', data=np.array(scores_end))
  scores_h5.create_dataset('strand', data=np.array(scores_strand, dtype='S'))

  # one reference prediction plus 3 alternate alleles per mutated position
  preds_per_seq = 1 + 3*options.mut_len

  # single scoring worker; the HDF5 file is not safe for concurrent writes
  score_threads = []
  score_queue = Queue()
  for i in range(1):
    sw = ScoreWorker(score_queue, scores_h5, options.sad_stats,
                     mut_start, mut_end)
    sw.start()
    score_threads.append(sw)

  #################################################################
  # predict scores, write output

  # find center
  center_start = preds_length // 2
  if preds_length % 2 == 0:
    center_end = center_start + 2
  else:
    center_end = center_start + 1

  # initialize predictions stream
  preds_stream = stream.PredStreamSonnet(seqnn_model, seqs_gen,
    rc=options.rc, shifts=options.shifts, species=options.species)

  # predictions index
  pi = 0

  for si in range(num_seqs):
    print('Predicting %d' % si, flush=True)

    # collect sequence predictions
    seq_preds_sum = []
    seq_preds_center = []
    seq_preds_scd = []
    # reference (unmutated) prediction; the first loop iteration below
    # re-reads the same stream index, so the reference is also stored
    preds_mut0 = preds_stream[pi]
    for spi in range(preds_per_seq):
      preds_mut = preds_stream[pi]
      preds_sum = preds_mut.sum(axis=0)
      seq_preds_sum.append(preds_sum)
      if 'center' in options.sad_stats:
        preds_center = preds_mut[center_start:center_end,:].sum(axis=0)
        seq_preds_center.append(preds_center)
      if 'scd' in options.sad_stats:
        preds_scd = np.sqrt(((preds_mut-preds_mut0)**2).sum(axis=0))
        seq_preds_scd.append(preds_scd)
      pi += 1
    seq_preds_sum = np.array(seq_preds_sum)
    seq_preds_center = np.array(seq_preds_center)
    seq_preds_scd = np.array(seq_preds_scd)

    # wait for previous to finish
    score_queue.join()

    # queue sequence for scoring
    seq_pred_stats = (seq_preds_sum, seq_preds_center, seq_preds_scd)
    score_queue.put((seqs_dna[si], seq_pred_stats, si))

    gc.collect()

  # finish queue
  print('Waiting for threads to finish.', flush=True)
  score_queue.join()

  # close output HDF5
  scores_h5.close()


################################################################################
# __main__
################################################################################
if __name__ == '__main__':
  main()
| apache-2.0 |
strahlc/exaile | plugins/somafm/__init__.py | 3 | 6699 | # Copyright (C) 2012 Rocco Aliberti
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
import logging
logger = logging.getLogger(__name__)
import os
from urllib2 import urlparse
import httplib
import socket
try:
import xml.etree.cElementTree as ETree
except:
import xml.etree.ElementTree as ETree
from xl import (
event,
main,
playlist,
xdg
)
from xl.radio import *
from xl.nls import gettext as _
from xlgui.panel import radio
STATION = None
def enable(exaile):
    """Plugin entry point: create the station now, or defer until loaded."""
    if not exaile.loading:
        _enable(None, exaile, None)
    else:
        event.add_callback(_enable, 'exaile_loaded')
def _enable(o1, exaile, o2):
    # Deferred initializer, also usable as the 'exaile_loaded' event callback;
    # o1/o2 are the unused positional slots of the event-callback signature.
    global STATION
    STATION = SomaFMRadioStation()
    exaile.radio.add_station(STATION)
def disable(exaile):
    # Tear down the station registered by _enable().
    global STATION
    exaile.radio.remove_station(STATION)
    STATION = None
def set_status(message, timeout = 0):
    # Forward status text to the radio panel's status area.
    radio.set_status(message, timeout)
class SomaFMRadioStation(RadioStation):
    """Exaile radio station backed by the somafm.com channel directory."""

    name = "somafm"

    def __init__(self):
        """
        Initializes the somafm radio station
        """
        self.user_agent = main.exaile().get_user_agent_string('somafm')
        self.somafm_url = 'http://somafm.com/'
        self.channels_xml_url = self.somafm_url + 'channels.xml'
        self.cache_file = os.path.join(xdg.get_cache_dir(), 'somafm.cache')
        # BUGFIX: this attribute was spelled 'channelist' (single 'l') here
        # and in _get_stations(), while get_lists() assigned 'channellist'.
        # The mismatch meant _get_stations() never saw the already-downloaded
        # channels.xml and re-fetched it on every call. One spelling now.
        self.channellist = ''
        self.data = {}
        self._load_cache()
        self.subs = {}
        self.playlists = {}
        self.playlist_id = 0
        logger.debug(self.user_agent)

    def get_document(self, url):
        """
        Connects to the server and retrieves the document
        """
        set_status(_('Contacting SomaFM server...'))
        hostinfo = urlparse.urlparse(url)
        try:
            c = httplib.HTTPConnection(hostinfo.netloc, timeout=20)
        except TypeError:
            # Python < 2.6: HTTPConnection has no timeout parameter
            c = httplib.HTTPConnection(hostinfo.netloc)
        try:
            c.request('GET', hostinfo.path, headers={'User-Agent':
                self.user_agent})
            response = c.getresponse()
        except (socket.timeout, socket.error):
            raise radio.RadioException(_('Error connecting to SomaFM server.'))
        if response.status != 200:
            raise radio.RadioException(_('Error connecting to SomaFM server.'))
        document = response.read()
        c.close()
        set_status('')
        return document

    def _load_cache(self):
        """
        Loads somafm data from cache
        """
        self.data = {}
        if os.path.isfile(self.cache_file):
            tree = ETree.parse(self.cache_file)
            for channel in tree.findall('channel'):
                self.data[channel.get("id")] = channel.get("name")

    def _save_cache(self):
        """
        Saves cache data
        """
        channellist = ETree.Element('channellist')
        for id, name in self.data.items():
            ETree.SubElement(channellist, 'channel', id=id, name=name)
        with open(self.cache_file, 'w') as h:
            h.write('<?xml version="1.0" encoding="UTF-8"?>')
            h.write(ETree.tostring(channellist, 'utf-8'))

    def get_lists(self, no_cache=False):
        """
        Returns the rlists for somafm
        """
        if no_cache or not self.data:
            # fetch and parse channels.xml, then refresh the on-disk cache
            self.channellist = self.get_document(self.channels_xml_url)
            data = {}
            tree = ETree.fromstring(self.channellist)
            for channel in tree.findall('channel'):
                name = channel.find('title').text
                data[channel.get("id")] = name
            self.data = data
            self._save_cache()
        else:
            data = self.data

        rlists = []
        for id, name in data.items():
            rlist = RadioList(name, station=self)
            # bind id as a default argument so each lambda keeps its own id
            rlist.get_items = lambda no_cache, id=id: \
                self._get_subrlists(id=id, no_cache=no_cache)
            rlists.append(rlist)

        # sort channels alphabetically by name
        sort_list = [(item.name, item) for item in rlists]
        sort_list.sort()
        rlists = [item[1] for item in sort_list]
        self.rlists = rlists
        return rlists

    def _get_subrlists(self, id, no_cache=False):
        """
        Gets the subrlists for a rlist
        """
        if no_cache or id not in self.subs:
            rlists = self._get_stations(id)
            # sort streams alphabetically by display name
            sort_list = [(item.name, item) for item in rlists]
            sort_list.sort()
            rlists = [item[1] for item in sort_list]
            self.subs[id] = rlists
        return self.subs[id]

    def _get_playlist(self, url, playlist_id):
        """
        Gets the playlist for the given url and id
        """
        if playlist_id not in self.playlists:
            set_status(_('Contacting SomaFM server...'))
            try:
                self.playlists[playlist_id] = playlist.import_playlist(url)
            except Exception:
                set_status(_("Error importing playlist"))
                logger.exception("Error importing playlist")
            set_status('')
        # NOTE(review): if the import above failed, this lookup raises
        # KeyError; callers apparently treat any exception as "no playlist".
        return self.playlists[playlist_id]

    def _get_stations(self, id):
        """
        Builds the list of stream RadioItems for the channel with this id
        """
        # reuse the cached channels.xml document if one was already fetched
        if not self.channellist:
            self.channellist = self.get_document(self.channels_xml_url)
        tree = ETree.fromstring(self.channellist)
        channel = tree.find('.//channel[@id="%s"]' % id)
        plss = channel.findall('.//*[@format]')
        rlists = []
        for pls in plss:
            # e.g. tag 'fastpls' -> stream type 'fast'
            stream_type = pls.tag.replace('pls','')
            stream_format = pls.attrib['format'].upper()
            url = pls.text
            display_name = stream_format + " - " + stream_type
            rlist = RadioItem(display_name, station=self)
            rlist.format = stream_format
            # bind url and the current playlist id as default arguments
            rlist.get_playlist = lambda url=url, \
                playlist_id=self.playlist_id: \
                self._get_playlist(url, playlist_id)
            self.playlist_id += 1
            rlists.append(rlist)
        return rlists

    def get_menu(self, parent):
        """Delegate context-menu construction to the radio panel."""
        return parent.get_menu()
| gpl-2.0 |
chirilo/airmozilla | airmozilla/manage/views/approvals.py | 9 | 3643 | from django import http
from django.contrib import messages
from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from django.db import transaction
from airmozilla.main.models import (
Approval,
Event,
SuggestedEvent
)
from airmozilla.manage import forms
from .decorators import staff_required, permission_required
@staff_required
@permission_required('main.change_approval')
def approvals(request):
    """List pending and recently processed approvals for the user's groups.

    Only approvals belonging to one of the request user's groups are shown;
    users without groups see empty lists.  Events that originated as
    suggested events get a lookup helper so the template can link back to
    the original suggestion.
    """
    user = request.user
    groups = user.groups.all()
    if groups.count():
        approvals = (
            Approval.objects
            .filter(
                group__in=user.groups.all(),
                processed=False
            )
            .exclude(
                event__status=Event.STATUS_REMOVED
            )
            .select_related('event', 'group', 'event__creator')
        )
        recent = (
            Approval.objects
            .filter(
                group__in=user.groups.all(),
                processed=True
            )
            .select_related('event', 'user', 'group')
            .order_by('-processed_time')[:25]
        )
    else:
        approvals = recent = Approval.objects.none()

    # Of all the events in the approvals queryset, make a
    # dict of accepted events' IDs to the suggested event.
    approval_events = approvals.values_list('event_id', flat=True)
    all_suggestedevents = {}
    qs = SuggestedEvent.objects.filter(accepted_id__in=approval_events)
    for each in qs:
        all_suggestedevents[each.accepted_id] = each

    def get_suggested_event(event):
        """Return the original suggested event or None."""
        # dict.get replaces the previous try/except KeyError dance
        return all_suggestedevents.get(event.id)

    context = {
        'approvals': approvals,
        'recent': recent,
        'user_groups': groups,
        'get_suggested_event': get_suggested_event,
    }
    return render(request, 'manage/approvals.html', context)
@staff_required
@permission_required('main.change_approval')
@transaction.atomic
def approval_review(request, id):
    """Approve or deny an event on behalf of one of the user's groups."""
    approval = get_object_or_404(Approval, id=id)
    # Only members of the approval's group may review it.
    if approval.group not in request.user.groups.all():
        return redirect('manage:approvals')

    if request.method != 'POST':
        form = forms.ApprovalForm(instance=approval)
    else:
        form = forms.ApprovalForm(request.POST, instance=approval)
        reviewed = form.save(commit=False)
        reviewed.approved = 'approve' in request.POST
        reviewed.processed = True
        reviewed.user = request.user
        reviewed.save()
        messages.info(request, '"%s" approval saved.' % reviewed.event.title)
        return redirect('manage:approvals')

    try:
        suggested_event = SuggestedEvent.objects.get(accepted=approval.event)
    except SuggestedEvent.DoesNotExist:
        suggested_event = None

    context = {
        'approval': approval,
        'form': form,
        'suggested_event': suggested_event,
    }
    return render(request, 'manage/approval_review.html', context)
@require_POST
@staff_required
@permission_required('main.change_approval')
@transaction.atomic
def approval_reconsider(request):
    """Put a processed approval back into the pending review queue."""
    approval_id = request.POST.get('id')
    if not approval_id:
        return http.HttpResponseBadRequest('no id')
    try:
        approval = get_object_or_404(Approval, id=approval_id)
    except ValueError:
        # e.g. a non-numeric id value
        return http.HttpResponseBadRequest('invalid id')

    # Reset the review state so the approval shows up as unprocessed again.
    approval.processed = False
    approval.approved = False
    approval.comment = ''
    approval.save()
    return redirect('manage:approvals')
| bsd-3-clause |
mhvk/astropy | astropy/tests/command.py | 8 | 13858 | """Implements the wrapper for the Astropy test runner.
This is for backward-compatibility for other downstream packages and can be removed
once astropy-helpers has reached end-of-life.
"""
import os
import stat
import shutil
import subprocess
import sys
import tempfile
from contextlib import contextmanager
from setuptools import Command
from astropy.logger import log
@contextmanager
def _suppress_stdout():
    '''
    A context manager that temporarily replaces ``sys.stdout`` with
    ``os.devnull``, silencing anything printed inside the block.

    Used later when installing a temporary copy of astropy to avoid a
    very verbose output.
    '''
    devnull = open(os.devnull, "w")
    saved_stdout = sys.stdout
    sys.stdout = devnull
    try:
        yield
    finally:
        # Always restore stdout and release the devnull handle, even if the
        # body raised.
        sys.stdout = saved_stdout
        devnull.close()
class FixRemoteDataOption(type):
    """
    This metaclass is used to catch cases where the user is running the tests
    with --remote-data. We've now changed the --remote-data option so that it
    takes arguments, but we still want --remote-data to work as before and to
    enable all remote tests. With this metaclass, we can modify sys.argv
    before distutils/setuptools try to parse the command-line options.
    """
    def __init__(cls, name, bases, dct):
        # Rewrite each bare legacy flag into the modern argument-taking form
        # before option parsing ever sees it.
        for legacy, replacement in (('--remote-data', '--remote-data=any'),
                                    ('-R', '-R=any')):
            try:
                position = sys.argv.index(legacy)
            except ValueError:
                continue
            sys.argv[position] = replacement

        return super().__init__(name, bases, dct)
class AstropyTest(Command, metaclass=FixRemoteDataOption):
    """Implements the ``setup.py test`` command for astropy packages.

    Installs the package into a temporary directory and runs the test suite
    there in a subprocess, so the tests exercise the installed layout
    (entry points included) rather than the source tree.
    """

    description = 'Run the tests for this package'

    user_options = [
        ('package=', 'P',
         "The name of a specific package to test, e.g. 'io.fits' or 'utils'. "
         "Accepts comma separated string to specify multiple packages. "
         "If nothing is specified, all default tests are run."),
        ('test-path=', 't',
         'Specify a test location by path. If a relative path to a .py file, '
         'it is relative to the built package, so e.g., a leading "astropy/" '
         'is necessary. If a relative path to a .rst file, it is relative to '
         'the directory *below* the --docs-path directory, so a leading '
         '"docs/" is usually necessary. May also be an absolute path.'),
        ('verbose-results', 'V',
         'Turn on verbose output from pytest.'),
        ('plugins=', 'p',
         'Plugins to enable when running pytest.'),
        ('pastebin=', 'b',
         "Enable pytest pastebin output. Either 'all' or 'failed'."),
        ('args=', 'a',
         'Additional arguments to be passed to pytest.'),
        ('remote-data=', 'R', 'Run tests that download remote data. Should be '
         'one of none/astropy/any (defaults to none).'),
        ('pep8', '8',
         'Enable PEP8 checking and disable regular tests. '
         'Requires the pytest-pep8 plugin.'),
        ('pdb', 'd',
         'Start the interactive Python debugger on errors.'),
        ('coverage', 'c',
         'Create a coverage report. Requires the coverage package.'),
        ('open-files', 'o', 'Fail if any tests leave files open. Requires the '
         'psutil package.'),
        ('parallel=', 'j',
         'Run the tests in parallel on the specified number of '
         'CPUs. If "auto", all the cores on the machine will be '
         'used. Requires the pytest-xdist plugin.'),
        ('docs-path=', None,
         'The path to the documentation .rst files. If not provided, and '
         'the current directory contains a directory called "docs", that '
         'will be used.'),
        ('skip-docs', None,
         "Don't test the documentation .rst files."),
        ('repeat=', None,
         'How many times to repeat each test (can be used to check for '
         'sporadic failures).'),
        ('temp-root=', None,
         'The root directory in which to create the temporary testing files. '
         'If unspecified the system default is used (e.g. /tmp) as explained '
         'in the documentation for tempfile.mkstemp.'),
        ('verbose-install', None,
         'Turn on terminal output from the installation of astropy in a '
         'temporary folder.'),
        ('readonly', None,
         'Make the temporary installation being tested read-only.')
    ]

    # Overridden by subclasses in affiliated packages with the actual
    # importable package name.
    package_name = ''

    def initialize_options(self):
        """Set the default value for every supported option."""
        self.package = None
        self.test_path = None
        self.verbose_results = False
        self.plugins = None
        self.pastebin = None
        self.args = None
        self.remote_data = 'none'
        self.pep8 = False
        self.pdb = False
        self.coverage = False
        self.open_files = False
        self.parallel = 0
        self.docs_path = None
        self.skip_docs = False
        self.repeat = None
        self.temp_root = None
        self.verbose_install = False
        self.readonly = False

    def finalize_options(self):
        # Normally we would validate the options here, but that's handled in
        # run_tests
        pass

    def generate_testing_command(self):
        """
        Build a Python script to run the tests.
        """

        cmd_pre = ''  # Commands to run before the test function
        cmd_post = ''  # Commands to run after the test function

        if self.coverage:
            pre, post = self._generate_coverage_commands()
            cmd_pre += pre
            cmd_post += post

        # _ASTROPY_TEST_ tells the package it is being run from this command
        set_flag = "import builtins; builtins._ASTROPY_TEST_ = True"

        cmd = ('{cmd_pre}{0}; import {1.package_name}, sys; result = ('
               '{1.package_name}.test('
               'package={1.package!r}, '
               'test_path={1.test_path!r}, '
               'args={1.args!r}, '
               'plugins={1.plugins!r}, '
               'verbose={1.verbose_results!r}, '
               'pastebin={1.pastebin!r}, '
               'remote_data={1.remote_data!r}, '
               'pep8={1.pep8!r}, '
               'pdb={1.pdb!r}, '
               'open_files={1.open_files!r}, '
               'parallel={1.parallel!r}, '
               'docs_path={1.docs_path!r}, '
               'skip_docs={1.skip_docs!r}, '
               'add_local_eggs_to_path=True, '  # see _build_temp_install below
               'repeat={1.repeat!r})); '
               '{cmd_post}'
               'sys.exit(result)')
        return cmd.format(set_flag, self, cmd_pre=cmd_pre, cmd_post=cmd_post)

    def run(self):
        """
        Run the tests!
        """

        # Install the runtime dependencies.
        if self.distribution.install_requires:
            self.distribution.fetch_build_eggs(self.distribution.install_requires)

        # Ensure there is a doc path
        if self.docs_path is None:
            cfg_docs_dir = self.distribution.get_option_dict('build_docs').get('source_dir', None)

            # Some affiliated packages use this.
            # See astropy/package-template#157
            if cfg_docs_dir is not None and os.path.exists(cfg_docs_dir[1]):
                self.docs_path = os.path.abspath(cfg_docs_dir[1])

            # fall back on a default path of "docs"
            elif os.path.exists('docs'):  # pragma: no cover
                self.docs_path = os.path.abspath('docs')

        # Build a testing install of the package
        self._build_temp_install()

        # Install the test dependencies
        # NOTE: we do this here after _build_temp_install because there is
        # a weird bug which occurs if psutil is installed in this way before
        # astropy is built, Cython can have segmentation fault. Strange, eh?
        if self.distribution.tests_require:
            self.distribution.fetch_build_eggs(self.distribution.tests_require)

        # Copy any additional dependencies that may have been installed via
        # tests_requires or install_requires. We then pass the
        # add_local_eggs_to_path=True option to package.test() to make sure the
        # eggs get included in the path.
        if os.path.exists('.eggs'):
            shutil.copytree('.eggs', os.path.join(self.testing_path, '.eggs'))

        # This option exists so that we can make sure that the tests don't
        # write to an installed location.
        if self.readonly:
            log.info('changing permissions of temporary installation to read-only')
            self._change_permissions_testing_path(writable=False)

        # Run everything in a try: finally: so that the tmp dir gets deleted.
        try:
            # Construct this modules testing command
            cmd = self.generate_testing_command()

            # Run the tests in a subprocess--this is necessary since
            # new extension modules may have appeared, and this is the
            # easiest way to set up a new environment
            testproc = subprocess.Popen(
                [sys.executable, '-c', cmd],
                cwd=self.testing_path, close_fds=False)
            retcode = testproc.wait()
        except KeyboardInterrupt:
            import signal
            # If a keyboard interrupt is handled, pass it to the test
            # subprocess to prompt pytest to initiate its teardown
            # NOTE(review): if the interrupt fires before Popen returns,
            # 'testproc' is unbound here and this raises NameError -- confirm.
            testproc.send_signal(signal.SIGINT)
            retcode = testproc.wait()
        finally:
            # Remove temporary directory
            if self.readonly:
                self._change_permissions_testing_path(writable=True)
            shutil.rmtree(self.tmp_dir)

        raise SystemExit(retcode)

    def _build_temp_install(self):
        """
        Install the package and to a temporary directory for the purposes of
        testing. This allows us to test the install command, include the
        entry points, and also avoids creating pyc and __pycache__ directories
        inside the build directory
        """

        # On OSX the default path for temp files is under /var, but in most
        # cases on OSX /var is actually a symlink to /private/var; ensure we
        # dereference that link, because pytest is very sensitive to relative
        # paths...

        tmp_dir = tempfile.mkdtemp(prefix=self.package_name + '-test-',
                                   dir=self.temp_root)
        self.tmp_dir = os.path.realpath(tmp_dir)

        log.info(f'installing to temporary directory: {self.tmp_dir}')

        # We now install the package to the temporary directory. We do this
        # rather than build and copy because this will ensure that e.g. entry
        # points work.
        self.reinitialize_command('install')
        install_cmd = self.distribution.get_command_obj('install')
        install_cmd.prefix = self.tmp_dir
        if self.verbose_install:
            self.run_command('install')
        else:
            with _suppress_stdout():
                self.run_command('install')

        # We now get the path to the site-packages directory that was created
        # inside self.tmp_dir
        install_cmd = self.get_finalized_command('install')
        self.testing_path = install_cmd.install_lib

        # Ideally, docs_path is set properly in run(), but if it is still
        # not set here, do not pretend it is, otherwise bad things happen.
        # See astropy/package-template#157
        if self.docs_path is not None:
            new_docs_path = os.path.join(self.testing_path,
                                         os.path.basename(self.docs_path))
            shutil.copytree(self.docs_path, new_docs_path)
            self.docs_path = new_docs_path

        shutil.copy('setup.cfg', self.testing_path)

    def _change_permissions_testing_path(self, writable=False):
        # Recursively toggle user write permission on the temporary install;
        # directories additionally keep the execute (search) bit.
        if writable:
            basic_flags = stat.S_IRUSR | stat.S_IWUSR
        else:
            basic_flags = stat.S_IRUSR

        for root, dirs, files in os.walk(self.testing_path):
            for dirname in dirs:
                os.chmod(os.path.join(root, dirname), basic_flags | stat.S_IXUSR)
            for filename in files:
                os.chmod(os.path.join(root, filename), basic_flags)

    def _generate_coverage_commands(self):
        """
        This method creates the post and pre commands if coverage is to be
        generated
        """
        if self.parallel != 0:
            raise ValueError(
                "--coverage can not be used with --parallel")

        try:
            import coverage  # pylint: disable=W0611
        except ImportError:
            raise ImportError(
                "--coverage requires that the coverage package is "
                "installed.")

        # Don't use get_pkg_data_filename here, because it
        # requires importing astropy.config and thus screwing
        # up coverage results for those packages.
        coveragerc = os.path.join(
            self.testing_path, self.package_name.replace('.', '/'),
            'tests', 'coveragerc')

        with open(coveragerc, 'r') as fd:
            coveragerc_content = fd.read()

        coveragerc_content = coveragerc_content.replace(
            "{packagename}", self.package_name.replace('.', '/'))
        tmp_coveragerc = os.path.join(self.tmp_dir, 'coveragerc')
        with open(tmp_coveragerc, 'wb') as tmp:
            tmp.write(coveragerc_content.encode('utf-8'))

        cmd_pre = (
            'import coverage; '
            'cov = coverage.coverage(data_file=r"{}", config_file=r"{}"); '
            'cov.start();'.format(
                os.path.abspath(".coverage"), os.path.abspath(tmp_coveragerc)))
        cmd_post = (
            'cov.stop(); '
            'from astropy.tests.helper import _save_coverage; '
            '_save_coverage(cov, result, r"{}", r"{}");'.format(
                os.path.abspath('.'), os.path.abspath(self.testing_path)))

        return cmd_pre, cmd_post
| bsd-3-clause |
ryfeus/lambda-packs | H2O/ArchiveH2O/requests/auth.py | 73 | 10206 | # -*- coding: utf-8 -*-
"""
requests.auth
~~~~~~~~~~~~~
This module contains the authentication handlers for Requests.
"""
import os
import re
import time
import hashlib
import threading
import warnings
from base64 import b64encode
from .compat import urlparse, str, basestring
from .cookies import extract_cookies_to_jar
from ._internal_utils import to_native_string
from .utils import parse_dict_header
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
def _basic_auth_str(username, password):
    """Returns a Basic Auth string."""

    # "I want us to put a big-ol' comment on top of it that
    # says that this behaviour is dumb but we need to preserve
    # it because people are relying on it."
    # - Lukasa
    #
    # Legacy behaviour: non-string credentials (e.g. ints) are coerced to
    # str with a DeprecationWarning. This will be removed in 3.0.0.
    def _coerce(value, noun):
        # Warn on anything that is not already str/bytes, then stringify it.
        if not isinstance(value, basestring):
            warnings.warn(
                "Non-string {noun}s will no longer be supported in Requests "
                "3.0.0. Please convert the object you've passed in ({!r}) to "
                "a string or bytes object in the near future to avoid "
                "problems.".format(value, noun=noun),
                category=DeprecationWarning,
            )
            value = str(value)
        # Encode text to bytes so b64encode accepts it.
        if isinstance(value, str):
            value = value.encode('latin1')
        return value

    credentials = b':'.join((_coerce(username, 'username'),
                             _coerce(password, 'password')))
    return 'Basic ' + to_native_string(b64encode(credentials).strip())
class AuthBase(object):
    """Base class that all auth implementations derive from"""

    def __call__(self, r):
        # Subclasses must override __call__ to attach credentials to the
        # prepared request ``r`` and return it.
        raise NotImplementedError('Auth hooks must be callable.')
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __eq__(self, other):
        # Equal when both credential fields match; getattr tolerates
        # comparison against arbitrary objects.
        same_username = self.username == getattr(other, 'username', None)
        same_password = self.password == getattr(other, 'password', None)
        return same_username and same_password

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        # Attach the encoded credentials to the Authorization header.
        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
        return r
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        # Same credential handling as HTTPBasicAuth, but placed on the
        # Proxy-Authorization header (consumed by proxies, not the origin).
        r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
        return r
class HTTPDigestAuth(AuthBase):
"""Attaches HTTP Digest Authentication to the given Request object."""
def __init__(self, username, password):
self.username = username
self.password = password
# Keep state in per-thread local storage
self._thread_local = threading.local()
def init_per_thread_state(self):
# Ensure state is initialized just once per-thread
if not hasattr(self._thread_local, 'init'):
self._thread_local.init = True
self._thread_local.last_nonce = ''
self._thread_local.nonce_count = 0
self._thread_local.chal = {}
self._thread_local.pos = None
self._thread_local.num_401_calls = None
def build_digest_header(self, method, url):
"""
:rtype: str
"""
realm = self._thread_local.chal['realm']
nonce = self._thread_local.chal['nonce']
qop = self._thread_local.chal.get('qop')
algorithm = self._thread_local.chal.get('algorithm')
opaque = self._thread_local.chal.get('opaque')
hash_utf8 = None
if algorithm is None:
_algorithm = 'MD5'
else:
_algorithm = algorithm.upper()
# lambdas assume digest modules are imported at the top level
if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
def md5_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.md5(x).hexdigest()
hash_utf8 = md5_utf8
elif _algorithm == 'SHA':
def sha_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.sha1(x).hexdigest()
hash_utf8 = sha_utf8
elif _algorithm == 'SHA-256':
def sha256_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.sha256(x).hexdigest()
hash_utf8 = sha256_utf8
elif _algorithm == 'SHA-512':
def sha512_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.sha512(x).hexdigest()
hash_utf8 = sha512_utf8
KD = lambda s, d: hash_utf8("%s:%s" % (s, d))
if hash_utf8 is None:
return None
# XXX not implemented yet
entdig = None
p_parsed = urlparse(url)
#: path is request-uri defined in RFC 2616 which should not be empty
path = p_parsed.path or "/"
if p_parsed.query:
path += '?' + p_parsed.query
A1 = '%s:%s:%s' % (self.username, realm, self.password)
A2 = '%s:%s' % (method, path)
HA1 = hash_utf8(A1)
HA2 = hash_utf8(A2)
if nonce == self._thread_local.last_nonce:
self._thread_local.nonce_count += 1
else:
self._thread_local.nonce_count = 1
ncvalue = '%08x' % self._thread_local.nonce_count
s = str(self._thread_local.nonce_count).encode('utf-8')
s += nonce.encode('utf-8')
s += time.ctime().encode('utf-8')
s += os.urandom(8)
cnonce = (hashlib.sha1(s).hexdigest()[:16])
if _algorithm == 'MD5-SESS':
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
if not qop:
respdig = KD(HA1, "%s:%s" % (nonce, HA2))
elif qop == 'auth' or 'auth' in qop.split(','):
noncebit = "%s:%s:%s:%s:%s" % (
nonce, ncvalue, cnonce, 'auth', HA2
)
respdig = KD(HA1, noncebit)
else:
# XXX handle auth-int.
return None
self._thread_local.last_nonce = nonce
# XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
'response="%s"' % (self.username, realm, nonce, path, respdig)
if opaque:
base += ', opaque="%s"' % opaque
if algorithm:
base += ', algorithm="%s"' % algorithm
if entdig:
base += ', digest="%s"' % entdig
if qop:
base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)
return 'Digest %s' % (base)
def handle_redirect(self, r, **kwargs):
    """On a redirect response, reset the per-thread 401 retry counter."""
    if not r.is_redirect:
        return
    self._thread_local.num_401_calls = 1
def handle_401(self, r, **kwargs):
    """
    Takes the given response and tries digest-auth, if needed.

    If the response is a Digest 401 challenge and we have not already
    retried, the request is re-sent once with an Authorization header
    built from the challenge; otherwise the response is returned as-is.

    :rtype: requests.Response
    """
    # If response is not 4xx, do not auth
    # See https://github.com/requests/requests/issues/3772
    if not 400 <= r.status_code < 500:
        self._thread_local.num_401_calls = 1
        return r

    if self._thread_local.pos is not None:
        # Rewind the file position indicator of the body to where
        # it was to resend the request.
        r.request.body.seek(self._thread_local.pos)
    s_auth = r.headers.get('www-authenticate', '')

    # num_401_calls < 2 caps us at a single retry: if the retried request
    # also came back 401, fall through and surrender that response.
    if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:

        self._thread_local.num_401_calls += 1
        # Strip the leading "Digest " scheme token (case-insensitively),
        # then parse the comma-separated challenge parameters into a dict.
        pat = re.compile(r'digest ', flags=re.IGNORECASE)
        self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))

        # Consume content and release the original connection
        # to allow our new request to reuse the same one.
        r.content
        r.close()
        prep = r.request.copy()
        extract_cookies_to_jar(prep._cookies, r.request, r.raw)
        prep.prepare_cookies(prep._cookies)

        prep.headers['Authorization'] = self.build_digest_header(
            prep.method, prep.url)
        # Send the retry on the same connection and record the original
        # 401 response in the new response's history.
        _r = r.connection.send(prep, **kwargs)
        _r.history.append(r)
        _r.request = prep

        return _r

    self._thread_local.num_401_calls = 1
    return r
def __call__(self, r):
    """Attach digest-auth hooks (and a pre-emptive header, when possible)."""
    # Make sure this thread's state slots exist before touching them.
    self.init_per_thread_state()

    # A nonce cached from a previous exchange lets us authenticate
    # pre-emptively instead of waiting for a 401 challenge.
    if self._thread_local.last_nonce:
        r.headers['Authorization'] = self.build_digest_header(r.method, r.url)

    # Remember where the body stream currently is so a retried request can
    # be rewound to the same position.
    try:
        body_position = r.body.tell()
    except AttributeError:
        # In the case of HTTPDigestAuth being reused and the body of the
        # previous request was a file-like object, pos has the file
        # position of the previous body. Ensure it's set to None.
        body_position = None
    self._thread_local.pos = body_position

    # handle_401 performs the retry; handle_redirect resets its counter.
    for hook in (self.handle_401, self.handle_redirect):
        r.register_hook('response', hook)
    self._thread_local.num_401_calls = 1
    return r
def __eq__(self, other):
    """Two digest-auth handlers are equal when their credentials match."""
    same_username = self.username == getattr(other, 'username', None)
    same_password = self.password == getattr(other, 'password', None)
    return bool(same_username and same_password)
def __ne__(self, other):
    # Python 2 does not derive __ne__ from __eq__ automatically, so it is
    # spelled out here; on Python 3 this matches the default behavior.
    return not self == other
| mit |
brokenjacobs/ansible | lib/ansible/constants.py | 8 | 2934 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from string import ascii_letters, digits
from ansible.module_utils._text import to_text
from ansible.config.manager import ConfigManager
# Module-wide configuration manager instance, created at import time.
config = ConfigManager()

# Generate constants from config
# Every setting known to the config system is promoted to a module-level
# attribute (e.g. constants.DEFAULT_FORKS) via the module namespace dict.
for setting in config.data.get_settings():
    vars()[setting.name] = setting.value
def mk_boolean(value):
    '''Deprecated shim: implementation moved to ConfigManager.make_boolean().'''
    return config.make_boolean(value)
# ### CONSTANTS ### yes, actual ones

# File extensions never considered modules during module search.
BLACKLIST_EXTS = ('.pyc', '.pyo', '.swp', '.bak', '~', '.rpm', '.md', '.txt')
# Privilege-escalation methods Ansible knows how to drive.
BECOME_METHODS = ['sudo', 'su', 'pbrun', 'pfexec', 'doas', 'dzdo', 'ksu', 'runas', 'pmrun']
# Per-method output emitted when the become password is wrong; an empty
# string means the method produces no recognizable error text.
# NOTE(review): 'runas' is listed in BECOME_METHODS but has no entry here
# or in BECOME_MISSING_STRINGS -- confirm whether that is intentional.
BECOME_ERROR_STRINGS = {
    'sudo': 'Sorry, try again.',
    'su': 'Authentication failure',
    'pbrun': '',
    'pfexec': '',
    'doas': 'Permission denied',
    'dzdo': '',
    'ksu': 'Password incorrect',
    'pmrun': 'You are not permitted to run this command'
}  # FIXME: deal with i18n
# Per-method output emitted when a required password was not supplied.
BECOME_MISSING_STRINGS = {
    'sudo': 'sorry, a password is required to run sudo',
    'su': '',
    'pbrun': '',
    'pfexec': '',
    'doas': 'Authorization required',
    'dzdo': '',
    'ksu': 'No password given',
    'pmrun': ''
}  # FIXME: deal with i18n
BOOL_TRUE = config.data.BOOL_TRUE
DEFAULT_BECOME_PASS = None
DEFAULT_PASSWORD_CHARS = to_text(ascii_letters + digits + ".,:-_", errors='strict')  # characters included in auto-generated passwords
DEFAULT_SUDO_PASS = None
DEFAULT_REMOTE_PASS = None
DEFAULT_SUBSET = None
DEFAULT_SU_PASS = None
IGNORE_FILES = ["COPYING", "CONTRIBUTING", "LICENSE", "README", "VERSION", "GUIDELINES"]  # ignore during module search
# Result keys reserved for internal bookkeeping.
INTERNAL_RESULT_KEYS = ['add_host', 'add_group']
# Addresses always treated as the control machine itself.
LOCALHOST = frozenset(['127.0.0.1', 'localhost', '::1'])
MODULE_REQUIRE_ARGS = ['command', 'win_command', 'shell', 'win_shell', 'raw', 'script']
# Modules whose output must not be parsed as JSON.
MODULE_NO_JSON = ['command', 'win_command', 'shell', 'win_shell', 'raw']
RESTRICTED_RESULT_KEYS = ['ansible_rsync_path', 'ansible_playbook_python']
TREE_DIR = None
VAULT_VERSION_MIN = 1.0
VAULT_VERSION_MAX = 1.0
YAML_FILENAME_EXTENSIONS = [".yml", ".yaml", ".json"]  # check all of these extensions when looking for 'variable' files which should be YAML or JSON.
| gpl-3.0 |
dhimmel/networkx | networkx/convert_matrix.py | 13 | 33243 | """Functions to convert NetworkX graphs to and from numpy/scipy matrices.
The preferred way of converting data to a NetworkX graph is through the
graph constuctor. The constructor calls the to_networkx_graph() function
which attempts to guess the input type and convert it automatically.
Examples
--------
Create a 10 node random graph from a numpy matrix
>>> import numpy
>>> a = numpy.reshape(numpy.random.random_integers(0,1,size=100),(10,10))
>>> D = nx.DiGraph(a)
or equivalently
>>> D = nx.to_networkx_graph(a,create_using=nx.DiGraph())
See Also
--------
nx_pygraphviz, nx_pydot
"""
# Copyright (C) 2006-2014 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
import warnings
import itertools
import networkx as nx
from networkx.convert import _prep_create_using
from networkx.utils import not_implemented_for
# Module authorship metadata (joined with newlines, one author per line).
__author__ = """\n""".join(['Aric Hagberg <aric.hagberg@gmail.com>',
                            'Pieter Swart (swart@lanl.gov)',
                            'Dan Schult(dschult@colgate.edu)'])

# Public API of this module; star-imports expose exactly these names.
__all__ = ['from_numpy_matrix', 'to_numpy_matrix',
           'from_pandas_dataframe', 'to_pandas_dataframe',
           'to_numpy_recarray',
           'from_scipy_sparse_matrix', 'to_scipy_sparse_matrix']
def to_pandas_dataframe(G, nodelist=None, multigraph_weight=sum, weight='weight', nonedge=0.0):
    """Return the graph adjacency matrix as a Pandas DataFrame.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the Pandas DataFrame.

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in `nodelist`.
        If `nodelist` is None, then the ordering is produced by G.nodes().

    multigraph_weight : {sum, min, max}, optional
        An operator that determines how weights in multigraphs are handled.
        The default is to sum the weights of the multiple edges.

    weight : string or None, optional
        The edge attribute that holds the numerical value used for
        the edge weight.  If an edge does not have that attribute, then the
        value 1 is used instead.

    nonedge : float, optional
        The matrix values corresponding to nonedges are typically set to zero.
        However, this could be undesirable if there are matrix values
        corresponding to actual edges that also have the value zero. If so,
        one might prefer nonedges to have some other value, such as nan.

    Returns
    -------
    df : Pandas DataFrame
       Graph adjacency matrix

    Notes
    -----
    When `nodelist` does not contain every node in `G`, the matrix is built
    from the subgraph of `G` that is induced by the nodes in `nodelist`.

    Self-loop edges contribute their weight (or 1) once to the diagonal;
    multiply the diagonal by 2 afterwards if the doubled-weight convention
    is desired.

    See Also
    --------
    to_numpy_matrix, from_pandas_dataframe
    """
    import pandas as pd
    # All the heavy lifting (multigraph combination, nonedge filling) is
    # delegated to to_numpy_matrix; dtype and order keep the NumPy defaults.
    M = to_numpy_matrix(G, nodelist, None, None, multigraph_weight, weight,
                        nonedge)
    if nodelist is None:
        nodelist = G.nodes()
    # Fix: a `nodeset = set(nodelist)` local was previously computed here
    # and never used; it has been removed.
    df = pd.DataFrame(data=M, index=nodelist, columns=nodelist)
    return df
def from_pandas_dataframe(df, source, target, edge_attr=None,
                          create_using=None):
    """Return a graph from Pandas DataFrame.

    The Pandas DataFrame should contain at least two columns of node names and
    zero or more columns of node attributes. Each row will be processed as one
    edge instance.

    Note: This function iterates over DataFrame.values, which is not
    guaranteed to retain the data type across columns in the row. This is only
    a problem if your row is entirely numeric and a mix of ints and floats;
    in that case all values will be returned as floats. See the
    DataFrame.iterrows documentation for an example.

    Parameters
    ----------
    df : Pandas DataFrame
        An edge list representation of a graph

    source : str or int
        A valid column name (string or integer) for the source nodes (for the
        directed case).

    target : str or int
        A valid column name (string or integer) for the target nodes (for the
        directed case).

    edge_attr : str or int, iterable, True
        A valid column name (str or integer) or list of column names that will
        be used to retrieve items from the row and add them to the graph as
        edge attributes. If `True`, all of the remaining columns will be added.

    create_using : NetworkX graph
        Use specified graph for result.  The default is Graph()

    See Also
    --------
    to_pandas_dataframe
    """
    g = _prep_create_using(create_using)

    # Positional indices of the two node columns.
    src_i = df.columns.get_loc(source)
    tar_i = df.columns.get_loc(target)
    if edge_attr:
        # Build [(attribute name, column index), ...] for the requested
        # edge-attribute columns.
        if edge_attr is True:
            # All columns except the node columns.  Fix: compare with
            # equality rather than `is not` -- identity comparisons on
            # column labels rely on string interning and would wrongly
            # include a label that is equal to `source`/`target` but a
            # distinct object.
            edge_i = [(col, i) for i, col in enumerate(df.columns)
                      if col != source and col != target]
        # If a list or tuple of names is requested
        elif isinstance(edge_attr, (list, tuple)):
            edge_i = [(i, df.columns.get_loc(i)) for i in edge_attr]
        # If a single string or int is passed
        else:
            edge_i = [(edge_attr, df.columns.get_loc(edge_attr))]

        # Iteration on values returns the rows as Numpy arrays.
        for row in df.values:
            g.add_edge(row[src_i], row[tar_i], {i: row[j] for i, j in edge_i})
    # If no column names are given, then just return the edges.
    else:
        for row in df.values:
            g.add_edge(row[src_i], row[tar_i])

    return g
def to_numpy_matrix(G, nodelist=None, dtype=None, order=None,
                    multigraph_weight=sum, weight='weight', nonedge=0.0):
    """Return the graph adjacency matrix as a NumPy matrix.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the NumPy matrix.

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in ``nodelist``.
        If ``nodelist`` is None, then the ordering is produced by G.nodes().

    dtype : NumPy data type, optional
        A valid single NumPy data type used to initialize the array.
        This must be a simple type such as int or numpy.float64 and
        not a compound data type (see to_numpy_recarray).
        If None, then the NumPy default is used.

    order : {'C', 'F'}, optional
        Whether to store multidimensional data in C- or Fortran-contiguous
        (row- or column-wise) order in memory. If None, then the NumPy default
        is used.

    multigraph_weight : {sum, min, max}, optional
        An operator that determines how weights in multigraphs are handled.
        The default is to sum the weights of the multiple edges.

    weight : string or None optional (default = 'weight')
        The edge attribute that holds the numerical value used for
        the edge weight.  If an edge does not have that attribute, then the
        value 1 is used instead.

    nonedge : float (default = 0.0)
        The matrix values corresponding to nonedges are typically set to zero.
        However, this could be undesirable if there are matrix values
        corresponding to actual edges that also have the value zero. If so,
        one might prefer nonedges to have some other value, such as nan.

    Returns
    -------
    M : NumPy matrix
        Graph adjacency matrix

    Raises
    ------
    NetworkXError
        If ``nodelist`` contains duplicate nodes.
    ValueError
        If ``multigraph_weight`` is not one of sum, min, max.

    See Also
    --------
    to_numpy_recarray, from_numpy_matrix

    Notes
    -----
    When ``nodelist`` does not contain every node in ``G``, the matrix is
    built from the subgraph of ``G`` that is induced by the nodes in
    ``nodelist``.

    Self-loop edges contribute their weight (or 1) once to the diagonal;
    multiply the diagonal by 2 afterwards if the doubled-weight convention
    is desired.
    """
    import numpy as np
    if nodelist is None:
        nodelist = G.nodes()
    nodeset = set(nodelist)
    if len(nodelist) != len(nodeset):
        msg = "Ambiguous ordering: `nodelist` contained duplicates."
        raise nx.NetworkXError(msg)

    nlen = len(nodelist)
    undirected = not G.is_directed()
    index = dict(zip(nodelist, range(nlen)))

    # The matrix starts as all-nan rather than all-zero so that (1) a real
    # edge with weight 0 stays distinguishable from a nonedge, and (2) the
    # nan-aware combinators used for multigraphs (nansum/nanmin/nanmax)
    # treat "no edge seen yet" neutrally.  Any nan left over at the end is
    # a nonedge and is replaced with `nonedge`.
    if G.is_multigraph():
        # Handle MultiGraphs and MultiDiGraphs
        M = np.zeros((nlen, nlen), dtype=dtype, order=order) + np.nan
        # use numpy nan-aware operations
        operator = {sum: np.nansum, min: np.nanmin, max: np.nanmax}
        try:
            op = operator[multigraph_weight]
        except KeyError:
            # Fix: narrowed from a bare `except:` -- only an unsupported
            # combinator should be reported as a ValueError; anything else
            # (e.g. KeyboardInterrupt) must propagate.
            raise ValueError('multigraph_weight must be sum, min, or max')

        for u, v, attrs in G.edges_iter(data=True):
            if (u in nodeset) and (v in nodeset):
                i, j = index[u], index[v]
                e_weight = attrs.get(weight, 1)
                # Combine this parallel edge with whatever is already there.
                M[i, j] = op([e_weight, M[i, j]])
                if undirected:
                    M[j, i] = M[i, j]
    else:
        # Graph or DiGraph: one assignment per edge, much faster than above.
        M = np.zeros((nlen, nlen), dtype=dtype, order=order) + np.nan
        for u, nbrdict in G.adjacency_iter():
            for v, d in nbrdict.items():
                try:
                    M[index[u], index[v]] = d.get(weight, 1)
                except KeyError:
                    # This occurs when there are fewer desired nodes than
                    # there are nodes in the graph: len(nodelist) < len(G)
                    pass

    M[np.isnan(M)] = nonedge
    M = np.asmatrix(M)
    return M
def from_numpy_matrix(A, parallel_edges=False, create_using=None):
    """Return a graph from numpy matrix.

    The numpy matrix is interpreted as an adjacency matrix for the graph.

    Parameters
    ----------
    A : numpy matrix
        An adjacency matrix representation of a graph

    parallel_edges : Boolean
        If this is ``True``, ``create_using`` is a multigraph, and ``A`` is an
        integer matrix, then entry *(i, j)* in the matrix is interpreted as the
        number of parallel edges joining vertices *i* and *j* in the graph.
        If it is ``False``, then the entries in the adjacency matrix are
        interpreted as the weight of a single edge joining the vertices.

    create_using : NetworkX graph
        Use specified graph for result.  The default is Graph()

    Raises
    ------
    NetworkXError
        If ``A`` is not square.
    TypeError
        If the dtype kind of ``A`` is not recognized.

    Notes
    -----
    If ``create_using`` is an undirected multigraph, then only the edges
    indicated by the upper triangle of the matrix `A` will be added to the
    graph.

    If the numpy matrix has a single data type for each matrix entry it
    will be converted to an appropriate Python data type.

    If the numpy matrix has a user-specified compound data type the names
    of the data fields will be used as attribute keys in the resulting
    NetworkX graph.

    See Also
    --------
    to_numpy_matrix, to_numpy_recarray
    """
    # This should never fail if you have created a numpy matrix with numpy...
    import numpy as np
    # Map numpy dtype "kind" codes to the Python type used for edge weights;
    # the sentinel 'void' marks compound (structured) dtypes handled below.
    kind_to_python_type = {'f': float,
                           'i': int,
                           'u': int,
                           'b': bool,
                           'c': complex,
                           'S': str,
                           'V': 'void'}
    try:  # Python 3.x
        blurb = chr(1245)  # just to trigger the exception
        kind_to_python_type['U'] = str
    except ValueError:  # Python 2.6+
        kind_to_python_type['U'] = unicode
    G = _prep_create_using(create_using)
    n, m = A.shape
    if n != m:
        raise nx.NetworkXError("Adjacency matrix is not square.",
                               "nx,ny=%s" % (A.shape,))
    dt = A.dtype
    try:
        python_type = kind_to_python_type[dt.kind]
    except KeyError:
        # Fix: narrowed from a bare `except:` -- only an unknown dtype kind
        # should be reported; any other exception must propagate.
        raise TypeError("Unknown numpy data type: %s" % dt)

    # Make sure we get even the isolated nodes of the graph.
    G.add_nodes_from(range(n))
    # Coordinates of the nonzero entries become the edges of the graph.
    edges = zip(*(np.asarray(A).nonzero()))
    # handle numpy constructed data type
    # Fix: compare with `==`, not `is` -- identity against a string literal
    # depends on CPython interning and raises SyntaxWarning on modern
    # interpreters.
    if python_type == 'void':
        # Sort the fields by their offset, then by dtype, then by name.
        fields = sorted((offset, dtype, name) for name, (dtype, offset) in
                        A.dtype.fields.items())
        triples = ((u, v, {name: kind_to_python_type[dtype.kind](val)
                           for (_, dtype, name), val in zip(fields, A[u, v])})
                   for u, v in edges)
    # If the entries in the adjacency matrix are integers, the graph is a
    # multigraph, and parallel_edges is True, then create parallel edges,
    # each with weight 1, for each entry in the adjacency matrix.
    elif python_type is int and G.is_multigraph() and parallel_edges:
        chain = itertools.chain.from_iterable
        # Expand entry (u, v) into A[u, v] unit-weight parallel edges.
        triples = chain(((u, v, dict(weight=1)) for d in range(A[u, v]))
                        for (u, v) in edges)
    else:  # basic data type
        triples = ((u, v, dict(weight=python_type(A[u, v])))
                   for u, v in edges)
    # For undirected multigraphs keep only the upper triangle, since the
    # nonzero coordinates yield both orientations of every edge and each
    # edge must be added exactly once.
    if G.is_multigraph() and not G.is_directed():
        triples = ((u, v, d) for u, v, d in triples if u <= v)
    G.add_edges_from(triples)
    return G
@not_implemented_for('multigraph')
def to_numpy_recarray(G,nodelist=None,
                      dtype=[('weight',float)],
                      order=None):
    """Return the graph adjacency matrix as a NumPy recarray.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the NumPy matrix.

    nodelist : list, optional
       The rows and columns are ordered according to the nodes in `nodelist`.
       If `nodelist` is None, then the ordering is produced by G.nodes().

    dtype : NumPy data-type, optional
        A valid NumPy named dtype used to initialize the NumPy recarray.
        The data type names are assumed to be keys in the graph edge attribute
        dictionary.

    order : {'C', 'F'}, optional
        Whether to store multidimensional data in C- or Fortran-contiguous
        (row- or column-wise) order in memory. If None, then the NumPy default
        is used.

    Returns
    -------
    M : NumPy recarray
       The graph with specified edge data as a Numpy recarray

    Notes
    -----
    When `nodelist` does not contain every node in `G`, the matrix is built
    from the subgraph of `G` that is induced by the nodes in `nodelist`.

    Every edge must carry ALL attributes named in `dtype`; a missing
    attribute raises KeyError.

    Examples
    --------
    >>> G = nx.Graph()
    >>> G.add_edge(1,2,weight=7.0,cost=5)
    >>> A=nx.to_numpy_recarray(G,dtype=[('weight',float),('cost',int)])
    >>> print(A.weight)
    [[ 0.  7.]
     [ 7.  0.]]
    >>> print(A.cost)
    [[0 5]
     [5 0]]
    """
    # NOTE(review): the mutable default for `dtype` is shared across calls;
    # it appears safe here because it is never mutated, but worth confirming.
    import numpy as np
    if nodelist is None:
        nodelist = G.nodes()
    nodeset = set(nodelist)
    if len(nodelist) != len(nodeset):
        msg = "Ambiguous ordering: `nodelist` contained duplicates."
        raise nx.NetworkXError(msg)
    nlen=len(nodelist)
    undirected = not G.is_directed()
    # Map node -> row/column position.
    index=dict(zip(nodelist,range(nlen)))
    M = np.zeros((nlen,nlen), dtype=dtype, order=order)

    names=M.dtype.names
    for u,v,attrs in G.edges_iter(data=True):
        if (u in nodeset) and (v in nodeset):
            i,j = index[u],index[v]
            # One tuple per structured-dtype field, in dtype order.
            values=tuple([attrs[n] for n in names])
            M[i,j] = values
            if undirected:
                # Mirror the entry for undirected graphs.
                M[j,i] = M[i,j]

    return M.view(np.recarray)
def to_scipy_sparse_matrix(G, nodelist=None, dtype=None,
                           weight='weight', format='csr'):
    """Return the graph adjacency matrix as a SciPy sparse matrix.

    Parameters
    ----------
    G : graph
        The NetworkX graph used to construct the sparse matrix.

    nodelist : list, optional
        The rows and columns are ordered according to the nodes in
        `nodelist`.  If `nodelist` is None, then the ordering is produced
        by G.nodes().

    dtype : NumPy data-type, optional
        A valid NumPy dtype used to initialize the array. If None, then the
        NumPy default is used.

    weight : string or None optional (default='weight')
        The edge attribute that holds the numerical value used for
        the edge weight.  If None then all edge weights are 1.

    format : str in {'bsr', 'csr', 'csc', 'coo', 'lil', 'dia', 'dok'}
        The type of the matrix to be returned (default 'csr').

    Returns
    -------
    M : SciPy sparse matrix
        Graph adjacency matrix.

    Notes
    -----
    Entries are populated from the edge attribute named by `weight`
    (missing attribute -> 1).  Parallel edges in multigraphs are summed.
    When `nodelist` does not contain every node of `G`, the matrix is the
    adjacency matrix of the induced subgraph.  Self-loop weights appear
    once on the diagonal.
    """
    from scipy import sparse
    if nodelist is None:
        nodelist = G
    nlen = len(nodelist)
    if nlen == 0:
        raise nx.NetworkXError("Graph has no nodes or edges")

    if len(nodelist) != len(set(nodelist)):
        msg = "Ambiguous ordering: `nodelist` contained duplicates."
        raise nx.NetworkXError(msg)

    # Map node -> row/column position.
    index = dict(zip(nodelist, range(nlen)))
    if G.number_of_edges() == 0:
        rows, cols, vals = [], [], []
    else:
        coo_entries = ((index[u], index[v], attrs.get(weight, 1))
                       for u, v, attrs in G.edges_iter(nodelist, data=True)
                       if u in index and v in index)
        rows, cols, vals = zip(*coo_entries)

    if G.is_directed():
        M = sparse.coo_matrix((vals, (rows, cols)),
                              shape=(nlen, nlen), dtype=dtype)
    else:
        # Undirected: emit each edge in both orientations, then cancel the
        # double-counted self-loop diagonal with negative duplicate entries
        # (COO construction sums duplicate coordinates).
        sym_vals = vals + vals
        sym_rows = rows + cols
        sym_cols = cols + rows
        selfloops = G.selfloop_edges(data=True)
        if selfloops:
            diag_index, diag_data = zip(
                *((index[u], -attrs.get(weight, 1))
                  for u, v, attrs in selfloops
                  if u in index and v in index))
            sym_vals += diag_data
            sym_rows += diag_index
            sym_cols += diag_index
        M = sparse.coo_matrix((sym_vals, (sym_rows, sym_cols)),
                              shape=(nlen, nlen), dtype=dtype)
    try:
        return M.asformat(format)
    except AttributeError:
        raise nx.NetworkXError("Unknown sparse matrix format: %s" % format)
def _csr_gen_triples(A):
    """Yield (row, col, weight) triples from a SciPy sparse matrix in
    **Compressed Sparse Row** format.
    """
    data, indices, indptr = A.data, A.indices, A.indptr
    # indptr[i]:indptr[i+1] delimits row i's slice of data/indices.
    for row in range(A.shape[0]):
        start, stop = indptr[row], indptr[row + 1]
        for k in range(start, stop):
            yield row, indices[k], data[k]
def _csc_gen_triples(A):
    """Yield (row, col, weight) triples from a SciPy sparse matrix in
    **Compressed Sparse Column** format.
    """
    data, indices, indptr = A.data, A.indices, A.indptr
    # indptr[j]:indptr[j+1] delimits column j's slice of data/indices.
    for col in range(A.shape[1]):
        start, stop = indptr[col], indptr[col + 1]
        for k in range(start, stop):
            yield indices[k], col, data[k]
def _coo_gen_triples(A):
    """Return an iterator of (row, col, weight) triples from a SciPy sparse
    matrix in **Coordinate** format.
    """
    return zip(A.row, A.col, A.data)
def _dok_gen_triples(A):
    """Yield (row, col, weight) triples from a SciPy sparse matrix in
    **Dictionary of Keys** format.
    """
    for (row, col), value in A.items():
        yield row, col, value
def _generate_weighted_edges(A):
    """Return an iterable over (u, v, w) triples for the SciPy sparse
    matrix `A` (in any format), where u and v are adjacent vertices and w
    is the weight of the joining edge.

    Formats without a dedicated generator are converted to COO first.
    """
    dispatch = {'csr': _csr_gen_triples,
                'csc': _csc_gen_triples,
                'dok': _dok_gen_triples}
    handler = dispatch.get(A.format)
    if handler is not None:
        return handler(A)
    return _coo_gen_triples(A.tocoo())
def from_scipy_sparse_matrix(A, parallel_edges=False, create_using=None,
                             edge_attribute='weight'):
    """Creates a new graph from an adjacency matrix given as a SciPy sparse
    matrix.

    Parameters
    ----------
    A: scipy sparse matrix
      An adjacency matrix representation of a graph

    parallel_edges : Boolean
      If this is ``True``, `create_using` is a multigraph, and `A` is an
      integer matrix, then entry *(i, j)* is interpreted as the number of
      parallel edges joining vertices *i* and *j*.  Otherwise the entry is
      the weight of a single edge joining the vertices.

    create_using: NetworkX graph
       Use specified graph for result.  The default is Graph()

    edge_attribute: string
       Name of edge attribute to store matrix numeric value. The data will
       have the same type as the matrix entry (int, float, (real,imag)).

    Notes
    -----
    For an undirected multigraph only the edges indicated by the upper
    triangle of `A` are added, so each edge is created exactly once.
    When `create_using` is a multigraph, `parallel_edges` is ``True`` and
    the entries of `A` are integers, `edge_attribute` is ignored and each
    parallel edge gets weight 1.
    """
    graph = _prep_create_using(create_using)
    n, m = A.shape
    if n != m:
        raise nx.NetworkXError(
            "Adjacency matrix is not square. nx,ny=%s" % (A.shape,))

    # Add every vertex up front so isolated nodes are not lost.
    graph.add_nodes_from(range(n))

    # Lazily enumerate (u, v, w) triples straight from the sparse storage.
    triples = _generate_weighted_edges(A)

    # Integer multigraph with parallel_edges=True: expand each (u, v, w)
    # into w distinct unit-weight edges.
    if A.dtype.kind in ('i', 'u') and graph.is_multigraph() and parallel_edges:
        expand = itertools.chain.from_iterable
        triples = expand(((u, v, 1) for _ in range(w))
                         for (u, v, w) in triples)

    # Undirected multigraphs would otherwise see every edge twice (once per
    # orientation), so restrict to the upper triangle of the matrix.
    if graph.is_multigraph() and not graph.is_directed():
        triples = ((u, v, d) for u, v, d in triples if u <= v)

    graph.add_weighted_edges_from(triples, weight=edge_attribute)
    return graph
# fixture for nose tests
def setup_module(module):
    """Skip this test module when NumPy or SciPy is not installed.

    Nose calls this fixture once before running the module's tests;
    raising SkipTest skips the whole module.
    """
    from nose import SkipTest
    try:
        import numpy
    except ImportError:
        # A bare ``except`` would also swallow unrelated failures (e.g. a
        # broken install raising something other than ImportError).
        raise SkipTest("NumPy not available")
    try:
        import scipy
    except ImportError:
        raise SkipTest("SciPy not available")
| bsd-3-clause |
sekikn/incubator-airflow | tests/providers/google/marketing_platform/operators/test_search_ads_system.py | 10 | 1707 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from airflow.providers.google.marketing_platform.example_dags.example_search_ads import GCS_BUCKET
from tests.providers.google.cloud.utils.gcp_authenticator import GMP_KEY
from tests.test_utils.gcp_system_helpers import MARKETING_DAG_FOLDER, GoogleSystemTest, provide_gcp_context
# Requires the following scope:
SCOPES = [
"https://www.googleapis.com/auth/doubleclicksearch",
"https://www.googleapis.com/auth/cloud-platform",
]
@pytest.mark.system("google.marketing_platform")
@pytest.mark.credential_file(GMP_KEY)
class SearchAdsSystemTest(GoogleSystemTest):
    """System test running the example Search Ads DAG end to end.

    Requires real GCP credentials (GMP_KEY) and network access; only
    collected when the ``google.marketing_platform`` system marker is
    enabled.
    """

    def setUp(self):
        # The example DAG writes its report to this bucket, so it must
        # exist before the DAG run starts.
        super().setUp()
        self.create_gcs_bucket(GCS_BUCKET)

    def tearDown(self):
        # Remove the bucket even when the test failed, then let the base
        # class clean up its own state.
        self.delete_gcs_bucket(GCS_BUCKET)
        super().tearDown()

    @provide_gcp_context(GMP_KEY, scopes=SCOPES)
    def test_run_example_dag(self):
        """Execute the example_search_ads DAG and fail on any task error."""
        self.run_dag("example_search_ads", MARKETING_DAG_FOLDER)
| apache-2.0 |
telwertowski/QGIS | python/plugins/MetaSearch/dialogs/maindialog.py | 3 | 38847 | # -*- coding: utf-8 -*-
###############################################################################
#
# CSW Client
# ---------------------------------------------------------
# QGIS Catalog Service client.
#
# Copyright (C) 2010 NextGIS (http://nextgis.org),
# Alexander Bruy (alexander.bruy@gmail.com),
# Maxim Dubinin (sim@gis-lab.info)
#
# Copyright (C) 2017 Tom Kralidis (tomkralidis@gmail.com)
#
# This source is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This code is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
###############################################################################
import json
import os.path
import warnings
from urllib.request import build_opener, install_opener, ProxyHandler
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtWidgets import (QApplication, QDialog, QComboBox,
QDialogButtonBox, QMessageBox,
QTreeWidgetItem, QWidget)
from qgis.PyQt.QtGui import QColor, QCursor
from qgis.core import (QgsApplication, QgsCoordinateReferenceSystem,
QgsCoordinateTransform, QgsGeometry, QgsPointXY,
QgsProviderRegistry, QgsSettings, QgsProject)
from qgis.gui import QgsRubberBand, QgsGui
from qgis.utils import OverrideCursor
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=ResourceWarning)
warnings.filterwarnings("ignore", category=ImportWarning)
from owslib.csw import CatalogueServiceWeb # spellok
from owslib.fes import BBox, PropertyIsLike
from owslib.ows import ExceptionReport
from MetaSearch import link_types
from MetaSearch.dialogs.manageconnectionsdialog import ManageConnectionsDialog
from MetaSearch.dialogs.newconnectiondialog import NewConnectionDialog
from MetaSearch.dialogs.recorddialog import RecordDialog
from MetaSearch.dialogs.xmldialog import XMLDialog
from MetaSearch.util import (clean_ows_url, get_connections_from_file,
get_ui_class, get_help_url, highlight_xml,
normalize_text, open_url, render_template,
serialize_string, StaticContext)
BASE_CLASS = get_ui_class('maindialog.ui')
class MetaSearchDialog(QDialog, BASE_CLASS):
"""main dialogue"""
    def __init__(self, iface):
        """Build the dialog, set defaults and wire all widget signals.

        :param iface: the QgisInterface instance supplied by the plugin
        """
        QDialog.__init__(self)
        self.setupUi(self)

        self.iface = iface
        self.map = iface.mapCanvas()
        self.settings = QgsSettings()
        # current owslib CatalogueServiceWeb connection and its credentials
        self.catalog = None
        self.catalog_url = None
        self.catalog_username = None
        self.catalog_password = None
        self.context = StaticContext()

        self.leKeywords.setShowSearchIcon(True)
        self.leKeywords.setPlaceholderText(self.tr('Search keywords'))

        self.setWindowTitle(self.tr('MetaSearch'))

        # rubber band used to draw the selected record's footprint
        self.rubber_band = QgsRubberBand(self.map, True)  # True = a polygon
        self.rubber_band.setColor(QColor(255, 0, 0, 75))
        self.rubber_band.setWidth(5)

        # form inputs (paging state and OGC filter constraints)
        self.startfrom = 0
        self.maxrecords = 10
        self.timeout = 10
        self.constraints = []

        # Servers tab
        self.cmbConnectionsServices.activated.connect(self.save_connection)
        self.cmbConnectionsSearch.activated.connect(self.save_connection)
        self.btnServerInfo.clicked.connect(self.connection_info)
        self.btnAddDefault.clicked.connect(self.add_default_connections)
        self.btnCapabilities.clicked.connect(self.show_xml)
        self.tabWidget.currentChanged.connect(self.populate_connection_list)

        # server management buttons
        self.btnNew.clicked.connect(self.add_connection)
        self.btnEdit.clicked.connect(self.edit_connection)
        self.btnDelete.clicked.connect(self.delete_connection)
        self.btnLoad.clicked.connect(self.load_connections)
        self.btnSave.clicked.connect(save_connections)

        # Search tab
        self.treeRecords.itemSelectionChanged.connect(self.record_clicked)
        self.treeRecords.itemDoubleClicked.connect(self.show_metadata)
        self.btnSearch.clicked.connect(self.search)
        self.leKeywords.returnPressed.connect(self.search)
        # prevent dialog from closing upon pressing enter
        self.buttonBox.button(QDialogButtonBox.Close).setAutoDefault(False)
        # launch help from button
        self.buttonBox.helpRequested.connect(self.help)
        self.btnCanvasBbox.setAutoDefault(False)
        self.btnCanvasBbox.clicked.connect(self.set_bbox_from_map)
        self.btnGlobalBbox.clicked.connect(self.set_bbox_global)

        # navigation buttons share one handler; it dispatches on sender()
        self.btnFirst.clicked.connect(self.navigate)
        self.btnPrev.clicked.connect(self.navigate)
        self.btnNext.clicked.connect(self.navigate)
        self.btnLast.clicked.connect(self.navigate)

        self.mActionAddWms.triggered.connect(self.add_to_ows)
        self.mActionAddWfs.triggered.connect(self.add_to_ows)
        self.mActionAddWcs.triggered.connect(self.add_to_ows)
        self.mActionAddAms.triggered.connect(self.add_to_ows)
        self.mActionAddAfs.triggered.connect(self.add_to_ows)
        self.mActionAddGisFile.triggered.connect(self.add_gis_file)
        self.btnShowXml.clicked.connect(self.show_xml)

        self.manageGui()
    def manageGui(self):
        """Initialize widget state when the dialog opens.

        Restores persisted settings (records per page, selected catalog
        connection), resets the bbox to the whole globe and installs the
        QGIS proxy configuration for urllib-based requests.
        """
        self.tabWidget.setCurrentIndex(0)
        self.populate_connection_list()
        self.btnCapabilities.setEnabled(False)
        self.spnRecords.setValue(
            int(self.settings.value('/MetaSearch/returnRecords', 10)))

        key = '/MetaSearch/%s' % self.cmbConnectionsSearch.currentText()
        self.catalog_url = self.settings.value('%s/url' % key)
        self.catalog_username = self.settings.value('%s/username' % key)
        self.catalog_password = self.settings.value('%s/password' % key)

        self.set_bbox_global()

        self.reset_buttons()

        # install proxy handler if specified in QGIS settings
        self.install_proxy()
# Servers tab
def populate_connection_list(self):
"""populate select box with connections"""
self.settings.beginGroup('/MetaSearch/')
self.cmbConnectionsServices.clear()
self.cmbConnectionsServices.addItems(self.settings.childGroups())
self.cmbConnectionsSearch.clear()
self.cmbConnectionsSearch.addItems(self.settings.childGroups())
self.settings.endGroup()
self.set_connection_list_position()
if self.cmbConnectionsServices.count() == 0:
# no connections - disable various buttons
state_disabled = False
self.btnSave.setEnabled(state_disabled)
# and start with connection tab open
self.tabWidget.setCurrentIndex(1)
# tell the user to add services
msg = self.tr('No services/connections defined. To get '
'started with MetaSearch, create a new '
'connection by clicking \'New\' or click '
'\'Add default services\'.')
self.textMetadata.setHtml('<p><h3>%s</h3></p>' % msg)
else:
# connections - enable various buttons
state_disabled = True
self.btnServerInfo.setEnabled(state_disabled)
self.btnEdit.setEnabled(state_disabled)
self.btnDelete.setEnabled(state_disabled)
    def set_connection_list_position(self):
        """Select the last-used connection in both combo boxes.

        Falls back to the first entry when no selection was stored yet and
        to the last entry when the stored name no longer exists.
        """
        to_select = self.settings.value('/MetaSearch/selected')
        conn_count = self.cmbConnectionsServices.count()

        if conn_count == 0:
            # nothing to select: also disable the per-connection actions
            self.btnDelete.setEnabled(False)
            self.btnServerInfo.setEnabled(False)
            self.btnEdit.setEnabled(False)

        # does to_select exist in cmbConnectionsServices?
        exists = False
        for i in range(conn_count):
            if self.cmbConnectionsServices.itemText(i) == to_select:
                self.cmbConnectionsServices.setCurrentIndex(i)
                self.cmbConnectionsSearch.setCurrentIndex(i)
                exists = True
                break

        # If we couldn't find the stored item, but there are some, default
        # to the last item (this makes some sense when deleting items as it
        # allows the user to repeatedly click on delete to remove a whole
        # lot of items)
        if not exists and conn_count > 0:
            # If to_select is null, then the selected connection wasn't found
            # by QgsSettings, which probably means that this is the first time
            # the user has used CSWClient, so default to the first in the list
            # of connections. Otherwise default to the last.
            if not to_select:
                current_index = 0
            else:
                current_index = conn_count - 1

            self.cmbConnectionsServices.setCurrentIndex(current_index)
            self.cmbConnectionsSearch.setCurrentIndex(current_index)
    def save_connection(self):
        """Persist the currently selected connection name.

        Connected to both connection combo boxes; the sender's object name
        identifies which tab triggered the change, which determines whether
        the search-tab credentials are re-bound or the server metadata view
        is cleared.
        """
        caller = self.sender().objectName()

        if caller == 'cmbConnectionsServices':  # servers tab
            current_text = self.cmbConnectionsServices.currentText()
        elif caller == 'cmbConnectionsSearch':  # search tab
            current_text = self.cmbConnectionsSearch.currentText()

        self.settings.setValue('/MetaSearch/selected', current_text)
        key = '/MetaSearch/%s' % current_text

        if caller == 'cmbConnectionsSearch':  # bind to service in search tab
            self.catalog_url = self.settings.value('%s/url' % key)
            self.catalog_username = self.settings.value('%s/username' % key)
            self.catalog_password = self.settings.value('%s/password' % key)

        if caller == 'cmbConnectionsServices':  # clear server metadata
            self.textMetadata.clear()
            self.btnCapabilities.setEnabled(False)
    def connection_info(self):
        """Connect to the selected CSW server and render its service metadata."""
        current_text = self.cmbConnectionsServices.currentText()
        key = '/MetaSearch/%s' % current_text
        self.catalog_url = self.settings.value('%s/url' % key)
        self.catalog_username = self.settings.value('%s/username' % key)
        self.catalog_password = self.settings.value('%s/password' % key)

        # connect to the server; _get_csw shows its own warning on failure
        if not self._get_csw():
            return

        if self.catalog:  # display service metadata
            self.btnCapabilities.setEnabled(True)
            metadata = render_template('en', self.context,
                                       self.catalog,
                                       'service_metadata.html')
            style = QgsApplication.reportStyleSheet()
            self.textMetadata.clear()
            self.textMetadata.document().setDefaultStyleSheet(style)
            self.textMetadata.setHtml(metadata)
def add_connection(self):
"""add new service"""
conn_new = NewConnectionDialog()
conn_new.setWindowTitle(self.tr('New Catalog Service'))
if conn_new.exec_() == QDialog.Accepted: # add to service list
self.populate_connection_list()
self.textMetadata.clear()
    def edit_connection(self):
        """Open the connection dialog pre-filled with the selected service."""
        current_text = self.cmbConnectionsServices.currentText()
        url = self.settings.value('/MetaSearch/%s/url' % current_text)

        conn_edit = NewConnectionDialog(current_text)
        conn_edit.setWindowTitle(self.tr('Edit Catalog Service'))
        # pre-populate the form with the stored connection details
        conn_edit.leName.setText(current_text)
        conn_edit.leURL.setText(url)
        conn_edit.leUsername.setText(self.settings.value('/MetaSearch/%s/username' % current_text))
        conn_edit.lePassword.setText(self.settings.value('/MetaSearch/%s/password' % current_text))

        if conn_edit.exec_() == QDialog.Accepted:  # update service list
            self.populate_connection_list()
    def delete_connection(self):
        """Remove the selected connection after user confirmation."""
        current_text = self.cmbConnectionsServices.currentText()
        key = '/MetaSearch/%s' % current_text

        msg = self.tr('Remove service {0}?').format(current_text)

        result = QMessageBox.question(self, self.tr('Delete Service'), msg,
                                      QMessageBox.Yes | QMessageBox.No, QMessageBox.No)
        if result == QMessageBox.Yes:  # remove service from list
            self.settings.remove(key)
            index_to_delete = self.cmbConnectionsServices.currentIndex()
            self.cmbConnectionsServices.removeItem(index_to_delete)
            # keep the two combo boxes in sync (same index in both)
            self.cmbConnectionsSearch.removeItem(index_to_delete)
            self.set_connection_list_position()
def load_connections(self):
"""load services from list"""
ManageConnectionsDialog(1).exec_()
self.populate_connection_list()
    def add_default_connections(self):
        """Load the default CSW connections shipped with the plugin.

        Reads resources/connections-default.xml and stores each <csw>
        entry in settings, asking before overwriting an existing name.
        """
        filename = os.path.join(self.context.ppath,
                                'resources', 'connections-default.xml')
        doc = get_connections_from_file(self, filename)
        if doc is None:
            return

        self.settings.beginGroup('/MetaSearch/')
        keys = self.settings.childGroups()
        self.settings.endGroup()

        for server in doc.findall('csw'):
            name = server.attrib.get('name')
            # check for duplicates
            if name in keys:
                msg = self.tr('{0} exists. Overwrite?').format(name)
                res = QMessageBox.warning(self,
                                          self.tr('Loading connections'), msg,
                                          QMessageBox.Yes | QMessageBox.No)
                if res != QMessageBox.Yes:
                    continue

            # no dups detected or overwrite is allowed
            key = '/MetaSearch/%s' % name
            self.settings.setValue('%s/url' % key, server.attrib.get('url'))

        self.populate_connection_list()
# Settings tab
    def set_ows_save_title_ask(self):
        """Persist strategy: save OWS layers under their title, ask on duplicates."""
        self.settings.setValue('/MetaSearch/ows_save_strategy', 'title_ask')
    def set_ows_save_title_no_ask(self):
        """Persist strategy: save OWS layers under their title, never ask on duplicates."""
        self.settings.setValue('/MetaSearch/ows_save_strategy', 'title_no_ask')
    def set_ows_save_temp_name(self):
        """Persist strategy: save OWS layers under a temporary name."""
        self.settings.setValue('/MetaSearch/ows_save_strategy', 'temp_name')
# Search tab
def set_bbox_from_map(self):
"""set bounding box from map extent"""
crs = self.map.mapSettings().destinationCrs()
try:
crsid = int(crs.authid().split(':')[1])
except IndexError: # no projection
crsid = 4326
extent = self.map.extent()
if crsid != 4326: # reproject to EPSG:4326
src = QgsCoordinateReferenceSystem(crsid)
dest = QgsCoordinateReferenceSystem("EPSG:4326")
xform = QgsCoordinateTransform(src, dest, QgsProject.instance())
minxy = xform.transform(QgsPointXY(extent.xMinimum(),
extent.yMinimum()))
maxxy = xform.transform(QgsPointXY(extent.xMaximum(),
extent.yMaximum()))
minx, miny = minxy
maxx, maxy = maxxy
else: # 4326
minx = extent.xMinimum()
miny = extent.yMinimum()
maxx = extent.xMaximum()
maxy = extent.yMaximum()
self.leNorth.setText(str(maxy)[0:9])
self.leSouth.setText(str(miny)[0:9])
self.leWest.setText(str(minx)[0:9])
self.leEast.setText(str(maxx)[0:9])
def set_bbox_global(self):
"""set global bounding box"""
self.leNorth.setText('90')
self.leSouth.setText('-90')
self.leWest.setText('-180')
self.leEast.setText('180')
    def search(self):
        """Execute a CSW GetRecords search against the selected catalog.

        Builds OGC filters from the keyword box and (non-global) bbox,
        connects to the catalog and displays the first page of results.
        """
        self.catalog = None
        self.constraints = []

        # clear all fields and disable buttons
        self.lblResults.clear()
        self.treeRecords.clear()
        self.reset_buttons()

        # save some settings
        self.settings.setValue('/MetaSearch/returnRecords',
                               self.spnRecords.cleanText())

        # set current catalog
        current_text = self.cmbConnectionsSearch.currentText()
        key = '/MetaSearch/%s' % current_text
        self.catalog_url = self.settings.value('%s/url' % key)
        self.catalog_username = self.settings.value('%s/username' % key)
        self.catalog_password = self.settings.value('%s/password' % key)

        # start position and number of records to return
        self.startfrom = 0
        self.maxrecords = self.spnRecords.value()

        # set timeout
        self.timeout = self.spnTimeout.value()

        # bbox
        # CRS is WGS84 with axis order longitude, latitude
        # defined by 'urn:ogc:def:crs:OGC:1.3:CRS84'
        minx = self.leWest.text()
        miny = self.leSouth.text()
        maxx = self.leEast.text()
        maxy = self.leNorth.text()
        bbox = [minx, miny, maxx, maxy]

        # only apply spatial filter if bbox is not global
        # even for a global bbox, if a spatial filter is applied, then
        # the CSW server will skip records without a bbox
        if bbox != ['-180', '-90', '180', '90']:
            self.constraints.append(BBox(bbox,
                                         crs='urn:ogc:def:crs:OGC:1.3:CRS84'))

        # keywords
        if self.leKeywords.text():
            # TODO: handle multiple word searches
            keywords = self.leKeywords.text()
            self.constraints.append(PropertyIsLike('csw:AnyText', keywords))

        if len(self.constraints) > 1:  # exclusive search (a && b)
            # owslib expects AND-ed constraints as a nested list
            self.constraints = [self.constraints]

        # build request
        if not self._get_csw():
            return

        # TODO: allow users to select resources types
        # to find ('service', 'dataset', etc.)
        try:
            with OverrideCursor(Qt.WaitCursor):
                self.catalog.getrecords2(constraints=self.constraints,
                                         maxrecords=self.maxrecords, esn='full')
        except ExceptionReport as err:
            QMessageBox.warning(self, self.tr('Search error'),
                                self.tr('Search error: {0}').format(err))
            return
        except Exception as err:
            QMessageBox.warning(self, self.tr('Connection error'),
                                self.tr('Connection error: {0}').format(err))
            return

        if self.catalog.results['matches'] == 0:
            self.lblResults.setText(self.tr('0 results'))
            return

        self.display_results()
def display_results(self):
"""display search results"""
self.treeRecords.clear()
position = self.catalog.results['returned'] + self.startfrom
msg = self.tr('Showing {0} - {1} of %n result(s)', 'number of results',
self.catalog.results['matches']).format(self.startfrom + 1,
position)
self.lblResults.setText(msg)
for rec in self.catalog.records:
item = QTreeWidgetItem(self.treeRecords)
if self.catalog.records[rec].type:
item.setText(0, normalize_text(self.catalog.records[rec].type))
else:
item.setText(0, 'unknown')
if self.catalog.records[rec].title:
item.setText(1,
normalize_text(self.catalog.records[rec].title))
if self.catalog.records[rec].identifier:
set_item_data(item, 'identifier',
self.catalog.records[rec].identifier)
self.btnShowXml.setEnabled(True)
if self.catalog.results["matches"] < self.maxrecords:
disabled = False
else:
disabled = True
self.btnFirst.setEnabled(disabled)
self.btnPrev.setEnabled(disabled)
self.btnNext.setEnabled(disabled)
self.btnLast.setEnabled(disabled)
def record_clicked(self):
"""record clicked signal"""
# disable only service buttons
self.reset_buttons(True, False, False)
if not self.treeRecords.selectedItems():
return
item = self.treeRecords.currentItem()
if not item:
return
identifier = get_item_data(item, 'identifier')
try:
record = self.catalog.records[identifier]
except KeyError as err:
QMessageBox.warning(self,
self.tr('Record parsing error'),
'Unable to locate record identifier')
return
# if the record has a bbox, show a footprint on the map
if record.bbox is not None:
points = bbox_to_polygon(record.bbox)
if points is not None:
src = QgsCoordinateReferenceSystem("EPSG:4326")
dst = self.map.mapSettings().destinationCrs()
geom = QgsGeometry.fromWkt(points)
if src.postgisSrid() != dst.postgisSrid():
ctr = QgsCoordinateTransform(src, dst, QgsProject.instance())
try:
geom.transform(ctr)
except Exception as err:
QMessageBox.warning(
self,
self.tr('Coordinate Transformation Error'),
str(err))
self.rubber_band.setToGeometry(geom, None)
# figure out if the data is interactive and can be operated on
self.find_services(record, item)
def find_services(self, record, item):
"""scan record for WMS/WMTS|WFS|WCS endpoints"""
links = record.uris + record.references
services = {}
for link in links:
if 'scheme' in link:
link_type = link['scheme']
elif 'protocol' in link:
link_type = link['protocol']
else:
link_type = None
if link_type is not None:
link_type = link_type.upper()
wmswmst_link_types = list(map(str.upper, link_types.WMSWMST_LINK_TYPES))
wfs_link_types = list(map(str.upper, link_types.WFS_LINK_TYPES))
wcs_link_types = list(map(str.upper, link_types.WCS_LINK_TYPES))
ams_link_types = list(map(str.upper, link_types.AMS_LINK_TYPES))
afs_link_types = list(map(str.upper, link_types.AFS_LINK_TYPES))
gis_file_link_types = list(map(str.upper, link_types.GIS_FILE_LINK_TYPES))
# if the link type exists, and it is one of the acceptable
# interactive link types, then set
if all([link_type is not None,
link_type in wmswmst_link_types + wfs_link_types +
wcs_link_types + ams_link_types + afs_link_types + gis_file_link_types]):
if link_type in wmswmst_link_types:
services['wms'] = link['url']
self.mActionAddWms.setEnabled(True)
if link_type in wfs_link_types:
services['wfs'] = link['url']
self.mActionAddWfs.setEnabled(True)
if link_type in wcs_link_types:
services['wcs'] = link['url']
self.mActionAddWcs.setEnabled(True)
if link_type in ams_link_types:
services['ams'] = link['url']
self.mActionAddAms.setEnabled(True)
if link_type in afs_link_types:
services['afs'] = link['url']
self.mActionAddAfs.setEnabled(True)
if link_type in gis_file_link_types:
services['gis_file'] = link['url']
services['title'] = record.title
self.mActionAddGisFile.setEnabled(True)
self.tbAddData.setEnabled(True)
set_item_data(item, 'link', json.dumps(services))
def navigate(self):
"""manage navigation / paging"""
caller = self.sender().objectName()
if caller == 'btnFirst':
self.startfrom = 0
elif caller == 'btnLast':
self.startfrom = self.catalog.results['matches'] - self.maxrecords
elif caller == 'btnNext':
self.startfrom += self.maxrecords
if self.startfrom >= self.catalog.results["matches"]:
msg = self.tr('End of results. Go to start?')
res = QMessageBox.information(self, self.tr('Navigation'),
msg,
(QMessageBox.Ok |
QMessageBox.Cancel))
if res == QMessageBox.Ok:
self.startfrom = 0
else:
return
elif caller == "btnPrev":
self.startfrom -= self.maxrecords
if self.startfrom <= 0:
msg = self.tr('Start of results. Go to end?')
res = QMessageBox.information(self, self.tr('Navigation'),
msg,
(QMessageBox.Ok |
QMessageBox.Cancel))
if res == QMessageBox.Ok:
self.startfrom = (self.catalog.results['matches'] -
self.maxrecords)
else:
return
try:
with OverrideCursor(Qt.WaitCursor):
self.catalog.getrecords2(constraints=self.constraints,
maxrecords=self.maxrecords,
startposition=self.startfrom, esn='full')
except ExceptionReport as err:
QMessageBox.warning(self, self.tr('Search error'),
self.tr('Search error: {0}').format(err))
return
except Exception as err:
QMessageBox.warning(self, self.tr('Connection error'),
self.tr('Connection error: {0}').format(err))
return
self.display_results()
    def add_to_ows(self):
        """Save the selected record's service endpoint as a QGIS connection
        and open the matching provider dialog against it.

        The triggering action (sender) determines the service flavour:
        WMS/WMTS, WFS, WCS, ArcGIS MapServer or ArcGIS FeatureServer.
        """
        conn_name_matches = []
        item = self.treeRecords.currentItem()
        if not item:
            return

        item_data = json.loads(get_item_data(item, 'link'))
        caller = self.sender().objectName()

        # stype = human name,/qgis/connections-%s,providername
        if caller == 'mActionAddWms':
            stype = ['OGC:WMS/OGC:WMTS', 'wms', 'wms']
            data_url = item_data['wms']
        elif caller == 'mActionAddWfs':
            stype = ['OGC:WFS', 'wfs', 'WFS']
            data_url = item_data['wfs']
        elif caller == 'mActionAddWcs':
            stype = ['OGC:WCS', 'wcs', 'wcs']
            data_url = item_data['wcs']
        elif caller == 'mActionAddAms':
            stype = ['ESRI:ArcGIS:MapServer', 'ams', 'arcgismapserver']
            # ArcGIS URLs are truncated to end at the service segment
            data_url = item_data['ams'].split('MapServer')[0] + 'MapServer'
        elif caller == 'mActionAddAfs':
            stype = ['ESRI:ArcGIS:FeatureServer', 'afs', 'arcgisfeatureserver']
            data_url = item_data['afs'].split('FeatureServer')[0] + 'FeatureServer'

        sname = '%s from MetaSearch' % stype[1]

        # store connection
        # check if there is a connection with same name
        if caller in ['mActionAddAms', 'mActionAddAfs']:
            self.settings.beginGroup('/qgis/connections-%s' % stype[2])
        else:
            self.settings.beginGroup('/qgis/connections-%s' % stype[1])
        keys = self.settings.childGroups()
        self.settings.endGroup()

        for key in keys:
            if key.startswith(sname):
                conn_name_matches.append(key)
        if conn_name_matches:
            sname = conn_name_matches[-1]

        # check for duplicates
        if sname in keys:  # duplicate found
            msg = self.tr('Connection {0} exists. Overwrite?').format(sname)
            res = QMessageBox.warning(self, self.tr('Saving server'), msg,
                                      QMessageBox.Yes | QMessageBox.No | QMessageBox.Cancel)
            if res == QMessageBox.No:  # assign new name with serial
                sname = serialize_string(sname)
            elif res == QMessageBox.Cancel:
                return

        # no dups detected or overwrite is allowed
        if caller in ['mActionAddAms', 'mActionAddAfs']:
            self.settings.beginGroup('/qgis/connections-%s' % stype[2])
        else:
            self.settings.beginGroup('/qgis/connections-%s' % stype[1])
        self.settings.setValue('/%s/url' % sname, clean_ows_url(data_url))
        self.settings.endGroup()

        # open provider window
        ows_provider = QgsGui.sourceSelectProviderRegistry().createSelectionWidget(
            stype[2],
            self,
            Qt.Widget,
            QgsProviderRegistry.WidgetMode.Embedded)

        service_type = stype[0]

        # connect dialog signals to iface slots
        # NOTE: widget/child names and connect-slot names differ per provider
        if service_type == 'OGC:WMS/OGC:WMTS':
            ows_provider.addRasterLayer.connect(self.iface.addRasterLayer)
            conn_cmb = ows_provider.findChild(QWidget, 'cmbConnections')
            connect = 'btnConnect_clicked'
        elif service_type == 'OGC:WFS':
            def addVectorLayer(path, name):
                self.iface.mainWindow().addVectorLayer(path, name, 'WFS')

            ows_provider.addVectorLayer.connect(addVectorLayer)
            conn_cmb = ows_provider.findChild(QWidget, 'cmbConnections')
            connect = 'connectToServer'
        elif service_type == 'OGC:WCS':
            ows_provider.addRasterLayer.connect(self.iface.addRasterLayer)
            conn_cmb = ows_provider.findChild(QWidget, 'mConnectionsComboBox')
            connect = 'mConnectButton_clicked'
        elif service_type == 'ESRI:ArcGIS:MapServer':
            ows_provider.addRasterLayer.connect(self.iface.addRasterLayer)
            conn_cmb = ows_provider.findChild(QComboBox)
            connect = 'connectToServer'
        elif service_type == 'ESRI:ArcGIS:FeatureServer':
            def addAfsLayer(path, name):
                self.iface.mainWindow().addVectorLayer(path, name, 'afs')

            ows_provider.addVectorLayer.connect(addAfsLayer)
            conn_cmb = ows_provider.findChild(QComboBox)
            connect = 'connectToServer'
        ows_provider.setModal(False)
        ows_provider.show()

        # open provider dialogue against added OWS
        index = conn_cmb.findText(sname)
        if index > -1:
            conn_cmb.setCurrentIndex(index)
            # only for wfs
            if service_type == 'OGC:WFS':
                ows_provider.cmbConnections_activated(index)
            elif service_type in ['ESRI:ArcGIS:MapServer', 'ESRI:ArcGIS:FeatureServer']:
                ows_provider.cmbConnections_activated(index)
        getattr(ows_provider, connect)()
    def add_gis_file(self):
        """Load the selected record's GIS file link as an OGR vector layer."""
        item = self.treeRecords.currentItem()
        if not item:
            return

        item_data = json.loads(get_item_data(item, 'link'))
        gis_file = item_data['gis_file']
        title = item_data['title']

        layer = self.iface.addVectorLayer(gis_file, title, "ogr")
        if not layer:
            self.iface.messageBar().pushWarning(None, "Layer failed to load!")
    def show_metadata(self):
        """Fetch the full record by id and show it in a metadata dialog."""
        if not self.treeRecords.selectedItems():
            return

        item = self.treeRecords.currentItem()
        if not item:
            return

        identifier = get_item_data(item, 'identifier')

        try:
            with OverrideCursor(Qt.WaitCursor):
                # use a fresh connection so the paged search results held by
                # self.catalog are not clobbered
                cat = CatalogueServiceWeb(self.catalog_url, timeout=self.timeout,  # spellok
                                          username=self.catalog_username,
                                          password=self.catalog_password)
                cat.getrecordbyid(
                    [self.catalog.records[identifier].identifier])
        except ExceptionReport as err:
            QMessageBox.warning(self, self.tr('GetRecords error'),
                                self.tr('Error getting response: {0}').format(err))
            return
        except KeyError as err:
            QMessageBox.warning(self,
                                self.tr('Record parsing error'),
                                self.tr('Unable to locate record identifier'))
            return

        record = cat.records[identifier]
        # remember the request URL so the template can link to the raw XML
        record.xml_url = cat.request

        crd = RecordDialog()
        metadata = render_template('en', self.context,
                                   record, 'record_metadata_dc.html')

        style = QgsApplication.reportStyleSheet()
        crd.textMetadata.document().setDefaultStyleSheet(style)
        crd.textMetadata.setHtml(metadata)
        crd.exec_()
def show_xml(self):
"""show XML request / response"""
crd = XMLDialog()
request_html = highlight_xml(self.context, self.catalog.request)
response_html = highlight_xml(self.context, self.catalog.response)
style = QgsApplication.reportStyleSheet()
crd.txtbrXMLRequest.clear()
crd.txtbrXMLResponse.clear()
crd.txtbrXMLRequest.document().setDefaultStyleSheet(style)
crd.txtbrXMLResponse.document().setDefaultStyleSheet(style)
crd.txtbrXMLRequest.setHtml(request_html)
crd.txtbrXMLResponse.setHtml(response_html)
crd.exec_()
def reset_buttons(self, services=True, xml=True, navigation=True):
"""Convenience function to disable WMS/WMTS|WFS|WCS buttons"""
if services:
self.tbAddData.setEnabled(False)
self.mActionAddWms.setEnabled(False)
self.mActionAddWfs.setEnabled(False)
self.mActionAddWcs.setEnabled(False)
self.mActionAddAms.setEnabled(False)
self.mActionAddAfs.setEnabled(False)
self.mActionAddGisFile.setEnabled(False)
if xml:
self.btnShowXml.setEnabled(False)
if navigation:
self.btnFirst.setEnabled(False)
self.btnPrev.setEnabled(False)
self.btnNext.setEnabled(False)
self.btnLast.setEnabled(False)
    def help(self):
        """Open the MetaSearch help page in the system web browser."""
        open_url(get_help_url())
    def reject(self):
        """Close the dialog and clear the footprint rubber band from the map."""
        QDialog.reject(self)
        self.rubber_band.reset()
    def _get_csw(self):
        """Connect to the current catalog URL; return True on success.

        On failure a warning dialog is shown and False is returned;
        self.catalog keeps whatever value it had before the attempt.
        """
        # connect to the server
        with OverrideCursor(Qt.WaitCursor):
            try:
                self.catalog = CatalogueServiceWeb(self.catalog_url,  # spellok
                                                   timeout=self.timeout,
                                                   username=self.catalog_username,
                                                   password=self.catalog_password)
                return True
            except ExceptionReport as err:
                msg = self.tr('Error connecting to service: {0}').format(err)
            except ValueError as err:
                msg = self.tr('Value Error: {0}').format(err)
            except Exception as err:
                msg = self.tr('Unknown Error: {0}').format(err)

        # only reached on failure: restore the cursor, then warn
        QMessageBox.warning(self, self.tr('CSW Connection error'), msg)
        return False
    def install_proxy(self):
        """Install a urllib proxy opener from the QGIS proxy settings.

        Only the HttpProxy type is supported for now; other proxy types
        are ignored.  Credentials, when set, are embedded in the proxy URL.
        """
        # initially support HTTP for now
        if self.settings.value('/proxy/proxyEnabled') == 'true':
            if self.settings.value('/proxy/proxyType') == 'HttpProxy':
                ptype = 'http'
            else:
                return

            user = self.settings.value('/proxy/proxyUser')
            password = self.settings.value('/proxy/proxyPassword')
            host = self.settings.value('/proxy/proxyHost')
            port = self.settings.value('/proxy/proxyPort')

            proxy_up = ''
            proxy_port = ''

            # user:password@ prefix only when both parts are present
            if all([user != '', password != '']):
                proxy_up = '%s:%s@' % (user, password)

            if port != '':
                proxy_port = ':%s' % port

            conn = '%s://%s%s%s' % (ptype, proxy_up, host, proxy_port)
            install_opener(build_opener(ProxyHandler({ptype: conn})))
def save_connections():
    """Open the manage-connections dialog in save mode (export to XML)."""
    ManageConnectionsDialog(0).exec_()
def get_item_data(item, field):
    """Return the value stored on a QTreeWidgetItem for *field*.

    *field* is 'identifier' or 'link'; the value lives in role 32
    (Qt.UserRole) of the column mapped by _get_field_value().
    """
    return item.data(_get_field_value(field), 32)
def set_item_data(item, field, value):
    """Store *value* on a QTreeWidgetItem under *field*.

    Counterpart of get_item_data(); uses role 32 (Qt.UserRole) of the
    column mapped by _get_field_value().
    """
    item.setData(_get_field_value(field), 32, value)
def _get_field_value(field):
"""convenience function to return field value integer"""
value = 0
if field == 'identifier':
value = 0
if field == 'link':
value = 1
return value
def bbox_to_polygon(bbox):
    """Return a WKT POLYGON string for an OWSLib bbox, or None.

    The previous docstring wrongly claimed a list of QgsPointXY objects
    was returned; the function has always produced a WKT string, suitable
    for QgsGeometry.fromWkt().  Returns None when any corner is missing.
    """
    if any(c is None for c in (bbox.minx, bbox.maxx, bbox.miny, bbox.maxy)):
        return None

    minx = float(bbox.minx)
    miny = float(bbox.miny)
    maxx = float(bbox.maxx)
    maxy = float(bbox.maxy)

    # closed ring starting and ending at the lower-left corner
    return 'POLYGON((%.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f, %.2f %.2f))' % (minx, miny, minx, maxy, maxx, maxy, maxx, miny, minx, miny)  # noqa
| gpl-2.0 |
BrandonY/python-docs-samples | appengine/flexible/sendgrid/main_test.py | 8 | 1447 | # Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import pytest
@pytest.fixture
def app(monkeypatch):
    """Yield a Flask test client with SendGrid credentials stubbed out."""
    monkeypatch.setenv('SENDGRID_API_KEY', 'apikey')
    monkeypatch.setenv('SENDGRID_SENDER', 'sender@example.com')
    # Imported after the env vars are set — presumably main reads the
    # SENDGRID_* variables at import time; confirm against main.py.
    import main
    main.app.testing = True
    return main.app.test_client()
def test_get(app):
    """The index page should respond successfully."""
    response = app.get('/')
    assert response.status_code == 200
@mock.patch('python_http_client.client.Client._make_request')
def test_post(make_request_mock, app):
    """Posting a recipient should trigger an outbound SendGrid request."""
    fake_response = mock.Mock()
    fake_response.getcode.return_value = 200
    fake_response.read.return_value = 'OK'
    fake_response.info.return_value = {}
    make_request_mock.return_value = fake_response

    app.post('/send/email', data={
        'to': 'user@example.com'
    })

    assert make_request_mock.called
    # Second positional arg of _make_request is the outbound urllib request.
    sent_request = make_request_mock.call_args[0][1]
    assert 'user@example.com' in sent_request.data.decode('utf-8')
| apache-2.0 |
LecomteEmerick/Essentia-build | test/src/unittest/standard/test_unaryoperator.py | 10 | 3671 | #!/usr/bin/env python
# Copyright (C) 2006-2013 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
class TestUnaryOperator(TestCase):
    """Unit tests for Essentia's UnaryOperator element-wise transforms."""

    # Mixed small / fractional / negative / very large values so each
    # operator's range behaviour is exercised.
    testInput = [1,2,3,4,3.4,-5.0008, 100034]

    def testEmpty(self):
        # An empty input vector must yield an empty output vector.
        self.assertEqualVector(UnaryOperator()([]), [])

    def testOne(self):
        # 'identity' passes a single value through unchanged.
        self.assertEqualVector(UnaryOperator(type="identity")([101]), [101])

    def testAbs(self):
        self.assertAlmostEqualVector(UnaryOperator(type="abs")(self.testInput),
                                     [1,2,3,4,3.4,5.0008,100034])

    def testLog10(self):
        # NOTE(review): the expected -30 for the negative input suggests the
        # implementation clamps log10 of non-positive values to a floor —
        # confirm against UnaryOperator's documentation.
        self.assertAlmostEqualVector(
            UnaryOperator(type="log10")(self.testInput),
            [0., 0.30103001, 0.4771212637, 0.60206002, 0.5314789414, -30., 5.0001478195])

    def testLog(self):
        self.assertAlmostEqualVector(
            UnaryOperator(type="log")(self.testInput),
            [0., 0.6931471825, 1.0986123085, 1.3862943649, 1.223775506, -69.0775527954, 11.5132656097])

    def testLn(self):
        self.assertAlmostEqualVector(UnaryOperator(type="ln")(self.testInput),
            [0, 0.693147181, 1.098612289, 1.386294361, 1.223775432, -69.07755279, 11.513265407])

    def testLin2Db(self):
        self.assertAlmostEqualVector(
            UnaryOperator(type="lin2db")(self.testInput),
            [0., 3.01029992, 4.77121258, 6.02059984, 5.3147893, -90., 50.00147629])

    def testDb2Lin(self):
        # remove the last element because it causes an overflow because it is
        # too large
        self.assertAlmostEqualVector(
            UnaryOperator(type="db2lin")(self.testInput[:-1]),
            [1.25892544, 1.58489323, 1.99526227, 2.51188636, 2.18776178, 0.3161695],
            2e-7)

    def testSine(self):
        self.assertAlmostEqualVector(UnaryOperator(type="sin")(self.testInput),
            [0.841470985, 0.909297427, 0.141120008, -0.756802495, -0.255541102, 0.958697038, -0.559079868], 1e-6)

    def testCosine(self):
        self.assertAlmostEqualVector(UnaryOperator(type="cos")(self.testInput),
            [0.540302306, -0.416146837, -0.989992497, -0.653643621, -0.966798193, 0.284429234, 0.829113805], 1e-6)

    def testSqrt(self):
        # first take abs so we won't take sqrt of a negative (that test comes later)
        absInput = UnaryOperator(type="abs")(self.testInput)
        self.assertAlmostEqualVector(UnaryOperator(type="sqrt")(absInput),
            [1, 1.414213562, 1.732050808, 2, 1.843908891, 2.236246856, 316.281520168])

    def testSqrtNegative(self):
        # sqrt of a negative element is expected to fail at compute time.
        self.assertComputeFails(UnaryOperator(type="sqrt"),([0, -1, 1]))

    def testSquare(self):
        self.assertAlmostEqualVector(UnaryOperator(type="square")(self.testInput),
            [1, 4, 9, 16, 11.56, 25.0080006, 10006801156])

    def testInvalidParam(self):
        # 'exp' is not a supported operator type, so configuration must fail.
        self.assertConfigureFails(UnaryOperator(), {'type':'exp'})
# Aggregate every TestUnaryOperator case for the Essentia test runner.
suite = allTests(TestUnaryOperator)

if __name__ == '__main__':
    TextTestRunner(verbosity=2).run(suite)
| agpl-3.0 |
haowu4682/gem5 | src/arch/x86/isa/insts/general_purpose/rotate_and_shift/shift.py | 91 | 6764 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop SAL_R_I
{
slli reg, reg, imm, flags=(CF,OF,SF,ZF,PF)
};
def macroop SAL_M_I
{
ldst t1, seg, sib, disp
slli t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SAL_P_I
{
rdip t7
ldst t1, seg, riprel, disp
slli t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SAL_1_R
{
slli reg, reg, 1, flags=(CF,OF,SF,ZF,PF)
};
def macroop SAL_1_M
{
ldst t1, seg, sib, disp
slli t1, t1, 1, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SAL_1_P
{
rdip t7
ldst t1, seg, riprel, disp
slli t1, t1, 1, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SAL_R_R
{
sll reg, reg, regm, flags=(CF,OF,SF,ZF,PF)
};
def macroop SAL_M_R
{
ldst t1, seg, sib, disp
sll t1, t1, reg, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SAL_P_R
{
rdip t7
ldst t1, seg, riprel, disp
sll t1, t1, reg, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SHLD_R_R
{
mdbi regm, 0
sld reg, reg, rcx, flags=(CF,OF,SF,ZF,PF)
};
def macroop SHLD_M_R
{
ldst t1, seg, sib, disp
mdbi reg, 0
sld t1, t1, rcx, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SHLD_P_R
{
rdip t7
ldst t1, seg, riprel, disp
mdbi reg, 0
sld t1, t1, rcx, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SHLD_R_R_I
{
mdbi regm, 0
sldi reg, reg, imm, flags=(CF,OF,SF,ZF,PF)
};
def macroop SHLD_M_R_I
{
ldst t1, seg, sib, disp
mdbi reg, 0
sldi t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SHLD_P_R_I
{
rdip t7
ldst t1, seg, riprel, disp
mdbi reg, 0
sldi t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SHR_R_I
{
srli reg, reg, imm, flags=(CF,OF,SF,ZF,PF)
};
def macroop SHR_M_I
{
ldst t1, seg, sib, disp
srli t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SHR_P_I
{
rdip t7
ldst t1, seg, riprel, disp
srli t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SHR_1_R
{
srli reg, reg, 1, flags=(CF,OF,SF,ZF,PF)
};
def macroop SHR_1_M
{
ldst t1, seg, sib, disp
srli t1, t1, 1, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SHR_1_P
{
rdip t7
ldst t1, seg, riprel, disp
srli t1, t1, 1, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SHR_R_R
{
srl reg, reg, regm, flags=(CF,OF,SF,ZF,PF)
};
def macroop SHR_M_R
{
ldst t1, seg, sib, disp
srl t1, t1, reg, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SHR_P_R
{
rdip t7
ldst t1, seg, riprel, disp
srl t1, t1, reg, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SHRD_R_R
{
mdbi regm, 0
srd reg, reg, rcx, flags=(CF,OF,SF,ZF,PF)
};
def macroop SHRD_M_R
{
ldst t1, seg, sib, disp
mdbi reg, 0
srd t1, t1, rcx, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SHRD_P_R
{
rdip t7
ldst t1, seg, riprel, disp
mdbi reg, 0
srd t1, t1, rcx, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SHRD_R_R_I
{
mdbi regm, 0
srdi reg, reg, imm, flags=(CF,OF,SF,ZF,PF)
};
def macroop SHRD_M_R_I
{
ldst t1, seg, sib, disp
mdbi reg, 0
srdi t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SHRD_P_R_I
{
rdip t7
ldst t1, seg, riprel, disp
mdbi reg, 0
srdi t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SAR_R_I
{
srai reg, reg, imm, flags=(CF,OF,SF,ZF,PF)
};
def macroop SAR_M_I
{
ldst t1, seg, sib, disp
srai t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SAR_P_I
{
rdip t7
ldst t1, seg, riprel, disp
srai t1, t1, imm, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SAR_1_R
{
srai reg, reg, 1, flags=(CF,OF,SF,ZF,PF)
};
def macroop SAR_1_M
{
ldst t1, seg, sib, disp
srai t1, t1, 1, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SAR_1_P
{
rdip t7
ldst t1, seg, riprel, disp
srai t1, t1, 1, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
def macroop SAR_R_R
{
sra reg, reg, regm, flags=(CF,OF,SF,ZF,PF)
};
def macroop SAR_M_R
{
ldst t1, seg, sib, disp
sra t1, t1, reg, flags=(CF,OF,SF,ZF,PF)
st t1, seg, sib, disp
};
def macroop SAR_P_R
{
rdip t7
ldst t1, seg, riprel, disp
sra t1, t1, reg, flags=(CF,OF,SF,ZF,PF)
st t1, seg, riprel, disp
};
'''
| bsd-3-clause |
webmull/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/config/committervalidator_unittest.py | 120 | 2649 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.host_mock import MockHost
from .committervalidator import CommitterValidator
class CommitterValidatorTest(unittest.TestCase):
    def test_flag_permission_rejection_message(self):
        """The rejection message must point contributors at committers.py."""
        validator = CommitterValidator(MockHost())
        self.assertEqual(validator._committers_py_path(), "Tools/Scripts/webkitpy/common/config/committers.py")
        expected_message = """foo@foo.com does not have review permissions according to http://trac.webkit.org/browser/trunk/Tools/Scripts/webkitpy/common/config/committers.py.
- If you do not have review rights please read http://webkit.org/coding/contributing.html for instructions on how to use bugzilla flags.
- If you have review rights please correct the error in Tools/Scripts/webkitpy/common/config/committers.py by adding yourself to the file (no review needed). The commit-queue restarts itself every 2 hours. After restart the commit-queue will correctly respect your review rights."""
        actual_message = validator._flag_permission_rejection_message("foo@foo.com", "review")
        self.assertMultiLineEqual(actual_message, expected_message)
| bsd-3-clause |
octavioturra/aritial | google_appengine/google/appengine/cron/groctimespecification.py | 5 | 10878 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Implementation of scheduling for Groc format schedules.
A Groc schedule looks like '1st,2nd monday 9:00', or 'every 20 mins'. This
module takes a parsed schedule (produced by Antlr) and creates objects that
can produce times that match this schedule.
A parsed schedule is one of two types - an Interval or a Specific Time.
See the class docstrings for more.
Extensions to be considered:
allowing a comma separated list of times to run
allowing the user to specify particular days of the month to run
"""
import calendar
import datetime
try:
import pytz
except ImportError:
pytz = None
import groc
# Period names produced by the Groc parser for interval schedules.
HOURS = 'hours'
MINUTES = 'minutes'

# When pytz is unavailable, define local stand-ins so callers can still
# catch these exception types uniformly (they will simply never be raised).
try:
  from pytz import NonExistentTimeError
  from pytz import AmbiguousTimeError
except ImportError:
  class NonExistentTimeError(Exception):
    pass
  class AmbiguousTimeError(Exception):
    pass
def GrocTimeSpecification(schedule, timezone=None):
  """Factory function.

  Turns a schedule specification into a TimeSpecification.

  Arguments:
    schedule: the schedule specification, as a string
    timezone: the optional timezone as a string for this specification.
        Defaults to 'UTC' - valid entries are things like 'Australia/Victoria'
        or 'PST8PDT'.

  Returns:
    a TimeSpecification instance
  """
  parser = groc.CreateParser(schedule)
  parser.timespec()

  # Any token left in the stream after parsing means the schedule string
  # had trailing garbage; LT(1) is a non-consuming lookahead.
  if parser.getTokenStream().LT(1).getText():
    raise groc.GrocException(
        'Extra token %r' % parser.getTokenStream().LT(1).getText())

  # 'every N hours/mins' schedules set period_string; anything else parsed
  # as a specific-time schedule (ordinals/weekdays/months/monthdays).
  if parser.period_string:
    return IntervalTimeSpecification(parser.interval_mins,
                                     parser.period_string,
                                     parser.synchronized)
  else:
    return SpecificTimeSpecification(parser.ordinal_set, parser.weekday_set,
                                     parser.month_set,
                                     parser.monthday_set,
                                     parser.time_string,
                                     timezone)
class TimeSpecification(object):
  """Base class for time specifications."""

  def GetMatches(self, start, n):
    """Returns the next n matching times, in order, after time start.

    Arguments:
      start: a datetime to start from; all matches are strictly after it.
      n: how many matching datetimes to produce.

    Returns:
      a list of n datetime objects.
    """
    matches = []
    current = start
    for _ in range(n):
      current = self.GetMatch(current)
      matches.append(current)
    return matches

  def GetMatch(self, start):
    """Returns the next match after time start; subclasses must override.

    Arguments:
      start: a datetime to start from.

    Returns:
      a datetime object.
    """
    raise NotImplementedError
class IntervalTimeSpecification(TimeSpecification):
  """A time specification for a fixed-interval schedule.

  Attributes:
    interval: the number of period units between matches.
    period: the unit, either 'hours' or 'minutes'.
    synchronized: whether matches are locked to fixed offsets from midnight.
  """

  def __init__(self, interval, period, synchronized=False):
    super(IntervalTimeSpecification, self).__init__()
    if interval < 1:
      raise groc.GrocException('interval must be greater than zero')
    self.interval = interval
    self.period = period
    self.synchronized = synchronized
    # Express one full interval in seconds, whatever the period unit.
    unit_seconds = 3600 if self.period == HOURS else 60
    self.seconds = self.interval * unit_seconds
    if self.synchronized:
      # Synchronized intervals must tile a 24 hour day exactly.
      if self.seconds > 86400 or 86400 % self.seconds != 0:
        raise groc.GrocException('can only use synchronized for periods that'
                                 ' divide evenly into 24 hours')

  def GetMatch(self, t):
    """Returns the next match after time 't'.

    Arguments:
      t: a datetime to start from; the match is strictly after it.

    Returns:
      a datetime object.
    """
    if not self.synchronized:
      return t + datetime.timedelta(seconds=self.seconds)
    # Synchronized: jump to the next multiple of the interval counted from
    # midnight of t's day.
    midnight = t.replace(hour=0, minute=0, second=0, microsecond=0)
    elapsed_today = (t - midnight).seconds
    remaining = self.seconds - elapsed_today % self.seconds
    return t + datetime.timedelta(seconds=remaining)
class SpecificTimeSpecification(TimeSpecification):
  """Specific time specification.

  A Specific interval is more complex, but defines a certain time to run and
  the days that it should run. It has the following attributes:
  time     - the time of day to run, as 'HH:MM'
  ordinals - first, second, third &c, as a set of integers in 1..5
  months   - the months that this should run, as a set of integers in 1..12
  weekdays - the days of the week that this should run, as a set of integers,
             0=Sunday, 6=Saturday
  timezone - the optional timezone as a string for this specification.
             Defaults to UTC - valid entries are things like Australia/Victoria
             or PST8PDT.

  A specific time schedule can be quite complex. A schedule could look like
  this:
  '1st,third sat,sun of jan,feb,mar 09:15'

  In this case, ordinals would be {1,3}, weekdays {0,6}, months {1,2,3} and
  time would be '09:15'.
  """
  # Class-level default: instances without an explicit timezone stay naive.
  timezone = None

  def __init__(self, ordinals=None, weekdays=None, months=None, monthdays=None,
               timestr='00:00', timezone=None):
    super(SpecificTimeSpecification, self).__init__()
    if weekdays and monthdays:
      raise ValueError('cannot supply both monthdays and weekdays')
    # Omitted ordinals/weekdays/months default to the full valid range.
    if ordinals is None:
      self.ordinals = set(range(1, 6))
    else:
      self.ordinals = set(ordinals)
    if weekdays is None:
      self.weekdays = set(range(7))
    else:
      self.weekdays = set(weekdays)
    if months is None:
      self.months = set(range(1, 13))
    else:
      self.months = set(months)
    if not monthdays:
      self.monthdays = set()
    else:
      if max(monthdays) > 31 or min(monthdays) < 1:
        raise ValueError('invalid day of month')
      self.monthdays = set(monthdays)
    hourstr, minutestr = timestr.split(':')
    self.time = datetime.time(int(hourstr), int(minutestr))
    if timezone:
      if pytz is None:
        raise ValueError('need pytz in order to specify a timezone')
      self.timezone = pytz.timezone(timezone)

  def _MatchingDays(self, year, month):
    """Returns matching days for the given year and month.

    For the given year and month, return the days that match this instance's
    day specification, based on either (a) the ordinals and weekdays, or
    (b) the explicitly specified monthdays. If monthdays are specified,
    dates that fall outside the range of the month will not be returned.

    Arguments:
      year: the year as an integer
      month: the month as an integer, in range 1-12

    Returns:
      a list of matching days, as ints in range 1-31
    """
    start_day, last_day = calendar.monthrange(year, month)
    if self.monthdays:
      return sorted([day for day in self.monthdays if day <= last_day])
    out_days = []
    # calendar.monthrange reports the first weekday with Monday=0; shift to
    # this class's Sunday=0 convention.
    start_day = (start_day + 1) % 7
    for ordinal in self.ordinals:
      for weekday in self.weekdays:
        # Day-of-month of the ordinal'th occurrence of this weekday.
        day = ((weekday - start_day) % 7) + 1
        day += 7 * (ordinal - 1)
        if day <= last_day:
          out_days.append(day)
    return sorted(out_days)

  def _NextMonthGenerator(self, start, matches):
    """Creates a generator that produces results from the set 'matches'.

    Matches must be >= 'start'. If none match, the wrap counter is incremented,
    and the result set is reset to the full set. Yields a 2-tuple of (match,
    wrapcount).

    Arguments:
      start: first set of matches will be >= this value (an int)
      matches: the set of potential matches (a sequence of ints)

    Yields:
      a two-tuple of (match, wrap counter). match is an int in range (1-12),
      wrapcount is a int indicating how many times we've wrapped around.
    """
    potential = matches = sorted(matches)
    # 'after' starts just below 'start' so start itself can be yielded first.
    after = start - 1
    wrapcount = 0
    while True:
      potential = [x for x in potential if x > after]
      if not potential:
        # Wrapped past December: start over and count the year boundary.
        wrapcount += 1
        potential = matches
      after = potential[0]
      yield (after, wrapcount)

  def GetMatch(self, start):
    """Returns the next time that matches the schedule after time start.

    Arguments:
      start: a UTC datetime to start from. Matches will start after this time

    Returns:
      a datetime object
    """
    start_time = start
    if self.timezone and pytz is not None:
      # Work in the schedule's local timezone, then strip tzinfo so the
      # replace()-based arithmetic below operates on naive datetimes.
      if not start_time.tzinfo:
        start_time = pytz.utc.localize(start_time)
      start_time = start_time.astimezone(self.timezone)
      start_time = start_time.replace(tzinfo=None)
    # NOTE(review): if self.months were ever empty the loop below would hit
    # an unbound 'months'; __init__ defaults it to all twelve, so in
    # practice this branch always runs.
    if self.months:
      months = self._NextMonthGenerator(start_time.month, self.months)
    while True:
      month, yearwraps = months.next()
      candidate_month = start_time.replace(day=1, month=month,
                                           year=start_time.year + yearwraps)
      day_matches = self._MatchingDays(candidate_month.year, month)
      if ((candidate_month.year, candidate_month.month)
          == (start_time.year, start_time.month)):
        # Still inside the starting month: drop days already passed, and
        # drop today if its scheduled time has already gone by.
        day_matches = [x for x in day_matches if x >= start_time.day]
        while (day_matches and day_matches[0] == start_time.day
               and start_time.time() >= self.time):
          day_matches.pop(0)
      while day_matches:
        out = candidate_month.replace(day=day_matches[0], hour=self.time.hour,
                                      minute=self.time.minute, second=0,
                                      microsecond=0)
        if self.timezone and pytz is not None:
          try:
            # is_dst=None makes pytz raise on ambiguous/nonexistent local
            # times instead of guessing.
            out = self.timezone.localize(out, is_dst=None)
          except AmbiguousTimeError:
            # Fall-back DST hour occurs twice; accept pytz's default pick.
            out = self.timezone.localize(out)
          except NonExistentTimeError:
            # Spring-forward gap: step forward an hour at a time (up to a
            # day) until the local time exists again.
            for _ in range(24):
              out = out + datetime.timedelta(minutes=60)
              try:
                out = self.timezone.localize(out)
              except NonExistentTimeError:
                continue
              break
          out = out.astimezone(pytz.utc)
        return out
| apache-2.0 |
tarballs-are-good/sympy | sympy/physics/units.py | 2 | 5278 | """
Physical units and dimensions.
The base class is Unit, where all here defined units (~200) inherit from.
"""
from sympy import Rational, pi
from sympy.core import AtomicExpr
class Unit(AtomicExpr):
    """
    Base class for all physical units.

    Create own units like:
    m = Unit("meter", "m")
    """
    is_positive = True    # make (m**2)**Rational(1,2) --> m
    is_commutative = True

    __slots__ = ["name", "abbrev"]

    def __new__(cls, name, abbrev, **assumptions):
        obj = AtomicExpr.__new__(cls, **assumptions)
        # repr() replaces the Python 2-only backquote syntax (`x`); it is
        # exactly equivalent and also valid under Python 3.
        # NOTE(review): asserts are stripped under -O, so this validation
        # silently disappears in optimized runs.
        assert isinstance(name, str), repr(type(name))
        assert isinstance(abbrev, str), repr(type(abbrev))
        obj.name = name
        obj.abbrev = abbrev
        return obj

    def __getnewargs__(self):
        # Support pickling: re-create via Unit(name, abbrev).
        return (self.name, self.abbrev)

    def __eq__(self, other):
        # Units compare equal by name only; the abbreviation is cosmetic.
        return isinstance(other, Unit) and self.name == other.name

    def __hash__(self):
        return super(Unit, self).__hash__()

    def _hashable_content(self):
        return (self.name,self.abbrev)
def defunit(value, *names):
    """Bind *value* to every name in *names* in this module's namespace."""
    module_scope = globals()
    for alias in names:
        module_scope[alias] = value
# Dimensionless
percent = percents = Rational(1,100)
# NOTE(review): 'permille = permille' assigns the same name twice; this
# looks like a typo for a plural alias (cf. 'percent = percents') — confirm
# before changing, since adding 'permilles' would extend the module API.
permille = permille = Rational(1,1000)

# SI prefixes as exact rational powers of ten.
ten = Rational(10)
yotta = ten**24
zetta = ten**21
exa = ten**18
peta = ten**15
tera = ten**12
giga = ten**9
mega = ten**6
kilo = ten**3
deca = ten**1
deci = ten**-1
centi = ten**-2
milli = ten**-3
micro = ten**-6
nano = ten**-9
pico = ten**-12
femto = ten**-15
atto = ten**-18
zepto = ten**-21
yocto = ten**-24

# Angles are dimensionless: radians are 1, degrees a pi/180 multiple.
rad = radian = radians = 1
deg = degree = degrees = pi/180
# Base units
# defunit() injects every listed name into this module's globals, which is
# why bare names like m, kg, s, N, J, V are usable in the lines below.
defunit(Unit('meter', 'm'), 'm', 'meter', 'meters')
defunit(Unit('kilogram', 'kg'), 'kg', 'kilogram', 'kilograms')
defunit(Unit('second', 's'), 's', 'second', 'seconds')
defunit(Unit('ampere', 'A'), 'A', 'ampere', 'amperes')
defunit(Unit('kelvin', 'K'), 'K', 'kelvin', 'kelvins')
defunit(Unit('mole', 'mol'), 'mol', 'mole', 'moles')
defunit(Unit('candela', 'cd'), 'cd', 'candela', 'candelas')

# Derived units, expressed in terms of the base units defined above.
defunit(1/s, 'Hz', 'hz', 'hertz')
defunit(m*kg/s**2, 'N', 'newton', 'newtons')
defunit(N*m, 'J', 'joule', 'joules')
defunit(J/s, 'W', 'watt', 'watts')
defunit(N/m**2, 'Pa', 'pa', 'pascal', 'pascals')
defunit(s*A, 'C', 'coulomb', 'coulombs')
# Note the lowercase 'v' alias for volt in addition to 'V'.
defunit(W/A, 'v', 'V', 'volt', 'volts')
defunit(V/A, 'ohm', 'ohms')
defunit(A/V, 'S', 'siemens', 'mho', 'mhos')
defunit(C/V, 'F', 'farad', 'farads')
defunit(J/A, 'Wb', 'wb', 'weber', 'webers')
defunit(V*s/m**2, 'T', 'tesla', 'teslas')
defunit(V*s/A, 'H', 'henry', 'henrys')
# Common length units
defunit(kilo*m, 'km', 'kilometer', 'kilometers')
defunit(deci*m, 'dm', 'decimeter', 'decimeters')
defunit(centi*m, 'cm', 'centimeter', 'centimeters')
defunit(milli*m, 'mm', 'millimeter', 'millimeters')
defunit(micro*m, 'um', 'micrometer', 'micrometers', 'micron', 'microns')
defunit(nano*m, 'nm', 'nanometer', 'nanometers')
defunit(pico*m, 'pm', 'picometer', 'picometers')
# Imperial lengths via the exact foot definition (0.3048 m); Rational
# string literals keep them exact rather than binary-float approximations.
defunit(Rational('0.3048')*m, 'ft', 'foot', 'feet')
defunit(Rational('25.4')*mm, 'inch', 'inches')
defunit(3*ft, 'yd', 'yard', 'yards')
defunit(5280*ft, 'mi', 'mile', 'miles')

# Common volume and area units
defunit(m**3 / 1000, 'l', 'liter', 'liters')
defunit(deci*l, 'dl', 'deciliter', 'deciliters')
defunit(centi*l, 'cl', 'centiliter', 'centiliters')
defunit(milli*l, 'ml', 'milliliter', 'milliliters')

# Common time units
defunit(milli*s, 'ms', 'millisecond', 'milliseconds')
defunit(micro*s, 'us', 'microsecond', 'microseconds')
defunit(nano*s, 'ns', 'nanosecond', 'nanoseconds')
defunit(pico*s, 'ps', 'picosecond', 'picoseconds')
defunit(60*s, 'minute', 'minutes')
defunit(60*minute, 'h', 'hour', 'hours')
defunit(24*hour, 'day', 'days')
defunit(Rational('31558149.540')*s, 'sidereal_year', 'sidereal_years')
defunit(Rational('365.24219')*day, 'tropical_year', 'tropical_years')
defunit(Rational('365')*day, 'common_year', 'common_years')
defunit(Rational('365.25')*day, 'julian_year', 'julian_years')
# Plain 'year' is the tropical year by convention here.
year = years = tropical_year

# Common mass units
defunit(kilogram / kilo, 'g', 'gram', 'grams')
defunit(milli * g, 'mg', 'milligram', 'milligrams')
defunit(micro * g, 'ug', 'microgram', 'micrograms')
#----------------------------------------------------------------------------
# Physical constants
#
c = speed_of_light = 299792458 * m/s
G = gravitational_constant = Rational('6.67428') * ten**-11 * m**3 / kg / s**2
u0 = magnetic_constant = 4*pi * ten**-7 * N/A**2
e0 = electric_constant = 1/(u0 * c**2)
Z0 = vacuum_impedance = u0 * c

planck = Rational('6.62606896') * ten**-34 * J*s
hbar = planck / (2*pi)

avogadro = (Rational('6.02214179') * 10**23) / mol
boltzmann = Rational('1.3806505') * ten**-23 * J / K

gee = gees = Rational('9.80665') * m/s**2
atmosphere = atmospheres = atm = 101325 * pascal

kPa = kilo*Pa
bar = bars = 100*kPa
# NOTE(review): 'pound' is defined with a factor of gee, so it is a weight
# (force), not a mass — consistent with its use in 'psi' below, but worth
# confirming against callers. Also note the float literal, unlike the
# Rational values used elsewhere.
pound = pounds = 0.45359237 * kg * gee #exact
psi = pound / inch ** 2
dHg0 = 13.5951 # approx value at 0 C
mmHg = dHg0 * 9.80665 * Pa
amu = amus = gram / avogadro
quart = quarts = 231 * inch**3
eV = 1.602176487e-19 * J

# Other convenient units and magnitudes
defunit(c*julian_year, 'ly', 'lightyear', 'lightyears')
defunit(149597870691*m, 'au', 'astronomical_unit', 'astronomical_units')

# Delete this so it doesn't pollute the namespace
del Rational, pi
| bsd-3-clause |
SRabbelier/Melange | thirdparty/google_appengine/google/appengine/api/mail_service_pb.py | 1 | 18516 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import *
import google.appengine.api.api_base_pb
class MailServiceError(ProtocolBuffer.ProtocolMessage):
  # Machine-generated protocol buffer message: edit the .proto definition
  # and regenerate rather than hand-modifying this class.

  # Error codes the mail service can return.
  OK           =    0
  INTERNAL_ERROR =    1
  BAD_REQUEST  =    2
  UNAUTHORIZED_SENDER =    3
  INVALID_ATTACHMENT_TYPE =    4

  _ErrorCode_NAMES = {
    0: "OK",
    1: "INTERNAL_ERROR",
    2: "BAD_REQUEST",
    3: "UNAUTHORIZED_SENDER",
    4: "INVALID_ATTACHMENT_TYPE",
  }

  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    # 'pass' is a generator-emitted placeholder (this message has no fields).
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n

  def ByteSizePartial(self):
    n = 0
    return n

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def OutputPartial(self, out):
    pass

  def TryMerge(self, d):
    # Skip every field: this message defines none of its own.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MailAttachment(ProtocolBuffer.ProtocolMessage):
  # Machine-generated protocol buffer message: edit the .proto definition
  # and regenerate rather than hand-modifying this class.
  # Fields: filename (required, field 1), data (required, field 2).
  has_filename_ = 0
  filename_ = ""
  has_data_ = 0
  data_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def filename(self): return self.filename_

  def set_filename(self, x):
    self.has_filename_ = 1
    self.filename_ = x

  def clear_filename(self):
    if self.has_filename_:
      self.has_filename_ = 0
      self.filename_ = ""

  def has_filename(self): return self.has_filename_

  def data(self): return self.data_

  def set_data(self, x):
    self.has_data_ = 1
    self.data_ = x

  def clear_data(self):
    if self.has_data_:
      self.has_data_ = 0
      self.data_ = ""

  def has_data(self): return self.has_data_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_filename()): self.set_filename(x.filename())
    if (x.has_data()): self.set_data(x.data())

  def Equals(self, x):
    if x is self: return 1
    if self.has_filename_ != x.has_filename_: return 0
    if self.has_filename_ and self.filename_ != x.filename_: return 0
    if self.has_data_ != x.has_data_: return 0
    if self.has_data_ and self.data_ != x.data_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # Both fields are 'required' in the .proto, so both must be set.
    initialized = 1
    if (not self.has_filename_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: filename not set.')
    if (not self.has_data_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: data not set.')
    return initialized

  def ByteSize(self):
    # +2 accounts for the one-byte wire tag of each of the two fields.
    n = 0
    n += self.lengthString(len(self.filename_))
    n += self.lengthString(len(self.data_))
    return n + 2

  def ByteSizePartial(self):
    n = 0
    if (self.has_filename_):
      n += 1
      n += self.lengthString(len(self.filename_))
    if (self.has_data_):
      n += 1
      n += self.lengthString(len(self.data_))
    return n

  def Clear(self):
    self.clear_filename()
    self.clear_data()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = field 1 length-delimited, 18 = field 2 length-delimited.
    out.putVarInt32(10)
    out.putPrefixedString(self.filename_)
    out.putVarInt32(18)
    out.putPrefixedString(self.data_)

  def OutputPartial(self, out):
    if (self.has_filename_):
      out.putVarInt32(10)
      out.putPrefixedString(self.filename_)
    if (self.has_data_):
      out.putVarInt32(18)
      out.putPrefixedString(self.data_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_filename(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_data(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_filename_: res+=prefix+("FileName: %s\n" % self.DebugFormatString(self.filename_))
    if self.has_data_: res+=prefix+("Data: %s\n" % self.DebugFormatString(self.data_))
    return res

  def _BuildTagLookupTable(sparse, maxtag, default=None):
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])

  kFileName = 1
  kData = 2

  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "FileName",
    2: "Data",
  }, 2)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
  }, 2, ProtocolBuffer.Encoder.MAX_TYPE)

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MailMessage(ProtocolBuffer.ProtocolMessage):
  """Generated message describing an outgoing mail.

  Fields (proto1-style ProtocolBuffer):
    1 sender     (required string)    2 replyto  (optional string)
    3 to         (repeated string)    4 cc       (repeated string)
    5 bcc        (repeated string)    6 subject  (required string)
    7 textbody   (optional string)    8 htmlbody (optional string)
    9 attachment (repeated MailAttachment)
  Do not edit by hand; this class is machine generated.
  """
  # Scalar field defaults; has_*_ flips to 1 once the field is set.
  has_sender_ = 0
  sender_ = ""
  has_replyto_ = 0
  replyto_ = ""
  has_subject_ = 0
  subject_ = ""
  has_textbody_ = 0
  textbody_ = ""
  has_htmlbody_ = 0
  htmlbody_ = ""
  def __init__(self, contents=None):
    # Repeated fields are per-instance; contents, when given, is a
    # serialized message to decode into self.
    self.to_ = []
    self.cc_ = []
    self.bcc_ = []
    self.attachment_ = []
    if contents is not None: self.MergeFromString(contents)
  # -- sender / replyto accessors --
  def sender(self): return self.sender_
  def set_sender(self, x):
    self.has_sender_ = 1
    self.sender_ = x
  def clear_sender(self):
    if self.has_sender_:
      self.has_sender_ = 0
      self.sender_ = ""
  def has_sender(self): return self.has_sender_
  def replyto(self): return self.replyto_
  def set_replyto(self, x):
    self.has_replyto_ = 1
    self.replyto_ = x
  def clear_replyto(self):
    if self.has_replyto_:
      self.has_replyto_ = 0
      self.replyto_ = ""
  def has_replyto(self): return self.has_replyto_
  # -- repeated recipient lists (to / cc / bcc) --
  def to_size(self): return len(self.to_)
  def to_list(self): return self.to_
  def to(self, i):
    return self.to_[i]
  def set_to(self, i, x):
    self.to_[i] = x
  def add_to(self, x):
    self.to_.append(x)
  def clear_to(self):
    self.to_ = []
  def cc_size(self): return len(self.cc_)
  def cc_list(self): return self.cc_
  def cc(self, i):
    return self.cc_[i]
  def set_cc(self, i, x):
    self.cc_[i] = x
  def add_cc(self, x):
    self.cc_.append(x)
  def clear_cc(self):
    self.cc_ = []
  def bcc_size(self): return len(self.bcc_)
  def bcc_list(self): return self.bcc_
  def bcc(self, i):
    return self.bcc_[i]
  def set_bcc(self, i, x):
    self.bcc_[i] = x
  def add_bcc(self, x):
    self.bcc_.append(x)
  def clear_bcc(self):
    self.bcc_ = []
  # -- subject / body accessors --
  def subject(self): return self.subject_
  def set_subject(self, x):
    self.has_subject_ = 1
    self.subject_ = x
  def clear_subject(self):
    if self.has_subject_:
      self.has_subject_ = 0
      self.subject_ = ""
  def has_subject(self): return self.has_subject_
  def textbody(self): return self.textbody_
  def set_textbody(self, x):
    self.has_textbody_ = 1
    self.textbody_ = x
  def clear_textbody(self):
    if self.has_textbody_:
      self.has_textbody_ = 0
      self.textbody_ = ""
  def has_textbody(self): return self.has_textbody_
  def htmlbody(self): return self.htmlbody_
  def set_htmlbody(self, x):
    self.has_htmlbody_ = 1
    self.htmlbody_ = x
  def clear_htmlbody(self):
    if self.has_htmlbody_:
      self.has_htmlbody_ = 0
      self.htmlbody_ = ""
  def has_htmlbody(self): return self.has_htmlbody_
  # -- repeated attachment sub-messages --
  def attachment_size(self): return len(self.attachment_)
  def attachment_list(self): return self.attachment_
  def attachment(self, i):
    return self.attachment_[i]
  def mutable_attachment(self, i):
    return self.attachment_[i]
  def add_attachment(self):
    # Appends a fresh MailAttachment and returns it for in-place filling.
    x = MailAttachment()
    self.attachment_.append(x)
    return x
  def clear_attachment(self):
    self.attachment_ = []
  def MergeFrom(self, x):
    # Copy set scalar fields and append all repeated values from x.
    assert x is not self
    if (x.has_sender()): self.set_sender(x.sender())
    if (x.has_replyto()): self.set_replyto(x.replyto())
    for i in xrange(x.to_size()): self.add_to(x.to(i))
    for i in xrange(x.cc_size()): self.add_cc(x.cc(i))
    for i in xrange(x.bcc_size()): self.add_bcc(x.bcc(i))
    if (x.has_subject()): self.set_subject(x.subject())
    if (x.has_textbody()): self.set_textbody(x.textbody())
    if (x.has_htmlbody()): self.set_htmlbody(x.htmlbody())
    for i in xrange(x.attachment_size()): self.add_attachment().CopyFrom(x.attachment(i))
  def Equals(self, x):
    # Field-wise equality; returns 1/0 like the rest of the generated API.
    if x is self: return 1
    if self.has_sender_ != x.has_sender_: return 0
    if self.has_sender_ and self.sender_ != x.sender_: return 0
    if self.has_replyto_ != x.has_replyto_: return 0
    if self.has_replyto_ and self.replyto_ != x.replyto_: return 0
    if len(self.to_) != len(x.to_): return 0
    for e1, e2 in zip(self.to_, x.to_):
      if e1 != e2: return 0
    if len(self.cc_) != len(x.cc_): return 0
    for e1, e2 in zip(self.cc_, x.cc_):
      if e1 != e2: return 0
    if len(self.bcc_) != len(x.bcc_): return 0
    for e1, e2 in zip(self.bcc_, x.bcc_):
      if e1 != e2: return 0
    if self.has_subject_ != x.has_subject_: return 0
    if self.has_subject_ and self.subject_ != x.subject_: return 0
    if self.has_textbody_ != x.has_textbody_: return 0
    if self.has_textbody_ and self.textbody_ != x.textbody_: return 0
    if self.has_htmlbody_ != x.has_htmlbody_: return 0
    if self.has_htmlbody_ and self.htmlbody_ != x.htmlbody_: return 0
    if len(self.attachment_) != len(x.attachment_): return 0
    for e1, e2 in zip(self.attachment_, x.attachment_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # sender and subject are required; attachments must themselves be valid.
    initialized = 1
    if (not self.has_sender_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: sender not set.')
    if (not self.has_subject_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: subject not set.')
    for p in self.attachment_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # Serialized size assuming required fields are present; the trailing
    # +2 covers the tags of the two required string fields.
    n = 0
    n += self.lengthString(len(self.sender_))
    if (self.has_replyto_): n += 1 + self.lengthString(len(self.replyto_))
    n += 1 * len(self.to_)
    for i in xrange(len(self.to_)): n += self.lengthString(len(self.to_[i]))
    n += 1 * len(self.cc_)
    for i in xrange(len(self.cc_)): n += self.lengthString(len(self.cc_[i]))
    n += 1 * len(self.bcc_)
    for i in xrange(len(self.bcc_)): n += self.lengthString(len(self.bcc_[i]))
    n += self.lengthString(len(self.subject_))
    if (self.has_textbody_): n += 1 + self.lengthString(len(self.textbody_))
    if (self.has_htmlbody_): n += 1 + self.lengthString(len(self.htmlbody_))
    n += 1 * len(self.attachment_)
    for i in xrange(len(self.attachment_)): n += self.lengthString(self.attachment_[i].ByteSize())
    return n + 2
  def ByteSizePartial(self):
    # Like ByteSize, but only counts fields that are actually set.
    n = 0
    if (self.has_sender_):
      n += 1
      n += self.lengthString(len(self.sender_))
    if (self.has_replyto_): n += 1 + self.lengthString(len(self.replyto_))
    n += 1 * len(self.to_)
    for i in xrange(len(self.to_)): n += self.lengthString(len(self.to_[i]))
    n += 1 * len(self.cc_)
    for i in xrange(len(self.cc_)): n += self.lengthString(len(self.cc_[i]))
    n += 1 * len(self.bcc_)
    for i in xrange(len(self.bcc_)): n += self.lengthString(len(self.bcc_[i]))
    if (self.has_subject_):
      n += 1
      n += self.lengthString(len(self.subject_))
    if (self.has_textbody_): n += 1 + self.lengthString(len(self.textbody_))
    if (self.has_htmlbody_): n += 1 + self.lengthString(len(self.htmlbody_))
    n += 1 * len(self.attachment_)
    for i in xrange(len(self.attachment_)): n += self.lengthString(self.attachment_[i].ByteSizePartial())
    return n
  def Clear(self):
    self.clear_sender()
    self.clear_replyto()
    self.clear_to()
    self.clear_cc()
    self.clear_bcc()
    self.clear_subject()
    self.clear_textbody()
    self.clear_htmlbody()
    self.clear_attachment()
  def OutputUnchecked(self, out):
    # Serialize; wire tag for string field f is (f << 3) | 2, hence
    # 10, 18, 26, 34, 42, 50, 58, 66, 74 below.
    out.putVarInt32(10)
    out.putPrefixedString(self.sender_)
    if (self.has_replyto_):
      out.putVarInt32(18)
      out.putPrefixedString(self.replyto_)
    for i in xrange(len(self.to_)):
      out.putVarInt32(26)
      out.putPrefixedString(self.to_[i])
    for i in xrange(len(self.cc_)):
      out.putVarInt32(34)
      out.putPrefixedString(self.cc_[i])
    for i in xrange(len(self.bcc_)):
      out.putVarInt32(42)
      out.putPrefixedString(self.bcc_[i])
    out.putVarInt32(50)
    out.putPrefixedString(self.subject_)
    if (self.has_textbody_):
      out.putVarInt32(58)
      out.putPrefixedString(self.textbody_)
    if (self.has_htmlbody_):
      out.putVarInt32(66)
      out.putPrefixedString(self.htmlbody_)
    for i in xrange(len(self.attachment_)):
      out.putVarInt32(74)
      out.putVarInt32(self.attachment_[i].ByteSize())
      self.attachment_[i].OutputUnchecked(out)
  def OutputPartial(self, out):
    # Like OutputUnchecked, but required fields are emitted only when set.
    if (self.has_sender_):
      out.putVarInt32(10)
      out.putPrefixedString(self.sender_)
    if (self.has_replyto_):
      out.putVarInt32(18)
      out.putPrefixedString(self.replyto_)
    for i in xrange(len(self.to_)):
      out.putVarInt32(26)
      out.putPrefixedString(self.to_[i])
    for i in xrange(len(self.cc_)):
      out.putVarInt32(34)
      out.putPrefixedString(self.cc_[i])
    for i in xrange(len(self.bcc_)):
      out.putVarInt32(42)
      out.putPrefixedString(self.bcc_[i])
    if (self.has_subject_):
      out.putVarInt32(50)
      out.putPrefixedString(self.subject_)
    if (self.has_textbody_):
      out.putVarInt32(58)
      out.putPrefixedString(self.textbody_)
    if (self.has_htmlbody_):
      out.putVarInt32(66)
      out.putPrefixedString(self.htmlbody_)
    for i in xrange(len(self.attachment_)):
      out.putVarInt32(74)
      out.putVarInt32(self.attachment_[i].ByteSizePartial())
      self.attachment_[i].OutputPartial(out)
  def TryMerge(self, d):
    # Decode fields from decoder d, skipping unknown tags.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_sender(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_replyto(d.getPrefixedString())
        continue
      if tt == 26:
        self.add_to(d.getPrefixedString())
        continue
      if tt == 34:
        self.add_cc(d.getPrefixedString())
        continue
      if tt == 42:
        self.add_bcc(d.getPrefixedString())
        continue
      if tt == 50:
        self.set_subject(d.getPrefixedString())
        continue
      if tt == 58:
        self.set_textbody(d.getPrefixedString())
        continue
      if tt == 66:
        self.set_htmlbody(d.getPrefixedString())
        continue
      if tt == 74:
        # Length-delimited sub-message: decode with a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_attachment().TryMerge(tmp)
        continue
      # Tag 0 is invalid on the wire; any other unknown tag is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Human-readable debug dump; printElemNumber appends "(i)" to the name
    # of each repeated element.
    res=""
    if self.has_sender_: res+=prefix+("Sender: %s\n" % self.DebugFormatString(self.sender_))
    if self.has_replyto_: res+=prefix+("ReplyTo: %s\n" % self.DebugFormatString(self.replyto_))
    cnt=0
    for e in self.to_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("To%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.cc_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Cc%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.bcc_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Bcc%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_subject_: res+=prefix+("Subject: %s\n" % self.DebugFormatString(self.subject_))
    if self.has_textbody_: res+=prefix+("TextBody: %s\n" % self.DebugFormatString(self.textbody_))
    if self.has_htmlbody_: res+=prefix+("HtmlBody: %s\n" % self.DebugFormatString(self.htmlbody_))
    cnt=0
    for e in self.attachment_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Attachment%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Note: deliberately no 'self'; invoked once below at class-build time.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
  # Field numbers plus text/type lookup tables used by the debug printer.
  kSender = 1
  kReplyTo = 2
  kTo = 3
  kCc = 4
  kBcc = 5
  kSubject = 6
  kTextBody = 7
  kHtmlBody = 8
  kAttachment = 9
  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
    1: "Sender",
    2: "ReplyTo",
    3: "To",
    4: "Cc",
    5: "Bcc",
    6: "Subject",
    7: "TextBody",
    8: "HtmlBody",
    9: "Attachment",
  }, 9)
  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
    1: ProtocolBuffer.Encoder.STRING,
    2: ProtocolBuffer.Encoder.STRING,
    3: ProtocolBuffer.Encoder.STRING,
    4: ProtocolBuffer.Encoder.STRING,
    5: ProtocolBuffer.Encoder.STRING,
    6: ProtocolBuffer.Encoder.STRING,
    7: ProtocolBuffer.Encoder.STRING,
    8: ProtocolBuffer.Encoder.STRING,
    9: ProtocolBuffer.Encoder.STRING,
  }, 9, ProtocolBuffer.Encoder.MAX_TYPE)
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public names exported by this generated module.
__all__ = ['MailServiceError','MailAttachment','MailMessage']
| apache-2.0 |
tboyce021/home-assistant | homeassistant/util/ssl.py | 9 | 3197 | """Helper to create SSL contexts."""
from os import environ
import ssl
import certifi
def client_context() -> ssl.SSLContext:
    """Return an SSL context for making outbound requests."""
    # Honor the same REQUESTS_CA_BUNDLE override the ``requests`` library
    # uses; when it is absent, fall back to certifi's bundled CA store.
    ca_bundle = environ.get("REQUESTS_CA_BUNDLE", certifi.where())
    return ssl.create_default_context(
        purpose=ssl.Purpose.SERVER_AUTH, cafile=ca_bundle
    )
def server_context_modern() -> ssl.SSLContext:
    """Return a server-side SSL context per Mozilla's "modern" profile.

    TLS configuration follows the best-practice guidelines from
    https://wiki.mozilla.org/Security/Server_Side_TLS
    """
    context = ssl.SSLContext(ssl.PROTOCOL_TLS)  # pylint: disable=no-member
    # Disable every protocol older than TLS 1.2 and prefer the server's
    # own cipher ordering during the handshake.
    for flag in (
        ssl.OP_NO_SSLv2,
        ssl.OP_NO_SSLv3,
        ssl.OP_NO_TLSv1,
        ssl.OP_NO_TLSv1_1,
        ssl.OP_CIPHER_SERVER_PREFERENCE,
    ):
        context.options |= flag
    # TLS compression enables the CRIME attack; switch it off when available.
    if hasattr(ssl, "OP_NO_COMPRESSION"):
        context.options |= ssl.OP_NO_COMPRESSION
    context.set_ciphers(
        ":".join(
            (
                "ECDHE-ECDSA-AES256-GCM-SHA384",
                "ECDHE-RSA-AES256-GCM-SHA384",
                "ECDHE-ECDSA-CHACHA20-POLY1305",
                "ECDHE-RSA-CHACHA20-POLY1305",
                "ECDHE-ECDSA-AES128-GCM-SHA256",
                "ECDHE-RSA-AES128-GCM-SHA256",
                "ECDHE-ECDSA-AES256-SHA384",
                "ECDHE-RSA-AES256-SHA384",
                "ECDHE-ECDSA-AES128-SHA256",
                "ECDHE-RSA-AES128-SHA256",
            )
        )
    )
    return context
def server_context_intermediate() -> ssl.SSLContext:
    """Return a server-side SSL context per Mozilla's "intermediate" profile.

    TLS configuration follows the best-practice guidelines from
    https://wiki.mozilla.org/Security/Server_Side_TLS
    """
    context = ssl.SSLContext(ssl.PROTOCOL_TLS)  # pylint: disable=no-member
    context.options |= ssl.OP_NO_SSLv2
    context.options |= ssl.OP_NO_SSLv3
    context.options |= ssl.OP_CIPHER_SERVER_PREFERENCE
    # TLS compression enables the CRIME attack; switch it off when available.
    if hasattr(ssl, "OP_NO_COMPRESSION"):
        context.options |= ssl.OP_NO_COMPRESSION
    # Cipher list taken verbatim from Mozilla's intermediate profile;
    # the trailing "!DSS" rejects DSA-authenticated suites outright.
    cipher_suites = [
        "ECDHE-ECDSA-CHACHA20-POLY1305",
        "ECDHE-RSA-CHACHA20-POLY1305",
        "ECDHE-ECDSA-AES128-GCM-SHA256",
        "ECDHE-RSA-AES128-GCM-SHA256",
        "ECDHE-ECDSA-AES256-GCM-SHA384",
        "ECDHE-RSA-AES256-GCM-SHA384",
        "DHE-RSA-AES128-GCM-SHA256",
        "DHE-RSA-AES256-GCM-SHA384",
        "ECDHE-ECDSA-AES128-SHA256",
        "ECDHE-RSA-AES128-SHA256",
        "ECDHE-ECDSA-AES128-SHA",
        "ECDHE-RSA-AES256-SHA384",
        "ECDHE-RSA-AES128-SHA",
        "ECDHE-ECDSA-AES256-SHA384",
        "ECDHE-ECDSA-AES256-SHA",
        "ECDHE-RSA-AES256-SHA",
        "DHE-RSA-AES128-SHA256",
        "DHE-RSA-AES128-SHA",
        "DHE-RSA-AES256-SHA256",
        "DHE-RSA-AES256-SHA",
        "ECDHE-ECDSA-DES-CBC3-SHA",
        "ECDHE-RSA-DES-CBC3-SHA",
        "EDH-RSA-DES-CBC3-SHA",
        "AES128-GCM-SHA256",
        "AES256-GCM-SHA384",
        "AES128-SHA256",
        "AES256-SHA256",
        "AES128-SHA",
        "AES256-SHA",
        "DES-CBC3-SHA",
        "!DSS",
    ]
    context.set_ciphers(":".join(cipher_suites))
    return context
| apache-2.0 |
basysKom/freeopcua | python/examples/inductive_client.py | 9 | 1516 |
import sys
import time
sys.path.append(".")
from IPython import embed
import opcua
class SubHandler(opcua.SubscriptionHandler):
    """Subscription callback sink: prints and stores the latest update.

    ``val`` holds the most recent data-change value and ``ev`` the most
    recent event, so interactive sessions can inspect them afterwards.
    """
    def __init__(self, *args):
        opcua.SubscriptionHandler.__init__(self, *args)
        # Bug fix: the original line read ``self.val = MessageSecurityMode::None``,
        # which is C++ syntax and a Python SyntaxError.  Start with no value.
        self.val = None
        # Also initialize ev so reading it before the first event is safe.
        self.ev = None
    def data_change(self, handle, node, val, attr):
        """Called by the client on every monitored-item data change."""
        print("Python: New data change event", handle, node, val, attr)
        self.val = val
    def event(self, handle, event):
        """Called by the client on every subscription event."""
        print("Python: New event", handle, event)
        self.ev = event
if __name__ == "__main__":
    # Interactive demo: connect to a server, browse the address space,
    # subscribe to one variable's data changes plus events, then drop
    # into an IPython shell for manual exploration.
    client = opcua.Client(True)
    #client.connect("opc.tcp://localhost:4841")
    client.connect("opc.tcp://utgaard:12685/ctt-server")
    #s.connect("opc.tcp://192.168.56.101:48030")
    #edps = client.get_server_endpoints()
    try:
        root = client.get_root_node()
        print("I got root: ", root)
        print("Childs are: ", root.get_children())
        print("Objects is: ", client.get_objects_node())
        o = client.get_objects_node()
        print("Children of objects are: ", o.get_children())
        # Browse path: Objects -> NewObject -> MyVariable (namespace index 2).
        myvar = root.get_child(["0:Objects", "2:NewObject", "2:MyVariable"])
        print("yvar is: ", myvar)
        sclt = SubHandler()
        # 100 ms publishing interval; callbacks are delivered to SubHandler.
        sub = client.create_subscription(100, sclt)
        handle = sub.subscribe_data_change(myvar)
        print("Subscribe handle is: ", handle)
        evhandle = sub.subscribe_events()
        print("Subscribe handle is: ", evhandle)
        # Interactive IPython shell; subscriptions keep firing while open.
        embed()
    finally:
        # Always release the connection, even if browsing failed.
        client.disconnect()
| lgpl-3.0 |
BT-ojossen/odoo | addons/auth_signup/controllers/main.py | 26 | 6356 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import logging
import werkzeug
import openerp
from openerp.addons.auth_signup.res_users import SignupError
from openerp.addons.web.controllers.main import ensure_db
from openerp import http
from openerp.http import request
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class AuthSignupHome(openerp.addons.web.controllers.main.Home):
    """Extends the web Home controller with signup and password-reset pages."""
    @http.route()
    def web_login(self, *args, **kw):
        """Standard login page, augmented with the signup/reset feature flags."""
        ensure_db()
        response = super(AuthSignupHome, self).web_login(*args, **kw)
        response.qcontext.update(self.get_auth_signup_config())
        if request.httprequest.method == 'GET' and request.session.uid and request.params.get('redirect'):
            # Redirect if already logged in and redirect param is present
            return http.redirect_with_hash(request.params.get('redirect'))
        return response
    @http.route('/web/signup', type='http', auth='public', website=True)
    def web_auth_signup(self, *args, **kw):
        """Render the signup form; on POST, create the account and log in."""
        qcontext = self.get_auth_signup_qcontext()
        # Signup is only reachable with an invitation token or when
        # open (uninvited) signup is enabled.
        if not qcontext.get('token') and not qcontext.get('signup_enabled'):
            raise werkzeug.exceptions.NotFound()
        if 'error' not in qcontext and request.httprequest.method == 'POST':
            try:
                self.do_signup(qcontext)
                return super(AuthSignupHome, self).web_login(*args, **kw)
            except (SignupError, AssertionError), e:
                # Distinguish "login already taken" from other failures.
                if request.env["res.users"].sudo().search([("login", "=", qcontext.get("login"))]):
                    qcontext["error"] = _("Another user is already registered using this email address.")
                else:
                    _logger.error(e.message)
                    qcontext['error'] = _("Could not create a new account.")
        return request.render('auth_signup.signup', qcontext)
    @http.route('/web/reset_password', type='http', auth='public', website=True)
    def web_auth_reset_password(self, *args, **kw):
        """Reset page: with a token set the new password, otherwise email a reset link."""
        qcontext = self.get_auth_signup_qcontext()
        if not qcontext.get('token') and not qcontext.get('reset_password_enabled'):
            raise werkzeug.exceptions.NotFound()
        if 'error' not in qcontext and request.httprequest.method == 'POST':
            try:
                if qcontext.get('token'):
                    # The user followed the emailed link: the token flow is
                    # the same as signup, setting the new password.
                    self.do_signup(qcontext)
                    return super(AuthSignupHome, self).web_login(*args, **kw)
                else:
                    login = qcontext.get('login')
                    assert login, "No login provided."
                    res_users = request.registry.get('res.users')
                    res_users.reset_password(request.cr, openerp.SUPERUSER_ID, login)
                    qcontext['message'] = _("An email has been sent with credentials to reset your password")
            except SignupError:
                qcontext['error'] = _("Could not reset your password")
                _logger.exception('error when resetting password')
            except Exception, e:
                qcontext['error'] = e.message or e.name
        return request.render('auth_signup.reset_password', qcontext)
    def get_auth_signup_config(self):
        """retrieve the module config (which features are enabled) for the login page"""
        icp = request.registry.get('ir.config_parameter')
        return {
            'signup_enabled': icp.get_param(request.cr, openerp.SUPERUSER_ID, 'auth_signup.allow_uninvited') == 'True',
            'reset_password_enabled': icp.get_param(request.cr, openerp.SUPERUSER_ID, 'auth_signup.reset_password') == 'True',
        }
    def get_auth_signup_qcontext(self):
        """ Shared helper returning the rendering context for signup and reset password """
        qcontext = request.params.copy()
        qcontext.update(self.get_auth_signup_config())
        if qcontext.get('token'):
            try:
                # retrieve the user info (name, login or email) corresponding to a signup token
                res_partner = request.registry.get('res.partner')
                token_infos = res_partner.signup_retrieve_info(request.cr, openerp.SUPERUSER_ID, qcontext.get('token'))
                for k, v in token_infos.items():
                    qcontext.setdefault(k, v)
            except:
                # NOTE(review): bare except maps ANY failure to an
                # invalid-token message; confirm nothing else can raise here.
                qcontext['error'] = _("Invalid signup token")
        return qcontext
    def do_signup(self, qcontext):
        """ Shared helper that creates a res.partner out of a token """
        values = dict((key, qcontext.get(key)) for key in ('login', 'name', 'password'))
        assert any([k for k in values.values()]), "The form was not properly filled in."
        assert values.get('password') == qcontext.get('confirm_password'), "Passwords do not match; please retype them."
        values['lang'] = request.lang
        self._signup_with_values(qcontext.get('token'), values)
        request.cr.commit()
    def _signup_with_values(self, token, values):
        # Create the user, then immediately authenticate the new session.
        db, login, password = request.registry['res.users'].signup(request.cr, openerp.SUPERUSER_ID, values, token)
        request.cr.commit()     # as authenticate will use its own cursor we need to commit the current transaction
        uid = request.session.authenticate(db, login, password)
        if not uid:
            # NOTE(review): "Authentification" is a typo, but the string is
            # a translation key -- kept byte-identical on purpose.
            raise SignupError(_('Authentification Failed.'))
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
twodee/blockly | i18n/json_to_js.py | 1 | 6581 | #!/usr/bin/python
# Converts .json files into .js files for use within Blockly apps.
#
# Copyright 2013 Google Inc.
# https://developers.google.com/blockly/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import codecs # for codecs.open(..., 'utf-8')
import glob
import json # for json.load()
import os # for os.path()
import subprocess # for subprocess.check_call()
from common import InputError
from common import read_json_file
# Store parsed command-line arguments in global variable.
args = None
def _create_xlf(target_lang):
  """Creates a <target_lang>.xlf file for Soy.
  Args:
    target_lang: The ISO 639 language code for the target language.
      This is used in the name of the file and in the metadata.
  Returns:
    A pointer to a file to which the metadata has been written.
  Raises:
    IOError: An error occurred while opening or writing the file.
  """
  filename = os.path.join(os.curdir, args.output_dir, target_lang + '.xlf')
  # codecs.open guarantees UTF-8 output regardless of the platform default.
  out_file = codecs.open(filename, 'w', 'utf-8')
  # XLIFF header; the <body> element is deliberately left open and is
  # closed later by _close_xlf() once the translation units are appended.
  out_file.write("""<?xml version="1.0" encoding="UTF-8"?>
<xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2">
  <file original="SoyMsgBundle"
        datatype="x-soy-msg-bundle"
        xml:space="preserve"
        source-language="{0}"
        target-language="{1}">
    <body>""".format(args.source_lang, target_lang))
  return out_file
def _close_xlf(xlf_file):
"""Closes a <target_lang>.xlf file created with create_xlf().
This includes writing the terminating XML.
Args:
xlf_file: A pointer to a file created by _create_xlf().
Raises:
IOError: An error occurred while writing to or closing the file.
"""
xlf_file.write("""
</body>
</file>
</xliff>
""")
xlf_file.close()
def _process_file(path_to_json, target_lang, key_dict):
  """Creates an .xlf file corresponding to the specified .json input file.
  The name of the input file must be target_lang followed by '.json'.
  The name of the output file will be target_lang followed by '.js'.
  Args:
    path_to_json: Path to the directory of xx.json files.
    target_lang: A IETF language code (RFC 4646), such as 'es' or 'pt-br'.
    key_dict: Dictionary mapping Blockly keys (e.g., Maze.turnLeft) to
        Closure keys (hash numbers).
  Raises:
    IOError: An I/O error occurred with an input or output file.
    InputError: Input JSON could not be parsed.
    KeyError: Key found in input file but not in key file.
  """
  keyfile = os.path.join(path_to_json, target_lang + '.json')
  j = read_json_file(keyfile)
  out_file = _create_xlf(target_lang)
  for key in j:
    # '@metadata' is translatewiki bookkeeping, not a translatable message.
    if key != '@metadata':
      try:
        identifier = key_dict[key]
      except KeyError as e:
        # Report which file disagreed with the key file before re-raising.
        print('Key "%s" is in %s but not in %s' %
              (key, keyfile, args.key_file))
        raise e
      target = j.get(key)
      # One <trans-unit> per message; the id is the Closure hash number.
      out_file.write(u"""
      <trans-unit id="{0}" datatype="html">
        <target>{1}</target>
      </trans-unit>""".format(identifier, target))
  _close_xlf(out_file)
def main():
  """Parses arguments, converts each xx.json file, and invokes Soy.

  Steps:
    1. Parse command-line flags into the module-global ``args``.
    2. Load the key file mapping Blockly keys to Closure hash ids.
    3. Write a <lang>.xlf file for every input <lang>.json file.
    4. Run SoyToJsSrcCompiler.jar once over all locales to emit the
       .js files, then delete the intermediate .xlf files.
  """
  # Set up argument parser.
  parser = argparse.ArgumentParser(description='Convert JSON files to JS.')
  parser.add_argument('--source_lang', default='en',
                      help='ISO 639-1 source language code')
  parser.add_argument('--output_dir', default='generated',
                      help='relative directory for output files')
  parser.add_argument('--key_file', default='json' + os.path.sep + 'keys.json',
                      help='relative path to input keys file')
  parser.add_argument('--template', default='template.soy')
  parser.add_argument('--path_to_jar',
                      default='..' + os.path.sep + 'apps' + os.path.sep
                      + '_soy',
                      help='relative path from working directory to '
                      'SoyToJsSrcCompiler.jar')
  parser.add_argument('files', nargs='+', help='input files')
  # Initialize global variables.
  global args
  args = parser.parse_args()
  # Make sure output_dir ends with slash.
  if not args.output_dir.endswith(os.path.sep):
    args.output_dir += os.path.sep
  # Read in keys.json, mapping descriptions (e.g., Maze.turnLeft) to
  # Closure keys (long hash numbers).  Fixed: use a context manager so the
  # handle is closed even when json.load raises.
  with open(args.key_file) as key_file:
    key_dict = json.load(key_file)
  # Process each input file.
  print('Creating .xlf files...')
  processed_langs = []
  if len(args.files) == 1:
    # Windows does not expand globs automatically.
    args.files = glob.glob(args.files[0])
  for arg_file in args.files:
    (path_to_json, filename) = os.path.split(arg_file)
    if not filename.endswith('.json'):
      raise InputError(filename, 'filenames must end with ".json"')
    target_lang = filename[:filename.index('.')]
    # 'qqq' (message docs) and 'keys' are not real locales.
    if target_lang not in ('qqq', 'keys'):
      processed_langs.append(target_lang)
      _process_file(path_to_json, target_lang, key_dict)
  # Output command line for Closure compiler.
  if processed_langs:
    print('Creating .js files...')
    processed_lang_list = ','.join(processed_langs)
    subprocess.check_call([
        'java',
        '-jar', os.path.join(args.path_to_jar, 'SoyToJsSrcCompiler.jar'),
        '--locales', processed_lang_list,
        '--messageFilePathFormat', args.output_dir + '{LOCALE}.xlf',
        '--outputPathFormat', args.output_dir + '{LOCALE}.js',
        '--srcs', args.template])
    if len(processed_langs) == 1:
      print('Created ' + processed_lang_list + '.js in ' + args.output_dir)
    else:
      print('Created {' + processed_lang_list + '}.js in ' + args.output_dir)
    # The .xlf files were only needed as compiler input; clean them up.
    for lang in processed_langs:
      os.remove(args.output_dir + lang + '.xlf')
    print('Removed .xlf files.')
# Script entry point.
if __name__ == '__main__':
  main()
| apache-2.0 |
pchauncey/ansible | lib/ansible/modules/cloud/vmware/vmware_guest_snapshot.py | 21 | 12709 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This module is also sponsored by E.T.A.I. (www.etai.fr)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: vmware_guest_snapshot
short_description: Manages virtual machines snapshots in vcenter
description:
- Create virtual machines snapshots
version_added: 2.3
author:
- James Tanner (@jctanner) <tanner.jc@gmail.com>
- Loic Blot (@nerzhul) <loic.blot@unix-experience.fr>
notes:
- Tested on vSphere 5.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
state:
description:
- Manage snapshots attached to a specific virtual machine.
required: True
choices: ['present', 'absent', 'revert', 'remove_all']
name:
description:
- Name of the VM to work with
- This is required if uuid is not supplied.
name_match:
description:
- If multiple VMs matching the name, use the first or last found
default: 'first'
choices: ['first', 'last']
uuid:
description:
- UUID of the instance to manage if known, this is VMware's unique identifier.
- This is required if name is not supplied.
folder:
description:
- Destination folder, absolute or relative path to find an existing guest.
- This is required if name is supplied.
- The folder should include the datacenter. ESX's datacenter is ha-datacenter
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
- ' folder: vm/folder2'
- ' folder: folder2'
default: /vm
datacenter:
description:
- Destination datacenter for the deploy operation
required: True
snapshot_name:
description:
- Sets the snapshot name to manage.
- This param is required only if state is not C(remove_all)
description:
description:
- Define an arbitrary description to attach to snapshot.
quiesce:
description:
- If set to C(true) and virtual machine is powered on, it will quiesce the
file system in virtual machine.
- Note that VMWare Tools are required for this flag.
- If virtual machine is powered off or VMware Tools are not available, then
this flag is set to C(false).
- If virtual machine does not provide capability to take quiesce snapshot, then
this flag is set to C(false).
required: False
version_added: "2.4"
memory_dump:
description:
- If set to C(true), memory dump of virtual machine is also included in snapshot.
- Note that memory snapshots take time and resources, this will take longer time to create.
- If virtual machine does not provide capability to take memory snapshot, then
this flag is set to C(false).
required: False
version_added: "2.4"
remove_children:
description:
- If set to C(true) and state is set to C(absent), then entire snapshot subtree is set
for removal.
required: False
version_added: "2.4"
extends_documentation_fragment: vmware.documentation
'''
# Usage examples rendered by ansible-doc.  Fixed: two examples used
# ``state: remove``, which is not a valid choice of this module --
# snapshots are deleted with ``state: absent`` (valid states: present,
# absent, revert, remove_all; see DOCUMENTATION above).
EXAMPLES = '''
- name: Create snapshot
  vmware_guest_snapshot:
    hostname: 192.168.1.209
    username: administrator@vsphere.local
    password: vmware
    datacenter: datacenter_name
    folder: /myfolder
    name: dummy_vm
    state: present
    snapshot_name: snap1
    description: snap1_description
  delegate_to: localhost
- name: Remove a snapshot
  vmware_guest_snapshot:
    hostname: 192.168.1.209
    username: administrator@vsphere.local
    password: vmware
    name: dummy_vm
    datacenter: datacenter_name
    folder: /myfolder
    state: absent
    snapshot_name: snap1
  delegate_to: localhost
- name: Revert to a snapshot
  vmware_guest_snapshot:
    hostname: 192.168.1.209
    username: administrator@vsphere.local
    password: vmware
    datacenter: datacenter_name
    folder: /myfolder
    name: dummy_vm
    state: revert
    snapshot_name: snap1
  delegate_to: localhost
- name: Remove all snapshots of a VM
  vmware_guest_snapshot:
    hostname: 192.168.1.209
    username: administrator@vsphere.local
    password: vmware
    datacenter: datacenter_name
    folder: /myfolder
    name: dummy_vm
    state: remove_all
  delegate_to: localhost
- name: Take snapshot of a VM using quiesce and memory flag on
  vmware_guest_snapshot:
    hostname: 192.168.1.209
    username: administrator@vsphere.local
    password: vmware
    name: dummy_vm
    state: present
    snapshot_name: dummy_vm_snap_0001
    quiesce: True
    memory_dump: True
  delegate_to: localhost
- name: Remove a snapshot and snapshot subtree
  vmware_guest_snapshot:
    hostname: 192.168.1.209
    username: administrator@vsphere.local
    password: vmware
    name: dummy_vm
    state: absent
    remove_children: True
    snapshot_name: snap1
  delegate_to: localhost
'''
RETURN = """
instance:
description: metadata about the new virtualmachine
returned: always
type: dict
sample: None
"""
import time
HAS_PYVMOMI = False
try:
import pyVmomi
from pyVmomi import vim
HAS_PYVMOMI = True
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.vmware import connect_to_api, vmware_argument_spec, find_vm_by_id
class PyVmomiHelper(object):
    """Helper wrapping a pyVmomi connection for snapshot operations.

    Holds the AnsibleModule, its params and the vSphere service content
    obtained from connect_to_api().
    """
    def __init__(self, module):
        # Fail fast if the optional pyVmomi dependency is not installed.
        if not HAS_PYVMOMI:
            module.fail_json(msg='pyvmomi module required')
        self.module = module
        self.params = module.params
        self.content = connect_to_api(self.module)
    def getvm(self, name=None, uuid=None, folder=None):
        """Find a VM by UUID, or by inventory path (folder + name).

        UUID takes precedence over folder/name.  Returns the VM object
        or None when nothing matched.
        """
        vm = None
        match_first = False
        if uuid:
            vm = find_vm_by_id(self.content, uuid, vm_id_type="uuid")
        elif folder and name:
            # name_match == 'first' returns the first hit instead of
            # requiring a unique match.
            if self.params['name_match'] == 'first':
                match_first = True
            vm = find_vm_by_id(self.content, vm_id=name, vm_id_type="inventory_path", folder=folder, match_first=match_first)
        return vm
    @staticmethod
    def wait_for_task(task):
        """Poll a vSphere task once a second until it succeeds or errors."""
        # https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.Task.html
        # https://www.vmware.com/support/developer/vc-sdk/visdk25pubs/ReferenceGuide/vim.TaskInfo.html
        # https://github.com/virtdevninja/pyvmomi-community-samples/blob/master/samples/tools/tasks.py
        while task.info.state not in ['success', 'error']:
            time.sleep(1)
    def get_snapshots_by_name_recursively(self, snapshots, snapname):
        """Collect every snapshot named *snapname* in the given subtrees."""
        snap_obj = []
        for snapshot in snapshots:
            if snapshot.name == snapname:
                snap_obj.append(snapshot)
            else:
                # Recurse into children only when the name did not match.
                snap_obj = snap_obj + self.get_snapshots_by_name_recursively(snapshot.childSnapshotList, snapname)
        return snap_obj
    def snapshot_vm(self, vm):
        """Start a CreateSnapshot task for *vm*; returns the task or fails the module."""
        memory_dump = False
        quiesce = False
        # Check if Virtual Machine provides capabilities for Quiesce and Memory
        # Snapshots; silently downgrade the flags when it does not.
        if vm.capability.quiescedSnapshotsSupported:
            quiesce = self.module.params['quiesce']
        if vm.capability.memorySnapshotsSupported:
            memory_dump = self.module.params['memory_dump']
        task = None
        try:
            task = vm.CreateSnapshot(self.module.params["snapshot_name"],
                                     self.module.params["description"],
                                     memory_dump,
                                     quiesce)
        except vim.fault.RestrictedVersion as exc:
            self.module.fail_json(msg="Failed to take snapshot due to VMware Licence: %s" % to_native(exc.msg))
        except Exception as exc:
            self.module.fail_json(msg="Failed to create snapshot of VM %s due to %s" % (self.module.params['name'], to_native(exc.msg)))
        return task
    def remove_or_revert_snapshot(self, vm):
        """Start a Remove or Revert task for the named snapshot.

        Exits the module when the VM has no snapshots or the name is not
        found; acts only on an exact, unique name match.
        """
        if vm.snapshot is None:
            self.module.exit_json(msg="VM - %s doesn't have any snapshots" % self.module.params["name"])
        snap_obj = self.get_snapshots_by_name_recursively(vm.snapshot.rootSnapshotList,
                                                          self.module.params["snapshot_name"])
        task = None
        # NOTE(review): more than one snapshot with the same name falls
        # through to the "couldn't find" message below.
        if len(snap_obj) == 1:
            snap_obj = snap_obj[0].snapshot
            if self.module.params["state"] == "absent":
                # Remove subtree depending upon the user input
                remove_children = self.module.params.get('remove_children', False)
                task = snap_obj.RemoveSnapshot_Task(remove_children)
            elif self.module.params["state"] == "revert":
                task = snap_obj.RevertToSnapshot_Task()
        else:
            self.module.exit_json(
                msg="Couldn't find any snapshots with specified name: %s on VM: %s" %
                    (self.module.params["snapshot_name"], self.module.params["name"]))
        return task
    def apply_snapshot_op(self, vm):
        """Dispatch on 'state' and wait for the resulting task.

        Returns an Ansible result dict with 'changed'/'failed' keys.
        """
        result = {}
        if self.module.params["state"] == "present":
            task = self.snapshot_vm(vm)
        elif self.module.params["state"] in ["absent", "revert"]:
            task = self.remove_or_revert_snapshot(vm)
        elif self.module.params["state"] == "remove_all":
            task = vm.RemoveAllSnapshots()
        else:
            # This should not happen: argument_spec restricts the choices.
            assert False
        if task:
            self.wait_for_task(task)
            if task.info.state == 'error':
                result = {'changed': False, 'failed': True, 'msg': task.info.error.msg}
            else:
                result = {'changed': True, 'failed': False}
        return result
def main():
    """Entry point: validate arguments, locate the VM, run the snapshot op."""
    argument_spec = vmware_argument_spec()
    argument_spec.update(
        state=dict(default='present', choices=['present', 'absent', 'revert', 'remove_all']),
        name=dict(required=True, type='str'),
        name_match=dict(type='str', choices=['first', 'last'], default='first'),
        uuid=dict(type='str'),
        folder=dict(type='str', default='/vm'),
        datacenter=dict(required=True, type='str'),
        snapshot_name=dict(type='str'),
        description=dict(type='str', default=''),
        quiesce=dict(type='bool', default=False),
        memory_dump=dict(type='bool', default=False),
        remove_children=dict(type='bool', default=False),
    )
    module = AnsibleModule(argument_spec=argument_spec, required_one_of=[['name', 'uuid']])

    # FindByInventoryPath() does not require an absolute path
    # so we should leave the input folder path unmodified
    module.params['folder'] = module.params['folder'].rstrip('/')

    pyv = PyVmomiHelper(module)
    # Check if the VM exists before continuing
    vm = pyv.getvm(name=module.params['name'],
                   folder=module.params['folder'],
                   uuid=module.params['uuid'])

    if not vm:
        # If UUID is set, getvm select UUID, show error message accordingly.
        if module.params['uuid'] is not None:
            module.fail_json(msg="Unable to manage snapshots for non-existing VM %(uuid)s" % module.params)
        else:
            module.fail_json(msg="Unable to manage snapshots for non-existing VM %(name)s" % module.params)

    # snapshot_name is mandatory for everything except remove_all.
    if not module.params['snapshot_name'] and module.params['state'] != 'remove_all':
        module.fail_json(msg="snapshot_name param is required when state is '%(state)s'" % module.params)

    result = pyv.apply_snapshot_op(vm)
    # apply_snapshot_op may return {} (e.g. no task started); normalize.
    if 'failed' not in result:
        result['failed'] = False
    if result['failed']:
        module.fail_json(**result)
    else:
        module.exit_json(**result)
if __name__ == '__main__':
    main()
| gpl-3.0 |
cafe-grader-team/cafe-grader-web | lib/assets/Lib/browser/indexed_db.py | 9 | 3114 | class EventListener:
def __init__(self, events=[]):
self._events=events
    def append(self, event):
        """Register one more callback to be invoked by fire()."""
        self._events.append(event)
def fire(self, e):
for _event in self._events:
_event(e)
class IndexedDB:
    """Thin Brython wrapper over the browser's indexedDB API."""
    def __init__(self):
        if not __BRYTHON__.has_indexedDB:
            raise NotImplementedError("Your browser doesn't support indexedDB")
            return  # NOTE(review): unreachable -- the raise above always fires.
        self._indexedDB=__BRYTHON__.indexedDB()
        self._db=None        # set asynchronously by _onsuccess
        self._version=None
    def _onsuccess(self, event):
        # Internal success handler: remember the opened database handle.
        self._db=event.target.result
    def open(self, name, onsuccess, version=1.0, onerror=None,
             onupgradeneeded=None):
        """Asynchronously open database *name*; onsuccess fires after
        the handle has been stored on self."""
        self._version=version
        _result=self._indexedDB.open(name, version)
        # Chain our internal handler before the caller's callback.
        _success=EventListener([self._onsuccess, onsuccess])
        _result.onsuccess=_success.fire
        _result.onupgradeneeded=onupgradeneeded
        #if onerror is None:
        # NOTE(review): this local def unconditionally shadows the
        # onerror parameter -- a caller-supplied onerror is never used.
        # The commented-out 'if' above suggests that was not intended.
        def onerror(e):
            print("onerror: %s:%s" % (e.type, e.target.result))
        def onblocked(e):
            print("blocked: %s:%s" % (e.type, e.result))
        _result.onerror=onerror
        _result.onblocked=onblocked
    def transaction(self, entities, mode='read'):
        """Start a transaction over *entities*.

        NOTE(review): standard IndexedDB modes are 'readonly'/'readwrite';
        'read' looks suspicious -- confirm against Brython's binding.
        """
        return Transaction(self._db.transaction(entities, mode))
class Transaction:
    """Wrapper around a native IDBTransaction object."""

    def __init__(self, transaction):
        self._transaction = transaction

    def objectStore(self, name):
        """Return the named store wrapped in an ObjectStore."""
        native_store = self._transaction.objectStore(name)
        return ObjectStore(native_store)
class ObjectStore:
    """Wrapper around a native IDBObjectStore; requests are asynchronous
    and completion is reported through onsuccess/onerror callbacks."""
    def __init__(self, objectStore):
        self._objectStore=objectStore
        self._data=[]          # accumulator filled by query()
    def clear(self, onsuccess=None, onerror=None):
        """Delete every record in the store."""
        _result=self._objectStore.clear()
        if onsuccess is not None:
            _result.onsuccess=onsuccess
        if onerror is not None:
            _result.onerror=onerror
    def _helper(self, func, object, onsuccess=None, onerror=None):
        # Issue a single-argument request and wire optional callbacks.
        _result=func(object)
        if onsuccess is not None:
            _result.onsuccess=onsuccess
        if onerror is not None:
            _result.onerror=onerror
    def put(self, obj, key=None, onsuccess=None, onerror=None):
        """Insert or update *obj* (optionally under *key*)."""
        _r = self._objectStore.put(obj, key)
        _r.onsuccess = onsuccess
        _r.onerror = onerror
    def add(self, obj, key, onsuccess=None, onerror=None):
        """Insert *obj* under *key*; fails if the key already exists."""
        _r = self._objectStore.add(obj, key)
        _r.onsuccess = onsuccess
        _r.onerror = onerror
        #self._helper(self._objectStore.add, object, onsuccess, onerror)
    def delete(self, index, onsuccess=None, onerror=None):
        """Remove the record stored under *index*."""
        self._helper(self._objectStore.delete, index, onsuccess, onerror)
    def query(self, *args):
        """Open a cursor and accumulate matching values into self._data.

        Results arrive asynchronously; self._data may still be filling
        when fetchall() is called -- presumably acceptable in Brython's
        event model, TODO confirm.
        """
        self._data=[]
        def onsuccess(event):
            cursor=event.target.result
            if cursor is not None:
                self._data.append(cursor.value)
                getattr(cursor,"continue")() # cursor.continue() is illegal
        self._objectStore.openCursor(args).onsuccess=onsuccess
    def fetchall(self):
        """Generator yielding the accumulated result list (as one item)."""
        yield self._data
    def get(self, key, onsuccess=None, onerror=None):
        """Fetch the record stored under *key* via onsuccess."""
        self._helper(self._objectStore.get, key, onsuccess, onerror)
| mit |
s20121035/rk3288_android5.1_repo | external/skia/platform_tools/android/gyp_gen/vars_dict_lib.py | 146 | 4422 | #!/usr/bin/python
# Copyright 2014 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import types
# The goal of this class is to store a set of unique items in the order in
# which they are inserted. This is important for the final makefile, where
# we want to make sure the image decoders are in a particular order. See
# images.gyp for more information.
class OrderedSet(object):
  """Ordered set of unique items that supports addition and removal.

  Retains the order in which items are inserted.
  """

  def __init__(self):
    self.__ordered_set = []

  def add(self, item):
    """Add item, if it is not already in the set.

    Args:
      item: The item to add; appended at the end when new.
    """
    if item in self.__ordered_set:
      return
    self.__ordered_set.append(item)

  def __contains__(self, item):
    """Return whether the set contains item."""
    return item in self.__ordered_set

  def __iter__(self):
    """Iterate over the items in insertion order."""
    return iter(self.__ordered_set)

  def remove(self, item):
    """Remove item from the set.

    Raises:
      ValueError if item is not in the set.
    """
    self.__ordered_set.remove(item)

  def __len__(self):
    """Number of items in the set."""
    return len(self.__ordered_set)

  def __getitem__(self, index):
    """Return the item at position index."""
    return self.__ordered_set[index]

  def reset(self):
    """Reset to empty."""
    self.__ordered_set = []

  def set(self, other):
    """Replace this ordered set's contents with a copy of other's."""
    self.__ordered_set = list(other.__ordered_set)
# Field names of VarsDict; the order here fixes both the namedtuple
# field order and the order variables appear in the generated makefile.
VAR_NAMES = ['LOCAL_CFLAGS',
             'LOCAL_CPPFLAGS',
             'LOCAL_SRC_FILES',
             'LOCAL_SHARED_LIBRARIES',
             'LOCAL_STATIC_LIBRARIES',
             'LOCAL_C_INCLUDES',
             'LOCAL_EXPORT_C_INCLUDE_DIRS',
             'DEFINES',
             'KNOWN_TARGETS',
             # These are not parsed by gyp, but set manually.
             'LOCAL_MODULE_TAGS',
             'LOCAL_MODULE']
class VarsDict(collections.namedtuple('VarsDict', VAR_NAMES)):
  """Custom class for storing the arguments to Android.mk variables.

  Can also be treated as a dictionary with fixed keys.
  """

  __slots__ = ()

  def __new__(cls):
    # One fresh OrderedSet per field in VAR_NAMES.
    lists = [OrderedSet() for _unused in range(len(VAR_NAMES))]
    return tuple.__new__(cls, lists)

  def keys(self):
    """Return the field names as strings.
    """
    return self._fields

  def __getitem__(self, index):
    """Return an item, indexed by a number or a string.

    Integer indices are positions in the underlying tuple; string
    indices are field names.  Any other index type returns None,
    matching the historical behavior.
    """
    if isinstance(index, int):
      # Treat the index as an array index into a tuple.
      return tuple.__getitem__(self, index)
    if isinstance(index, str):
      # Treat the index as a key into a dictionary.  getattr raises
      # AttributeError on unknown names just like the old
      # eval('self.%s' % index) did, without evaluating arbitrary code,
      # and also works on Python 3 (types.IntType/StringType do not).
      return getattr(self, index)
    return None
def intersect(var_dict_list):
  """Compute intersection of VarsDicts.

  Find the intersection of a list of VarsDicts and trim each input to its
  unique entries.

  Args:
    var_dict_list: list of VarsDicts. WARNING: each VarsDict will be
      modified in place, to remove the common elements!
  Returns:
    VarsDict containing list entries common to all VarsDicts in
      var_dict_list
  """
  intersection = VarsDict()
  first = var_dict_list[0]
  others = var_dict_list[1:]
  for key in first.keys():
    # Snapshot the first dict's entries so the original can be modified
    # while we iterate.
    for entry in list(first[key]):
      # Keep the entry only when every other VarsDict also has it.
      if all(entry in vd[key] for vd in others):
        intersection[key].add(entry)
        # Trim the shared entry from every input.
        for vd in var_dict_list:
          vd[key].remove(entry)
  return intersection
Pakketeretet2/lammps | tools/python/dump2pdb.py | 51 | 1217 | #!/usr/bin/env python
# Script: dump2pdb.py
# Purpose: convert a LAMMPS dump file to PDB format
# Syntax: dump2pdb.py dumpfile Nid Ntype Nx Ny Nz pdbfile template
# dumpfile = LAMMPS dump file in native LAMMPS format
# Nid,Ntype,Nx,Ny,Nz = columns #s for ID,type,x,y,z
# (usually 1,2,3,4,5)
# pdbfile = new PDB file
# template = PDB file to use as template for creating new PDB file
# this arg is optional, if not used a generic PDB file is created
# Author: Steve Plimpton (Sandia), sjplimp at sandia.gov
import sys,os
# Locate the LAMMPS python tools (pizza.py modules) via the environment.
path = os.environ["LAMMPS_PYTHON_TOOLS"]
sys.path.append(path)
from dump import dump
from pdbfile import pdbfile

# 7 mandatory args after the script name, plus an optional template file.
# NOTE: this is Python 2 syntax (raise with a comma); the script targets py2.
if len(sys.argv) != 8 and len(sys.argv) != 9:
  raise StandardError, "Syntax: dump2pdb.py dumpfile Nid Ntype Nx Ny Nz pdbfile template"

dumpfile = sys.argv[1]
# 1-based column numbers for ID, type and coordinates in the dump file.
nid = int(sys.argv[2])
ntype = int(sys.argv[3])
nx = int(sys.argv[4])
ny = int(sys.argv[5])
nz = int(sys.argv[6])
pfile = sys.argv[7]
if len(sys.argv) == 9: template = sys.argv[8]
else: template = ""

# Read the dump, tell it which columns mean what, and emit the PDB file,
# using the template's atom records when one was supplied.
d = dump(dumpfile)
d.map(nid,"id",ntype,"type",nx,"x",ny,"y",nz,"z")
if template: p = pdbfile(template,d)
else: p = pdbfile(d)
p.one(pfile)
defivelo/db | apps/user/migrations/0064_merge_WS_in_VS.py | 1 | 4091 | from django.db import migrations, models
import multiselectfield.db.fields
from memoize import delete_memoized
from defivelo.roles import user_cantons
def ws_to_vs(apps, schema_editor):
    """Data migration: fold the legacy 'WS' canton code into 'VS'."""
    UserManagedState = apps.get_model("user", "UserManagedState")
    for ums in UserManagedState.objects.filter(canton="WS"):
        # If this user already manages 'VS', renaming the 'WS' row would
        # create a duplicate, so drop it instead.  (Bug fix: the previous
        # code filtered on canton=ums.canton, i.e. 'WS', which always
        # matched the row itself, so every 'WS' row was deleted and none
        # was ever converted to 'VS'.)
        if UserManagedState.objects.filter(user=ums.user, canton="VS").exists():
            ums.delete()
        else:
            ums.canton = "VS"
            ums.save()
    UserProfile = apps.get_model("user", "UserProfile")
    for up in UserProfile.objects.all():
        # Rewrite 'WS' to 'VS' both in the multi-select list and in the
        # scalar affiliation field.
        cantons = ["VS" if c == "WS" else c for c in up.activity_cantons]
        up.activity_cantons = cantons
        if up.affiliation_canton == "WS":
            up.affiliation_canton = "VS"
        up.save()
    # Cached per-user canton lookups may now be stale.
    delete_memoized(user_cantons)
class Migration(migrations.Migration):
    """Merge the 'WS' canton code into 'VS' and drop 'WS' from all
    canton choice lists."""

    dependencies = [
        ("user", "0063_use_u2019_quote"),
    ]

    operations = [
        # Convert existing data first; the reverse migration is a no-op.
        migrations.RunPython(ws_to_vs, migrations.RunPython.noop),
        # The three AlterField operations below re-declare the canton
        # choices without the removed 'WS' entry.
        migrations.AlterField(
            model_name="usermanagedstate",
            name="canton",
            field=models.CharField(
                choices=[
                    ("AG", "Aargau"),
                    ("AR", "Appenzell Ausserrhoden"),
                    ("BS", "Basel-Stadt"),
                    ("BL", "Basel-Land"),
                    ("BE", "Berne"),
                    ("FR", "Fribourg"),
                    ("GE", "Geneva"),
                    ("GR", "Graubuenden"),
                    ("JU", "Jura"),
                    ("LU", "Lucerne"),
                    ("NE", "Neuchatel"),
                    ("SZ", "Schwyz"),
                    ("SO", "Solothurn"),
                    ("SG", "St. Gallen"),
                    ("VS", "Valais"),
                    ("VD", "Vaud"),
                    ("ZH", "Zurich"),
                ],
                max_length=2,
                verbose_name="Canton",
            ),
        ),
        migrations.AlterField(
            model_name="userprofile",
            name="activity_cantons",
            field=multiselectfield.db.fields.MultiSelectField(
                blank=True,
                choices=[
                    ("AG", "Aargau"),
                    ("AR", "Appenzell Ausserrhoden"),
                    ("BS", "Basel-Stadt"),
                    ("BL", "Basel-Land"),
                    ("BE", "Berne"),
                    ("FR", "Fribourg"),
                    ("GE", "Geneva"),
                    ("GR", "Graubuenden"),
                    ("JU", "Jura"),
                    ("LU", "Lucerne"),
                    ("NE", "Neuchatel"),
                    ("SZ", "Schwyz"),
                    ("SO", "Solothurn"),
                    ("SG", "St. Gallen"),
                    ("VS", "Valais"),
                    ("VD", "Vaud"),
                    ("ZH", "Zurich"),
                ],
                max_length=50,
                verbose_name="Défi Vélo mobile",
            ),
        ),
        migrations.AlterField(
            model_name="userprofile",
            name="affiliation_canton",
            field=models.CharField(
                blank=True,
                choices=[
                    ("", "---------"),
                    ("AG", "Aargau"),
                    ("AR", "Appenzell Ausserrhoden"),
                    ("BS", "Basel-Stadt"),
                    ("BL", "Basel-Land"),
                    ("BE", "Berne"),
                    ("FR", "Fribourg"),
                    ("GE", "Geneva"),
                    ("GR", "Graubuenden"),
                    ("JU", "Jura"),
                    ("LU", "Lucerne"),
                    ("NE", "Neuchatel"),
                    ("SZ", "Schwyz"),
                    ("SO", "Solothurn"),
                    ("SG", "St. Gallen"),
                    ("VS", "Valais"),
                    ("VD", "Vaud"),
                    ("ZH", "Zurich"),
                ],
                max_length=2,
                verbose_name="Canton d’affiliation",
            ),
        ),
    ]
| agpl-3.0 |
pigate/mongo-python-driver | pymongo/auth.py | 23 | 15619 | # Copyright 2013-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Authentication helpers."""
import hmac
HAVE_KERBEROS = True
try:
import kerberos
except ImportError:
HAVE_KERBEROS = False
from base64 import standard_b64decode, standard_b64encode
from collections import namedtuple
from hashlib import md5, sha1
from random import SystemRandom
from bson.binary import Binary
from bson.py3compat import b, string_type, _unicode, PY3
from bson.son import SON
from pymongo.errors import ConfigurationError, OperationFailure
# 'DEFAULT' is resolved at auth time to SCRAM-SHA-1 or MONGODB-CR
# depending on the server's wire version (see _authenticate_default).
MECHANISMS = frozenset(
    ['GSSAPI', 'MONGODB-CR', 'MONGODB-X509', 'PLAIN', 'SCRAM-SHA-1', 'DEFAULT'])
"""The authentication mechanisms supported by PyMongo."""

MongoCredential = namedtuple(
    'MongoCredential',
    ['mechanism', 'source', 'username', 'password', 'mechanism_properties'])
"""A hashable namedtuple of values used for authentication."""

GSSAPIProperties = namedtuple('GSSAPIProperties', ['service_name'])
"""Mechanism properties for GSSAPI authentication."""
def _build_credentials_tuple(mech, source, user, passwd, extra):
    """Build and return a mechanism specific credentials tuple.
    """
    user = _unicode(user)
    if mech == 'GSSAPI':
        properties = extra.get('authmechanismproperties', {})
        gssapi_props = GSSAPIProperties(
            service_name=properties.get('SERVICE_NAME', 'mongodb'))
        # GSSAPI takes no password and always authenticates against the
        # $external virtual database.
        return MongoCredential(mech, '$external', user, None, gssapi_props)
    if mech == 'MONGODB-X509':
        # X509 also uses $external and carries no password.
        return MongoCredential(mech, '$external', user, None, None)
    if passwd is None:
        raise ConfigurationError("A password is required.")
    return MongoCredential(mech, source, user, _unicode(passwd), None)
# Python 2/3 compatibility shims.  Iterating py3 bytes yields ints while
# iterating py2 str yields 1-char strings, hence two _xor variants; the
# int <-> bytes conversions exist natively only on py3.
if PY3:
    def _xor(fir, sec):
        """XOR two byte strings together (python 3.x)."""
        return b"".join([bytes([x ^ y]) for x, y in zip(fir, sec)])

    _from_bytes = int.from_bytes
    _to_bytes = int.to_bytes
else:
    from binascii import (hexlify as _hexlify,
                          unhexlify as _unhexlify)

    def _xor(fir, sec):
        """XOR two byte strings together (python 2.x)."""
        return b"".join([chr(ord(x) ^ ord(y)) for x, y in zip(fir, sec)])

    # The dummy parameters mirror int.from_bytes/to_bytes' extra
    # arguments (length, byteorder) so call sites are identical.
    def _from_bytes(value, dummy, int=int, _hexlify=_hexlify):
        """An implementation of int.from_bytes for python 2.x."""
        return int(_hexlify(value), 16)

    def _to_bytes(value, dummy0, dummy1, _unhexlify=_unhexlify):
        """An implementation of int.to_bytes for python 2.x."""
        # '%040x' zero-pads to 20 bytes, the SHA-1 digest size.
        return _unhexlify('%040x' % value)
# Select a PBKDF2-HMAC-SHA1 implementation for the SCRAM "Hi" function,
# preferring the fastest available: backports.pbkdf2 (OpenSSL), then
# hashlib (Python 2.7.8+/3.4+), then a pure-python fallback.
try:
    # The fastest option, if it's been compiled to use OpenSSL's HMAC.
    from backports.pbkdf2 import pbkdf2_hmac

    def _hi(data, salt, iterations):
        return pbkdf2_hmac('sha1', data, salt, iterations)

except ImportError:
    try:
        # Python 2.7.8+, or Python 3.4+.
        from hashlib import pbkdf2_hmac

        def _hi(data, salt, iterations):
            return pbkdf2_hmac('sha1', data, salt, iterations)

    except ImportError:

        def _hi(data, salt, iterations):
            """A simple implementation of PBKDF2."""
            mac = hmac.HMAC(data, None, sha1)

            def _digest(msg, mac=mac):
                """Get a digest for msg."""
                _mac = mac.copy()
                _mac.update(msg)
                return _mac.digest()

            from_bytes = _from_bytes
            to_bytes = _to_bytes

            # U1 = PRF(password, salt || INT(1)) per RFC 2898.
            _u1 = _digest(salt + b'\x00\x00\x00\x01')
            _ui = from_bytes(_u1, 'big')
            # XOR-fold the remaining iterations into the accumulator.
            for _ in range(iterations - 1):
                _u1 = _digest(_u1)
                _ui ^= from_bytes(_u1, 'big')
            return to_bytes(_ui, 20, 'big')
# Use the stdlib's timing-attack-resistant compare when available
# (Python 2.7.7+/3.3+); otherwise fall back to a best-effort version.
try:
    from hmac import compare_digest
except ImportError:
    if PY3:
        def _xor_bytes(a, b):
            return a ^ b
    else:
        def _xor_bytes(a, b, _ord=ord):
            return _ord(a) ^ _ord(b)

    # Python 2.x < 2.7.7 and Python 3.x < 3.3
    # References:
    #  - http://bugs.python.org/issue14532
    #  - http://bugs.python.org/issue14955
    #  - http://bugs.python.org/issue15061
    def compare_digest(a, b, _xor_bytes=_xor_bytes):
        # When the lengths differ, compare b against itself (so the loop
        # still runs in constant time) and pre-seed the result with 1 so
        # the comparison can never succeed.
        left = None
        right = b
        if len(a) == len(b):
            left = a
            result = 0
        if len(a) != len(b):
            left = b
            result = 1
        for x, y in zip(left, right):
            result |= _xor_bytes(x, y)
        return result == 0
def _parse_scram_response(response):
"""Split a scram response into key, value pairs."""
return dict(item.split(b"=", 1) for item in response.split(b","))
def _authenticate_scram_sha1(credentials, sock_info):
    """Authenticate using SCRAM-SHA-1 (RFC 5802)."""
    username = credentials.username
    password = credentials.password
    source = credentials.source

    # Make local
    _hmac = hmac.HMAC
    _sha1 = sha1

    # RFC 5802: '=' and ',' in the username must be escaped.
    user = username.encode("utf-8").replace(b"=", b"=3D").replace(b",", b"=2C")
    # Client nonce: the fractional digits of a random float, base64 encoded.
    nonce = standard_b64encode(
        (("%s" % (SystemRandom().random(),))[2:]).encode("utf-8"))
    first_bare = b"n=" + user + b",r=" + nonce

    cmd = SON([('saslStart', 1),
               ('mechanism', 'SCRAM-SHA-1'),
               ('payload', Binary(b"n,," + first_bare)),
               ('autoAuthorize', 1)])
    res = sock_info.command(source, cmd)

    server_first = res['payload']
    parsed = _parse_scram_response(server_first)
    iterations = int(parsed[b'i'])
    salt = parsed[b's']
    rnonce = parsed[b'r']
    # The combined nonce must start with our client nonce.
    if not rnonce.startswith(nonce):
        raise OperationFailure("Server returned an invalid nonce.")

    # "c=biws" is the base64 encoding of the GS2 header "n,,".
    without_proof = b"c=biws,r=" + rnonce
    salted_pass = _hi(_password_digest(username, password).encode("utf-8"),
                      standard_b64decode(salt),
                      iterations)
    client_key = _hmac(salted_pass, b"Client Key", _sha1).digest()
    stored_key = _sha1(client_key).digest()
    auth_msg = b",".join((first_bare, server_first, without_proof))
    client_sig = _hmac(stored_key, auth_msg, _sha1).digest()
    client_proof = b"p=" + standard_b64encode(_xor(client_key, client_sig))
    client_final = b",".join((without_proof, client_proof))

    # Precompute the signature the server must send back so we can
    # authenticate the server as well.
    server_key = _hmac(salted_pass, b"Server Key", _sha1).digest()
    server_sig = standard_b64encode(
        _hmac(server_key, auth_msg, _sha1).digest())

    cmd = SON([('saslContinue', 1),
               ('conversationId', res['conversationId']),
               ('payload', Binary(client_final))])
    res = sock_info.command(source, cmd)

    parsed = _parse_scram_response(res['payload'])
    # Constant-time compare of the server signature.
    if not compare_digest(parsed[b'v'], server_sig):
        raise OperationFailure("Server returned an invalid signature.")

    # Depending on how it's configured, Cyrus SASL (which the server uses)
    # requires a third empty challenge.
    if not res['done']:
        cmd = SON([('saslContinue', 1),
                   ('conversationId', res['conversationId']),
                   ('payload', Binary(b''))])
        res = sock_info.command(source, cmd)
        if not res['done']:
            raise OperationFailure('SASL conversation failed to complete.')
def _password_digest(username, password):
    """Get a password digest to use for authentication.

    Raises TypeError when either argument is not a string and ValueError
    when the password is empty.
    """
    if not isinstance(password, string_type):
        raise TypeError("password must be an "
                        "instance of %s" % (string_type.__name__,))
    if len(password) == 0:
        raise ValueError("password can't be empty")
    if not isinstance(username, string_type):
        # Bug fix: this message previously said "password".
        raise TypeError("username must be an "
                        "instance of %s" % (string_type.__name__,))

    # The digest is MD5("<username>:mongo:<password>").
    md5hash = md5()
    data = "%s:mongo:%s" % (username, password)
    md5hash.update(data.encode('utf-8'))
    return _unicode(md5hash.hexdigest())
def _auth_key(nonce, username, password):
    """Get an auth key to use for authentication.
    """
    pwd_digest = _password_digest(username, password)
    # key = MD5(nonce + username + password_digest), hex encoded.
    hasher = md5()
    hasher.update(("%s%s%s" % (nonce, username, pwd_digest)).encode('utf-8'))
    return _unicode(hasher.hexdigest())
def _authenticate_gssapi(credentials, sock_info):
    """Authenticate using GSSAPI (Kerberos).

    Requires the optional "kerberos" (pykerberos) module.
    """
    if not HAVE_KERBEROS:
        raise ConfigurationError('The "kerberos" module must be '
                                 'installed to use GSSAPI authentication.')

    try:
        username = credentials.username
        gsn = credentials.mechanism_properties.service_name
        # Starting here and continuing through the while loop below - establish
        # the security context. See RFC 4752, Section 3.1, first paragraph.
        host = sock_info.address[0]
        result, ctx = kerberos.authGSSClientInit(
            gsn + '@' + host, gssflags=kerberos.GSS_C_MUTUAL_FLAG)

        if result != kerberos.AUTH_GSS_COMPLETE:
            raise OperationFailure('Kerberos context failed to initialize.')

        try:
            # pykerberos uses a weird mix of exceptions and return values
            # to indicate errors.
            # 0 == continue, 1 == complete, -1 == error
            # Only authGSSClientStep can return 0.
            if kerberos.authGSSClientStep(ctx, '') != 0:
                raise OperationFailure('Unknown kerberos '
                                       'failure in step function.')

            # Start a SASL conversation with mongod/s
            # Note: pykerberos deals with base64 encoded byte strings.
            # Since mongo accepts base64 strings as the payload we don't
            # have to use bson.binary.Binary.
            payload = kerberos.authGSSClientResponse(ctx)
            cmd = SON([('saslStart', 1),
                       ('mechanism', 'GSSAPI'),
                       ('payload', payload),
                       ('autoAuthorize', 1)])
            response = sock_info.command('$external', cmd)

            # Limit how many times we loop to catch protocol / library issues
            for _ in range(10):
                result = kerberos.authGSSClientStep(ctx,
                                                    str(response['payload']))
                if result == -1:
                    raise OperationFailure('Unknown kerberos '
                                           'failure in step function.')

                payload = kerberos.authGSSClientResponse(ctx) or ''

                cmd = SON([('saslContinue', 1),
                           ('conversationId', response['conversationId']),
                           ('payload', payload)])
                response = sock_info.command('$external', cmd)

                if result == kerberos.AUTH_GSS_COMPLETE:
                    break
            else:
                # for/else: ten rounds without completion is a failure.
                raise OperationFailure('Kerberos '
                                       'authentication failed to complete.')

            # Once the security context is established actually authenticate.
            # See RFC 4752, Section 3.1, last two paragraphs.
            if kerberos.authGSSClientUnwrap(ctx,
                                            str(response['payload'])) != 1:
                raise OperationFailure('Unknown kerberos '
                                       'failure during GSS_Unwrap step.')

            if kerberos.authGSSClientWrap(ctx,
                                          kerberos.authGSSClientResponse(ctx),
                                          username) != 1:
                raise OperationFailure('Unknown kerberos '
                                       'failure during GSS_Wrap step.')

            payload = kerberos.authGSSClientResponse(ctx)
            cmd = SON([('saslContinue', 1),
                       ('conversationId', response['conversationId']),
                       ('payload', payload)])
            sock_info.command('$external', cmd)

        finally:
            # Always release the kerberos context.
            kerberos.authGSSClientClean(ctx)

    except kerberos.KrbError as exc:
        # Surface kerberos library errors as pymongo OperationFailure.
        raise OperationFailure(str(exc))
def _authenticate_plain(credentials, sock_info):
    """Authenticate using SASL PLAIN (RFC 4616)
    """
    # PLAIN message: NUL authzid (empty), NUL-separated authcid and passwd.
    payload = ('\x00%s\x00%s' % (credentials.username,
                                 credentials.password)).encode('utf-8')
    cmd = SON([('saslStart', 1),
               ('mechanism', 'PLAIN'),
               ('payload', Binary(payload)),
               ('autoAuthorize', 1)])
    sock_info.command(credentials.source, cmd)
def _authenticate_cram_md5(credentials, sock_info):
    """Authenticate using CRAM-MD5 (RFC 2195)
    """
    source = credentials.source
    username = credentials.username
    password = credentials.password
    # The password used as the mac key is the
    # same as what we use for MONGODB-CR
    passwd = _password_digest(username, password)
    start = SON([('saslStart', 1),
                 ('mechanism', 'CRAM-MD5'),
                 ('payload', Binary(b'')),
                 ('autoAuthorize', 1)])
    response = sock_info.command(source, start)
    # Pass digestmod explicitly: the implicit MD5 default is deprecated
    # as of python 3.4.
    mac = hmac.HMAC(key=passwd.encode('utf-8'), digestmod=md5)
    mac.update(response['payload'])
    challenge = username.encode('utf-8') + b' ' + b(mac.hexdigest())
    cont = SON([('saslContinue', 1),
                ('conversationId', response['conversationId']),
                ('payload', Binary(challenge))])
    sock_info.command(source, cont)
def _authenticate_x509(credentials, sock_info):
    """Authenticate using MONGODB-X509.
    """
    # X509 authenticates the client certificate's subject against the
    # $external database; no password is involved.
    query = SON([('authenticate', 1),
                 ('mechanism', 'MONGODB-X509'),
                 ('user', credentials.username)])
    sock_info.command('$external', query)
def _authenticate_mongo_cr(credentials, sock_info):
    """Authenticate using MONGODB-CR.
    """
    source = credentials.source
    # Challenge/response: fetch a server nonce, answer with
    # MD5(nonce + username + password_digest).
    nonce = sock_info.command(source, {'getnonce': 1})['nonce']
    key = _auth_key(nonce, credentials.username, credentials.password)
    query = SON([('authenticate', 1),
                 ('user', credentials.username),
                 ('nonce', nonce),
                 ('key', key)])
    sock_info.command(source, query)
def _authenticate_default(credentials, sock_info):
    """Pick SCRAM-SHA-1 on modern servers (wire version >= 3),
    MONGODB-CR otherwise."""
    if sock_info.max_wire_version < 3:
        return _authenticate_mongo_cr(credentials, sock_info)
    return _authenticate_scram_sha1(credentials, sock_info)
# Dispatch table from mechanism name to its implementation.
_AUTH_MAP = {
    'CRAM-MD5': _authenticate_cram_md5,
    'GSSAPI': _authenticate_gssapi,
    'MONGODB-CR': _authenticate_mongo_cr,
    'MONGODB-X509': _authenticate_x509,
    'PLAIN': _authenticate_plain,
    'SCRAM-SHA-1': _authenticate_scram_sha1,
    'DEFAULT': _authenticate_default,
}
def authenticate(credentials, sock_info):
    """Authenticate sock_info using the mechanism in *credentials*.

    Raises ConfigurationError for a mechanism that has no registered
    implementation (previously this crashed with
    "TypeError: 'NoneType' object is not callable").
    """
    mechanism = credentials.mechanism
    auth_func = _AUTH_MAP.get(mechanism)
    if auth_func is None:
        raise ConfigurationError(
            'Unknown authentication mechanism: %r' % (mechanism,))
    auth_func(credentials, sock_info)
def logout(source, sock_info):
    """Log out from a database."""
    command = {'logout': 1}
    sock_info.command(source, command)
| apache-2.0 |
jbeee/jquery-pjaxr | test_app/tests/test_ignored_metatag.py | 2 | 1373 | from __future__ import unicode_literals
from selenium.common.exceptions import NoSuchElementException
from .helpers import SeleniumTestCase
class IgnoredMetatagTest(SeleniumTestCase):
    """Browser tests: metatags configured as ignored must not be applied
    during a pjaxr partial load, but remain present on a full render."""
    def test_ignored_metatag_pjaxr(self):
        # Start from the index and follow the pjaxr link.
        self.browser_get_reverse('index')
        self.assert_title('index-title')
        self.assert_content('index-content')
        ignored_metatag_link = self.browser.find_element_by_css_selector('#ignored-metatag-link')
        ignored_metatag_link.click()
        # Wait for the partial page update to finish.
        self.wait.until(lambda browser: browser.title == 'ignored-metatag-title')
        self.assert_title('ignored-metatag-title')
        self.assert_content('ignored-metatag-content')
        # Body attributes set by the pjaxr javascript on success.
        self.assert_body_attr('pjaxr-success', 'true')
        self.assert_body_attr('pjaxr-done', 'true')
        # ignored metatags won't raise an error nor be processed - more: #12
        with self.assertRaises(NoSuchElementException):
            self.browser.find_element_by_css_selector('meta[http-equiv="X-UA-Compatible"]')
    def test_ignored_metatag_initial(self):
        # A direct (non-pjaxr) request renders the page normally.
        self.browser_get_reverse('ignored_metatag')
        self.assert_title('ignored-metatag-title')
        self.assert_content('ignored-metatag-content')
        # ignored metatags aren't affected by normal rendering
        self.browser.find_element_by_css_selector('meta[http-equiv="X-UA-Compatible"]')
| mit |
ns950/calibre | src/calibre/ebooks/mobi/debug/headers.py | 14 | 25842 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid@kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import struct, datetime, os
from calibre.utils.date import utc_tz
from calibre.ebooks.mobi.reader.headers import NULL_INDEX
from calibre.ebooks.mobi.langcodes import main_language, sub_language
from calibre.ebooks.mobi.debug import format_bytes
from calibre.ebooks.mobi.utils import get_trailing_data
# PalmDB {{{
class PalmDOCAttributes(object):
    """Decode the 16-bit attribute field of a PalmDB header into named
    boolean flags."""

    class Attr(object):
        """One named flag: val is the masked (nonzero when set) bit."""

        def __init__(self, name, field, val):
            self.name = name
            self.val = val & field

        def __str__(self):
            return '%s: %s'%(self.name, bool(self.val))

    def __init__(self, raw):
        # raw is the 2-byte little-endian attribute word from the header.
        self.val = struct.unpack(b'<H', raw)[0]
        self.attributes = []
        # Bit masks per the Palm database format.  Bug fix: the last two
        # masks were 0x12 and 0x14, which are not single bits
        # (0x12 == 0x10|0x02); the spec assigns 0x20 to reset-after-install
        # and 0x40 to copy-prevention (no beaming).
        for name, field in [('Read Only', 0x02), ('Dirty AppInfoArea', 0x04),
                ('Backup this database', 0x08),
                ('Okay to install newer over existing copy, if present on PalmPilot', 0x10),
                ('Force the PalmPilot to reset after this database is installed', 0x20),
                ('Don\'t allow copy of file to be beamed to other Pilot',
                    0x40)]:
            self.attributes.append(PalmDOCAttributes.Attr(name, field,
                self.val))

    def __str__(self):
        attrs = '\n\t'.join([str(x) for x in self.attributes])
        return 'PalmDOC Attributes: %s\n\t%s'%(bin(self.val), attrs)
class PalmDB(object):
    """Parse the 78-byte PalmDB header of a MOBI/PalmDOC file."""

    def __init__(self, raw):
        self.raw = raw

        if self.raw.startswith(b'TPZ'):
            raise ValueError('This is a Topaz file')
        # NUL-padded database name.
        self.name = self.raw[:32].replace(b'\x00', b'')
        self.attributes = PalmDOCAttributes(self.raw[32:34])
        self.version = struct.unpack(b'>H', self.raw[34:36])[0]

        # Palm timestamps count seconds from 1904-01-01 UTC.
        palm_epoch = datetime.datetime(1904, 1, 1, tzinfo=utc_tz)
        self.creation_date_raw = struct.unpack(b'>I', self.raw[36:40])[0]
        self.creation_date = (palm_epoch +
                datetime.timedelta(seconds=self.creation_date_raw))
        self.modification_date_raw = struct.unpack(b'>I', self.raw[40:44])[0]
        self.modification_date = (palm_epoch +
                datetime.timedelta(seconds=self.modification_date_raw))
        self.last_backup_date_raw = struct.unpack(b'>I', self.raw[44:48])[0]
        self.last_backup_date = (palm_epoch +
                datetime.timedelta(seconds=self.last_backup_date_raw))
        self.modification_number = struct.unpack(b'>I', self.raw[48:52])[0]
        self.app_info_id = self.raw[52:56]
        self.sort_info_id = self.raw[56:60]
        # type + creator identifies the payload; only MOBI ('BOOKMOBI')
        # and PalmDOC ('TEXTREAD') are supported here.
        self.type = self.raw[60:64]
        self.creator = self.raw[64:68]
        self.ident = self.type + self.creator
        if self.ident not in (b'BOOKMOBI', b'TEXTREAD'):
            raise ValueError('Unknown book ident: %r'%self.ident)
        self.last_record_uid, = struct.unpack(b'>I', self.raw[68:72])
        self.next_rec_list_id = self.raw[72:76]

        self.number_of_records, = struct.unpack(b'>H', self.raw[76:78])

    def __str__(self):
        """Multi-line human readable dump of every header field."""
        ans = ['*'*20 + ' PalmDB Header '+ '*'*20]
        ans.append('Name: %r'%self.name)
        ans.append(str(self.attributes))
        ans.append('Version: %s'%self.version)
        ans.append('Creation date: %s (%s)'%(self.creation_date.isoformat(),
            self.creation_date_raw))
        ans.append('Modification date: %s (%s)'%(self.modification_date.isoformat(),
            self.modification_date_raw))
        ans.append('Backup date: %s (%s)'%(self.last_backup_date.isoformat(),
            self.last_backup_date_raw))
        ans.append('Modification number: %s'%self.modification_number)
        ans.append('App Info ID: %r'%self.app_info_id)
        ans.append('Sort Info ID: %r'%self.sort_info_id)
        ans.append('Type: %r'%self.type)
        ans.append('Creator: %r'%self.creator)
        ans.append('Last record UID +1: %r'%self.last_record_uid)
        ans.append('Next record list id: %r'%self.next_rec_list_id)
        ans.append('Number of records: %s'%self.number_of_records)

        return '\n'.join(ans)
# }}}
class Record(object):  # {{{

    """A single PDB record: its payload plus the (offset, flags, uid) header."""

    def __init__(self, raw, header):
        offset, flags, uid = header
        self.offset = offset
        self.flags = flags
        self.uid = uid
        self.raw = raw

    @property
    def header(self):
        fmt = 'Offset: %d Flags: %d UID: %d First 4 bytes: %r Size: %d'
        return fmt % (self.offset, self.flags, self.uid, self.raw[:4],
                len(self.raw))
    # }}}
# EXTH {{{
class EXTHRecord(object):

    '''One EXTH metadata record: numeric type, payload bytes and the total
    on-disk length (payload + the 8 byte type/length header).'''

    def __init__(self, type_, data, length):
        self.type = type_
        self.data = data
        self.length = length
        self.name = {
                1 : 'Drm Server Id',
                2 : 'Drm Commerce Id',
                3 : 'Drm Ebookbase Book Id',
                100 : 'Creator',
                101 : 'Publisher',
                102 : 'Imprint',
                103 : 'Description',
                104 : 'ISBN',
                105 : 'Subject',
                106 : 'Published',
                107 : 'Review',
                108 : 'Contributor',
                109 : 'Rights',
                110 : 'SubjectCode',
                111 : 'Type',
                112 : 'Source',
                113 : 'ASIN',
                114 : 'versionNumber',
                115 : 'sample',
                116 : 'StartOffset',
                117 : 'Adult',
                118 : 'Price',
                119 : 'Currency',
                121 : 'KF8_Boundary_Section',
                122 : 'fixed-layout',
                123 : 'book-type',
                124 : 'orientation-lock',
                125 : 'KF8_Count_of_Resources_Fonts_Images',
                126 : 'original-resolution',
                127 : 'zero-gutter',
                128 : 'zero-margin',
                129 : 'KF8_Masthead/Cover_Image',
                131 : 'KF8_Unidentified_Count',
                132 : 'RegionMagnification',
                200 : 'DictShortName',
                201 : 'CoverOffset',
                202 : 'ThumbOffset',
                203 : 'Fake Cover',
                204 : 'Creator Software',
                205 : 'Creator Major Version', # '>I'
                206 : 'Creator Minor Version', # '>I'
                207 : 'Creator Build Number', # '>I'
                208 : 'Watermark',
                209 : 'Tamper Proof Keys [hex]',
                300 : 'Font Signature [hex]',
                301 : 'Clipping Limit [3xx]', # percentage '>B'
                401 : 'Clipping Limit', # percentage '>B'
                402 : 'Publisher Limit',
                404 : 'Text to Speech Disabled', # '>B' 1 - TTS disabled 0 - TTS enabled
                501 : 'CDE Type', # 4 chars (PDOC, EBOK, MAGZ, ...)
                502 : 'last_update_time',
                503 : 'Updated Title',
                504 : 'ASIN [5xx]',
                508 : 'Unknown Title Furigana?',
                517 : 'Unknown Creator Furigana?',
                522 : 'Unknown Publisher Furigana?',
                524 : 'Language',
                525 : 'primary-writing-mode',
                527 : 'page-progression-direction',
                528 : 'Override Kindle fonts',
                534 : 'Input Source Type',
                535 : 'Kindlegen Build-Rev Number',
                536 : 'Container Info', # CONT_Header is 0, Ends with CONTAINER_BOUNDARY (or Asset_Type?)
                538 : 'Container Resolution',
                539 : 'Container Mimetype',
                543 : 'Container id', # FONT_CONTAINER, BW_CONTAINER, HD_CONTAINER
                }.get(self.type, repr(self.type))

        # The records named below (and the KF8 numeric types 121/125/131)
        # carry an integer payload whose width follows from the record length:
        # 8 header bytes + a 1/2/4 byte big-endian value.
        # BUG FIX: the set previously contained 'Clipping Limit (3xx)', which
        # can never equal the name assigned above ('Clipping Limit [3xx]'), so
        # type 301 records were left as raw bytes instead of being decoded.
        if (self.name in {'sample', 'StartOffset', 'CoverOffset', 'ThumbOffset', 'Fake Cover',
                'Creator Software', 'Creator Major Version', 'Creator Minor Version',
                'Creator Build Number', 'Clipping Limit [3xx]', 'Clipping Limit',
                'Publisher Limit', 'Text to Speech Disabled'} or
                self.type in {121, 125, 131}):
            if self.length == 9:
                self.data, = struct.unpack(b'>B', self.data)
            elif self.length == 10:
                self.data, = struct.unpack(b'>H', self.data)
            else:
                self.data, = struct.unpack(b'>L', self.data)
        elif self.type in {209, 300}:
            # Binary signatures/keys are rendered as hex (Python 2
            # str.encode('hex')).
            self.data = bytes(self.data.encode('hex'))

    def __str__(self):
        return '%s (%d): %r'%(self.name, self.type, self.data)
class EXTHHeader(object):

    '''The EXTH metadata block: a sequence of EXTHRecords, indexed by type.'''

    def __init__(self, raw):
        self.raw = raw
        if not self.raw.startswith(b'EXTH'):
            raise ValueError('EXTH header does not start with EXTH')
        self.length, = struct.unpack(b'>L', self.raw[4:8])
        self.count, = struct.unpack(b'>L', self.raw[8:12])

        self.records = []
        pos = 12
        for _ in range(self.count):
            pos = self.read_record(pos)
        self.records.sort(key=lambda rec: rec.type)
        self.rmap = {rec.type: rec for rec in self.records}

    def __getitem__(self, type_):
        return self.rmap.__getitem__(type_).data

    def get(self, type_, default=None):
        rec = self.rmap.get(type_, default)
        return getattr(rec, 'data', default)

    def read_record(self, pos):
        # Each record is: 4 byte type, 4 byte total length, then payload.
        type_, length = struct.unpack(b'>LL', self.raw[pos:pos+8])
        payload = self.raw[(pos+8):(pos+length)]
        self.records.append(EXTHRecord(type_, payload, length))
        return pos + length

    @property
    def kf8_header_index(self):
        # Record 121 points at the KF8 header; NULL_INDEX means "absent".
        idx = self.get(121, None)
        return None if idx == NULL_INDEX else idx

    def __str__(self):
        lines = ['*'*20 + ' EXTH Header '+ '*'*20]
        lines.append('EXTH header length: %d'%self.length)
        lines.append('Number of EXTH records: %d'%self.count)
        lines.append('EXTH records...')
        lines.extend(str(rec) for rec in self.records)
        return '\n'.join(lines)
# }}}
class MOBIHeader(object):  # {{{

    '''Parsed MOBI header from record0 (or from the KF8 header record of a
    joint MOBI6/KF8 file). ``offset`` is the index of the record holding this
    header; several record-number fields are stored relative to it and are
    converted to absolute indices at the end of __init__.'''

    def __init__(self, record0, offset):
        self.raw = record0.raw
        self.header_offset = offset

        # --- PalmDOC portion (first 16 bytes) ---
        self.compression_raw = self.raw[:2]
        self.compression = {1: 'No compression', 2: 'PalmDoc compression',
            17480: 'HUFF/CDIC compression'}.get(struct.unpack(b'>H',
                self.compression_raw)[0],
            repr(self.compression_raw))
        self.unused = self.raw[2:4]
        self.text_length, = struct.unpack(b'>I', self.raw[4:8])
        self.number_of_text_records, self.text_record_size = \
                struct.unpack(b'>HH', self.raw[8:12])
        self.encryption_type_raw, = struct.unpack(b'>H', self.raw[12:14])
        self.encryption_type = {
            0: 'No encryption',
            1: 'Old mobipocket encryption',
            2: 'Mobipocket encryption'
        }.get(self.encryption_type_raw, repr(self.encryption_type_raw))
        self.unknown = self.raw[14:16]

        # --- MOBI portion ---
        self.identifier = self.raw[16:20]
        if self.identifier != b'MOBI':
            raise ValueError('Identifier %r unknown'%self.identifier)
        self.length, = struct.unpack(b'>I', self.raw[20:24])
        self.type_raw, = struct.unpack(b'>I', self.raw[24:28])
        self.type = {
            2 : 'Mobipocket book',
            3 : 'PalmDOC book',
            4 : 'Audio',
            257 : 'News',
            258 : 'News Feed',
            259 : 'News magazine',
            513 : 'PICS',
            514 : 'Word',
            515 : 'XLS',
            516 : 'PPT',
            517 : 'TEXT',
            518 : 'HTML',
        }.get(self.type_raw, repr(self.type_raw))

        self.encoding_raw, = struct.unpack(b'>I', self.raw[28:32])
        self.encoding = {
            1252 : 'cp1252',
            65001: 'utf-8',
        }.get(self.encoding_raw, repr(self.encoding_raw))
        self.uid = self.raw[32:36]
        self.file_version, = struct.unpack(b'>I', self.raw[36:40])
        self.meta_orth_indx, self.meta_infl_indx = struct.unpack(
                b'>II', self.raw[40:48])
        self.secondary_index_record, = struct.unpack(b'>I', self.raw[48:52])
        self.reserved = self.raw[52:80]
        self.first_non_book_record, = struct.unpack(b'>I', self.raw[80:84])
        self.fullname_offset, = struct.unpack(b'>I', self.raw[84:88])
        self.fullname_length, = struct.unpack(b'>I', self.raw[88:92])

        # Locale: low byte is the language id, bits 10-17 the sub-language.
        self.locale_raw, = struct.unpack(b'>I', self.raw[92:96])
        langcode = self.locale_raw
        langid = langcode & 0xFF
        sublangid = (langcode >> 10) & 0xFF
        self.language = main_language.get(langid, 'ENGLISH')
        self.sublanguage = sub_language.get(sublangid, 'NEUTRAL')

        self.input_language = self.raw[96:100]
        # NOTE(review): attribute name is misspelled ('langauage') but kept
        # for backwards compatibility with existing consumers.
        self.output_langauage = self.raw[100:104]
        self.min_version, = struct.unpack(b'>I', self.raw[104:108])
        self.first_image_index, = struct.unpack(b'>I', self.raw[108:112])
        self.huffman_record_offset, = struct.unpack(b'>I', self.raw[112:116])
        self.huffman_record_count, = struct.unpack(b'>I', self.raw[116:120])
        self.datp_record_offset, = struct.unpack(b'>I', self.raw[120:124])
        self.datp_record_count, = struct.unpack(b'>I', self.raw[124:128])
        self.exth_flags, = struct.unpack(b'>I', self.raw[128:132])
        self.has_exth = bool(self.exth_flags & 0x40)

        self.has_drm_data = self.length >= 174 and len(self.raw) >= 184
        if self.has_drm_data:
            self.unknown3 = self.raw[132:168]
            self.drm_offset, self.drm_count, self.drm_size, self.drm_flags = \
                    struct.unpack(b'>4I', self.raw[168:184])

        self.has_extra_data_flags = self.length >= 232 and len(self.raw) >= 232+16
        self.has_fcis_flis = False
        self.has_multibytes = self.has_indexing_bytes = self.has_uncrossable_breaks = False
        self.extra_data_flags = 0
        if self.has_extra_data_flags:
            self.unknown4 = self.raw[184:192]
            if self.file_version < 8:
                self.first_text_record, self.last_text_record = \
                        struct.unpack_from(b'>HH', self.raw, 192)
                # BUG FIX: the trailing comma was missing, so fdst_count was
                # assigned a 1-tuple instead of an int in this branch.
                self.fdst_count, = struct.unpack_from(b'>L', self.raw, 196)
            else:
                self.fdst_idx, self.fdst_count = struct.unpack_from(b'>LL',
                        self.raw, 192)
                if self.fdst_count <= 1:
                    self.fdst_idx = NULL_INDEX
            (self.fcis_number, self.fcis_count, self.flis_number,
                    self.flis_count) = struct.unpack(b'>IIII',
                            self.raw[200:216])
            self.unknown6 = self.raw[216:224]
            self.srcs_record_index = struct.unpack(b'>I',
                self.raw[224:228])[0]
            self.num_srcs_records = struct.unpack(b'>I',
                self.raw[228:232])[0]
            self.unknown7 = self.raw[232:240]
            self.extra_data_flags = struct.unpack(b'>I',
                self.raw[240:244])[0]
            self.has_multibytes = bool(self.extra_data_flags & 0b1)
            self.has_indexing_bytes = bool(self.extra_data_flags & 0b10)
            self.has_uncrossable_breaks = bool(self.extra_data_flags & 0b100)
            self.primary_index_record, = struct.unpack(b'>I',
                    self.raw[244:248])

        if self.length >= 248:
            # KF8-only index fields.
            (self.sect_idx, self.skel_idx, self.datp_idx, self.oth_idx
                    ) = struct.unpack_from(b'>4L', self.raw, 248)
            self.unknown9 = self.raw[264:self.length+16]
            if self.meta_orth_indx not in {NULL_INDEX, self.sect_idx}:
                raise ValueError('KF8 header has different Meta orth and '
                        'section indices')

        # The following are all relative to the position of the header record
        # make them absolute for ease of debugging
        self.relative_records = {'sect_idx', 'skel_idx', 'datp_idx', 'oth_idx',
                'meta_orth_indx', 'huffman_record_offset',
                'first_non_book_record', 'datp_record_offset', 'fcis_number',
                'flis_number', 'primary_index_record', 'fdst_idx',
                'first_image_index'}
        for x in self.relative_records:
            if hasattr(self, x) and getattr(self, x) != NULL_INDEX:
                setattr(self, x, self.header_offset+getattr(self, x))

        # Try to find the first non-text record
        self.first_resource_record = offset + 1 + self.number_of_text_records  # Default to first record after all text records
        pointer = min(getattr(self, 'first_non_book_record', NULL_INDEX), getattr(self, 'first_image_index', NULL_INDEX))
        if pointer != NULL_INDEX:
            self.first_resource_record = max(pointer, self.first_resource_record)

        self.last_resource_record = NULL_INDEX

        if self.has_exth:
            self.exth_offset = 16 + self.length
            self.exth = EXTHHeader(self.raw[self.exth_offset:])
            self.end_of_exth = self.exth_offset + self.exth.length
            self.bytes_after_exth = self.raw[self.end_of_exth:self.fullname_offset]
            if self.exth.kf8_header_index is not None and offset == 0:
                # MOBI 6 header in a joint file, adjust self.last_resource_record
                self.last_resource_record = self.exth.kf8_header_index - 2

    def __str__(self):
        ans = ['*'*20 + ' MOBI %d Header '%self.file_version+ '*'*20]
        a = ans.append

        def i(d, x):
            # Pretty-print NULL_INDEX values as 'NULL'.
            x = 'NULL' if x == NULL_INDEX else x
            a('%s: %s'%(d, x))

        def r(d, attr):
            # Show both absolute and header-relative values for record fields.
            x = getattr(self, attr)
            if attr in self.relative_records and x != NULL_INDEX:
                a('%s: Absolute: %d Relative: %d'%(d, x, x-self.header_offset))
            else:
                i(d, x)

        a('Compression: %s'%self.compression)
        a('Unused: %r'%self.unused)
        a('Text length: %d'%self.text_length)
        a('Number of text records: %d'%self.number_of_text_records)
        a('Text record size: %d'%self.text_record_size)
        a('Encryption: %s'%self.encryption_type)
        a('Unknown: %r'%self.unknown)
        a('Identifier: %r'%self.identifier)
        a('Header length: %d'% self.length)
        a('Type: %s'%self.type)
        a('Encoding: %s'%self.encoding)
        a('UID: %r'%self.uid)
        a('File version: %d'%self.file_version)
        r('Meta Orth Index', 'meta_orth_indx')
        r('Meta Infl Index', 'meta_infl_indx')
        r('Secondary index record', 'secondary_index_record')
        a('Reserved: %r'%self.reserved)
        r('First non-book record', 'first_non_book_record')
        a('Full name offset: %d'%self.fullname_offset)
        a('Full name length: %d bytes'%self.fullname_length)
        a('Langcode: %r'%self.locale_raw)
        a('Language: %s'%self.language)
        a('Sub language: %s'%self.sublanguage)
        a('Input language: %r'%self.input_language)
        a('Output language: %r'%self.output_langauage)
        a('Min version: %d'%self.min_version)
        r('First Image index', 'first_image_index')
        r('Huffman record offset', 'huffman_record_offset')
        a('Huffman record count: %d'%self.huffman_record_count)
        r('Huffman table offset', 'datp_record_offset')
        a('Huffman table length: %r'%self.datp_record_count)
        a('EXTH flags: %s (%s)'%(bin(self.exth_flags)[2:], self.has_exth))
        if self.has_drm_data:
            a('Unknown3: %r'%self.unknown3)
            r('DRM Offset', 'drm_offset')
            a('DRM Count: %s'%self.drm_count)
            a('DRM Size: %s'%self.drm_size)
            a('DRM Flags: %r'%self.drm_flags)
        if self.has_extra_data_flags:
            a('Unknown4: %r'%self.unknown4)
            if hasattr(self, 'first_text_record'):
                a('First content record: %d'%self.first_text_record)
                a('Last content record: %d'%self.last_text_record)
            else:
                r('FDST Index', 'fdst_idx')
            a('FDST Count: %d'% self.fdst_count)
            r('FCIS number', 'fcis_number')
            a('FCIS count: %d'% self.fcis_count)
            r('FLIS number', 'flis_number')
            a('FLIS count: %d'% self.flis_count)
            a('Unknown6: %r'% self.unknown6)
            r('SRCS record index', 'srcs_record_index')
            a('Number of SRCS records?: %d'%self.num_srcs_records)
            a('Unknown7: %r'%self.unknown7)
            a(('Extra data flags: %s (has multibyte: %s) '
                '(has indexing: %s) (has uncrossable breaks: %s)')%(
                    bin(self.extra_data_flags), self.has_multibytes,
                    self.has_indexing_bytes, self.has_uncrossable_breaks))
            r('NCX index', 'primary_index_record')
        if self.length >= 248:
            r('Sections Index', 'sect_idx')
            r('SKEL Index', 'skel_idx')
            r('DATP Index', 'datp_idx')
            r('Other Index', 'oth_idx')
            if self.unknown9:
                a('Unknown9: %r'%self.unknown9)

        ans = '\n'.join(ans)

        if self.has_exth:
            ans += '\n\n' + str(self.exth)
            ans += '\n\nBytes after EXTH (%d bytes): %s'%(
                    len(self.bytes_after_exth),
                    format_bytes(self.bytes_after_exth))
        ans += '\nNumber of bytes after full name: %d' % (len(self.raw) - (self.fullname_offset +
            self.fullname_length))

        ans += '\nRecord 0 length: %d'%len(self.raw)
        return ans
# }}}
class MOBIFile(object):

    """Whole MOBI file: PalmDB header, all records, MOBI header(s) and the
    matching text decompressors."""

    def __init__(self, stream):
        self.raw = stream.read()
        self.palmdb = PalmDB(self.raw[:78])

        # Record index: one 8-byte entry per record
        # (4 byte offset, 1 byte flags, 3 byte uid).
        self.record_headers = []
        self.records = []
        for i in xrange(self.palmdb.number_of_records):
            pos = 78 + i * 8
            offset, a1, a2, a3, a4 = struct.unpack(b'>LBBBB', self.raw[pos:pos+8])
            flags, val = a1, a2 << 16 | a3 << 8 | a4
            self.record_headers.append((offset, flags, val))

        def section(section_number):
            # A record's payload runs to the next record's offset (or EOF
            # for the last record).
            if section_number == self.palmdb.number_of_records - 1:
                end_off = len(self.raw)
            else:
                end_off = self.record_headers[section_number + 1][0]
            off = self.record_headers[section_number][0]
            return self.raw[off:end_off]

        for i in range(self.palmdb.number_of_records):
            self.records.append(Record(section(i), self.record_headers[i]))

        self.mobi_header = MOBIHeader(self.records[0], 0)
        self.huffman_record_nums = []

        # Detect KF8: either a standalone v8+ file, or a joint MOBI6+KF8 file
        # whose EXTH record 121 points at a KF8 header preceded by a
        # BOUNDARY record.
        self.kf8_type = None
        mh = mh8 = self.mobi_header
        if mh.file_version >= 8:
            self.kf8_type = 'standalone'
        elif mh.has_exth and mh.exth.kf8_header_index is not None:
            kf8i = mh.exth.kf8_header_index
            try:
                rec = self.records[kf8i-1]
            except IndexError:
                pass
            else:
                if rec.raw == b'BOUNDARY':
                    self.kf8_type = 'joint'
                    mh8 = MOBIHeader(self.records[kf8i], kf8i)
        self.mobi8_header = mh8

        # Pick the decompressor(s) matching the declared compression scheme.
        if 'huff' in self.mobi_header.compression.lower():
            from calibre.ebooks.mobi.huffcdic import HuffReader

            def huffit(off, cnt):
                huffman_record_nums = list(xrange(off, off+cnt))
                huffrecs = [self.records[r].raw for r in huffman_record_nums]
                huffs = HuffReader(huffrecs)
                return huffman_record_nums, huffs.unpack

            if self.kf8_type == 'joint':
                # Joint files have separate HUFF/CDIC tables for each header.
                recs6, d6 = huffit(mh.huffman_record_offset,
                        mh.huffman_record_count)
                recs8, d8 = huffit(mh8.huffman_record_offset,
                        mh8.huffman_record_count)
                self.huffman_record_nums = recs6 + recs8
            else:
                self.huffman_record_nums, d6 = huffit(mh.huffman_record_offset,
                        mh.huffman_record_count)
                d8 = d6
        elif 'palmdoc' in self.mobi_header.compression.lower():
            from calibre.ebooks.compression.palmdoc import decompress_doc
            d8 = d6 = decompress_doc
        else:
            # No (or unknown) compression: pass text through unchanged.
            d8 = d6 = lambda x: x

        self.decompress6, self.decompress8 = d6, d8
class TextRecord(object):  # {{{

    """One decompressed text record together with its parsed trailing data."""

    def __init__(self, idx, record, extra_data_flags, decompress):
        self.trailing_data, self.raw = get_trailing_data(record.raw, extra_data_flags)
        raw_trailing_bytes = record.raw[len(self.raw):]
        self.raw = decompress(self.raw)

        # Give the known trailing-entry types friendly names.
        if 0 in self.trailing_data:
            self.trailing_data['multibyte_overlap'] = self.trailing_data.pop(0)
        if 1 in self.trailing_data:
            self.trailing_data['indexing'] = self.trailing_data.pop(1)
        if 2 in self.trailing_data:
            self.trailing_data['uncrossable_breaks'] = self.trailing_data.pop(2)
        self.trailing_data['raw_bytes'] = raw_trailing_bytes

        # Any remaining integer keys are trailing entries we don't know about.
        for typ, val in self.trailing_data.iteritems():
            if isinstance(typ, int):
                print ('Record %d has unknown trailing data of type: %d : %r'%
                        (idx, typ, val))

        self.idx = idx

    def dump(self, folder):
        # Writes <idx>.txt (decompressed text) and <idx>.trailing_data.
        name = '%06d'%self.idx
        with open(os.path.join(folder, name+'.txt'), 'wb') as f:
            f.write(self.raw)
        with open(os.path.join(folder, name+'.trailing_data'), 'wb') as f:
            for k, v in self.trailing_data.iteritems():
                raw = '%s : %r\n\n'%(k, v)
                f.write(raw.encode('utf-8'))

    def __len__(self):
        return len(self.raw)
    # }}}
| gpl-3.0 |
FusionSP/android_external_chromium_org | tools/telemetry/telemetry/util/cloud_storage.py | 25 | 7906 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrappers for gsutil, for basic interaction with Google Cloud Storage."""
import contextlib
import cStringIO
import hashlib
import logging
import os
import subprocess
import sys
import tarfile
import urllib2
from telemetry.core import platform
from telemetry.util import path
# Cloud Storage buckets used by telemetry, in decreasing order of visibility.
PUBLIC_BUCKET = 'chromium-telemetry'
PARTNER_BUCKET = 'chrome-partner-telemetry'
INTERNAL_BUCKET = 'chrome-telemetry'

# Friendly names accepted wherever a bucket name is expected.
BUCKET_ALIASES = {
    'public': PUBLIC_BUCKET,
    'partner': PARTNER_BUCKET,
    'internal': INTERNAL_BUCKET,
}

# Where gsutil is fetched from and unpacked to when not already installed.
_GSUTIL_URL = 'http://storage.googleapis.com/pub/gsutil.tar.gz'
_DOWNLOAD_PATH = os.path.join(path.GetTelemetryDir(), 'third_party', 'gsutil')

# TODO(tbarzic): A workaround for http://crbug.com/386416 and
# http://crbug.com/359293. See |_RunCommand|.
_CROS_GSUTIL_HOME_WAR = '/home/chromeos-test/'
class CloudStorageError(Exception):
  """Base error for Cloud Storage failures raised by this module."""

  @staticmethod
  def _GetConfigInstructions(gsutil_path):
    # Googlers authenticate via prodaccess when gsutil supports it and the
    # tool is on PATH; everyone else is told to run "gsutil config".
    if SupportsProdaccess(gsutil_path) and _FindExecutableInPath('prodaccess'):
      return 'Run prodaccess to authenticate.'
    if platform.GetHostPlatform().GetOSName() == 'chromeos':
      gsutil_path = 'HOME=%s %s' % (_CROS_GSUTIL_HOME_WAR, gsutil_path)
    return ('To configure your credentials:\n'
            ' 1. Run "%s config" and follow its instructions.\n'
            ' 2. If you have a @google.com account, use that account.\n'
            ' 3. For the project-id, just enter 0.' % gsutil_path)
# NOTE(review): this class shadows Python's built-in PermissionError (3.3+);
# renaming it would break existing callers, so the name is kept.
class PermissionError(CloudStorageError):

  def __init__(self, gsutil_path):
    # The message embeds per-platform credential-setup instructions.
    super(PermissionError, self).__init__(
        'Attempted to access a file from Cloud Storage but you don\'t '
        'have permission. ' + self._GetConfigInstructions(gsutil_path))
class CredentialsError(CloudStorageError):
  # Raised when gsutil reports that no credentials are configured at all.

  def __init__(self, gsutil_path):
    # The message embeds per-platform credential-setup instructions.
    super(CredentialsError, self).__init__(
        'Attempted to access a file from Cloud Storage but you have no '
        'configured credentials. ' + self._GetConfigInstructions(gsutil_path))
class NotFoundError(CloudStorageError):
  # Raised when the requested gs:// bucket or object does not exist.
  pass
# TODO(tonyg/dtu): Can this be replaced with distutils.spawn.find_executable()?
def _FindExecutableInPath(relative_executable_path, *extra_search_paths):
  """Searches extra_search_paths, then $PATH; returns the first executable."""
  search_dirs = list(extra_search_paths)
  search_dirs.extend(os.environ['PATH'].split(os.pathsep))
  for search_dir in search_dirs:
    candidate = os.path.join(search_dir, relative_executable_path)
    if path.IsExecutable(candidate):
      return candidate
  return None
def _DownloadGsutil():
  """Downloads and unpacks gsutil into _DOWNLOAD_PATH; returns its path."""
  logging.info('Downloading gsutil')
  with contextlib.closing(urllib2.urlopen(_GSUTIL_URL, timeout=60)) as response:
    with tarfile.open(fileobj=cStringIO.StringIO(response.read())) as tar_file:
      # NOTE(review): extractall() trusts member paths inside the archive; a
      # malicious tarball could write outside the target dir. The URL is a
      # fixed Google-controlled location — confirm this is acceptable.
      tar_file.extractall(os.path.dirname(_DOWNLOAD_PATH))
  logging.info('Downloaded gsutil to %s' % _DOWNLOAD_PATH)
  return os.path.join(_DOWNLOAD_PATH, 'gsutil')
def FindGsutil():
  """Return the gsutil executable path. If we can't find it, download it."""
  # Prefer a depot_tools checkout, then any gsutil on PATH, then download.
  for candidate in (os.path.join('third_party', 'gsutil', 'gsutil'), 'gsutil'):
    found = _FindExecutableInPath(candidate, _DOWNLOAD_PATH)
    if found:
      return found
  return _DownloadGsutil()
def SupportsProdaccess(gsutil_path):
  """Return True if the gsutil source at gsutil_path mentions prodaccess."""
  with open(gsutil_path, 'r') as gsutil:
    contents = gsutil.read()
  return 'prodaccess' in contents
def _RunCommand(args):
  """Runs gsutil with |args| and returns its stdout.

  Raises CredentialsError, PermissionError, NotFoundError or CloudStorageError
  depending on the error text gsutil printed to stderr.
  """
  gsutil_path = FindGsutil()

  # On cros device, as telemetry is running as root, home will be set to /root/,
  # which is not writable. gsutil will attempt to create a download tracker dir
  # in home dir and fail. To avoid this, override HOME dir to something writable
  # when running on cros device.
  #
  # TODO(tbarzic): Figure out a better way to handle gsutil on cros.
  # http://crbug.com/386416, http://crbug.com/359293.
  gsutil_env = None
  if platform.GetHostPlatform().GetOSName() == 'chromeos':
    gsutil_env = os.environ.copy()
    gsutil_env['HOME'] = _CROS_GSUTIL_HOME_WAR

  gsutil = subprocess.Popen([sys.executable, gsutil_path] + args,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                            env=gsutil_env)
  stdout, stderr = gsutil.communicate()

  if gsutil.returncode:
    # Map gsutil's stderr text onto this module's exception hierarchy.
    if stderr.startswith((
        'You are attempting to access protected data with no configured',
        'Failure: No handler was ready to authenticate.')):
      raise CredentialsError(gsutil_path)
    if 'status=401' in stderr or 'status 401' in stderr:
      raise CredentialsError(gsutil_path)
    if 'status=403' in stderr or 'status 403' in stderr:
      raise PermissionError(gsutil_path)
    if (stderr.startswith('InvalidUriError') or 'No such object' in stderr or
        'No URLs matched' in stderr):
      raise NotFoundError(stderr)
    raise CloudStorageError(stderr)

  return stdout
def List(bucket):
  """Return the object names directly under gs://bucket/."""
  prefix = 'gs://%s/' % bucket
  listing = _RunCommand(['ls', prefix])
  return [line[len(prefix):] for line in listing.splitlines()]
def Exists(bucket, remote_path):
  """Return True iff gs://bucket/remote_path exists."""
  try:
    _RunCommand(['ls', 'gs://%s/%s' % (bucket, remote_path)])
  except NotFoundError:
    return False
  return True
def Move(bucket1, bucket2, remote_path):
  """Move gs://bucket1/remote_path to gs://bucket2/remote_path."""
  src = 'gs://%s/%s' % (bucket1, remote_path)
  dst = 'gs://%s/%s' % (bucket2, remote_path)
  logging.info('Moving %s to %s' % (src, dst))
  _RunCommand(['mv', src, dst])
def Delete(bucket, remote_path):
  """Delete gs://bucket/remote_path."""
  target = 'gs://%s/%s' % (bucket, remote_path)
  logging.info('Deleting %s' % target)
  _RunCommand(['rm', target])
def Get(bucket, remote_path, local_path):
  """Download gs://bucket/remote_path to local_path."""
  source = 'gs://%s/%s' % (bucket, remote_path)
  logging.info('Downloading %s to %s' % (source, local_path))
  _RunCommand(['cp', source, local_path])
def Insert(bucket, remote_path, local_path, publicly_readable=False):
  """Upload local_path to gs://bucket/remote_path, optionally world-readable."""
  url = 'gs://%s/%s' % (bucket, remote_path)
  args = ['cp']
  note = ''
  if publicly_readable:
    args.extend(['-a', 'public-read'])
    note = ' (publicly readable)'
  args.extend([local_path, url])
  logging.info('Uploading %s to %s%s' % (local_path, url, note))
  _RunCommand(args)
def GetIfChanged(file_path, bucket=None):
  """Gets the file at file_path if it has a hash file that doesn't match.

  If the file is not in Cloud Storage, log a warning instead of raising an
  exception. We assume that the user just hasn't uploaded the file yet.

  Returns:
    True if the binary was changed.
  """
  hash_path = file_path + '.sha1'
  if not os.path.exists(hash_path):
    logging.warning('Hash file not found: %s' % hash_path)
    return False

  expected_hash = ReadHash(hash_path)
  if os.path.exists(file_path) and CalculateHash(file_path) == expected_hash:
    # Local copy already matches; nothing to download.
    return False

  if bucket:
    buckets = [bucket]
  else:
    buckets = [PUBLIC_BUCKET, PARTNER_BUCKET, INTERNAL_BUCKET]

  # FIX: the loop variable previously shadowed the |bucket| parameter; use a
  # distinct name for clarity.
  for bucket_name in buckets:
    try:
      url = 'gs://%s/%s' % (bucket_name, expected_hash)
      _RunCommand(['cp', url, file_path])
      logging.info('Downloaded %s to %s' % (url, file_path))
      return True
    except NotFoundError:
      continue

  logging.warning('Unable to find file in Cloud Storage: %s', file_path)
  return False
def CalculateHash(file_path):
  """Calculates and returns the SHA-1 hex digest of the file at file_path."""
  digest = hashlib.sha1()
  with open(file_path, 'rb') as f:
    # Read in 1mb chunks, so it doesn't all have to be loaded into memory.
    for chunk in iter(lambda: f.read(1024 * 1024), b''):
      digest.update(chunk)
  return digest.hexdigest()
def ReadHash(hash_path):
  """Return the hash stored in hash_path (first KiB, trailing space stripped)."""
  with open(hash_path, 'rb') as hash_file:
    raw = hash_file.read(1024)
  return raw.rstrip()
| bsd-3-clause |
eusi/MissionPlanerHM | Lib/encodings/koi8_r.py | 593 | 14035 | """ Python Character Mapping Codec koi8_r generated from 'MAPPINGS/VENDORS/MISC/KOI8-R.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless one-byte charmap codec; the mapping tables
    # (decoding_table / encoding_table) are defined at module level.

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding is stateless, so no state is kept between calls;
        # [0] drops the consumed-length part of the (output, length) tuple.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Charmap decoding is stateless; [0] drops the consumed-length part.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # All behaviour comes from Codec plus the codecs stream machinery.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # All behaviour comes from Codec plus the codecs stream machinery.
    pass
### encodings module API
def getregentry():
    """Return the codecs.CodecInfo used to register the 'koi8-r' codec."""
    return codecs.CodecInfo(
        name='koi8-r',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\u2500' # 0x80 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u2502' # 0x81 -> BOX DRAWINGS LIGHT VERTICAL
u'\u250c' # 0x82 -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2510' # 0x83 -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x84 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2518' # 0x85 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u251c' # 0x86 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2524' # 0x87 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\u252c' # 0x88 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u2534' # 0x89 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u253c' # 0x8A -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\u2580' # 0x8B -> UPPER HALF BLOCK
u'\u2584' # 0x8C -> LOWER HALF BLOCK
u'\u2588' # 0x8D -> FULL BLOCK
u'\u258c' # 0x8E -> LEFT HALF BLOCK
u'\u2590' # 0x8F -> RIGHT HALF BLOCK
u'\u2591' # 0x90 -> LIGHT SHADE
u'\u2592' # 0x91 -> MEDIUM SHADE
u'\u2593' # 0x92 -> DARK SHADE
u'\u2320' # 0x93 -> TOP HALF INTEGRAL
u'\u25a0' # 0x94 -> BLACK SQUARE
u'\u2219' # 0x95 -> BULLET OPERATOR
u'\u221a' # 0x96 -> SQUARE ROOT
u'\u2248' # 0x97 -> ALMOST EQUAL TO
u'\u2264' # 0x98 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0x99 -> GREATER-THAN OR EQUAL TO
u'\xa0' # 0x9A -> NO-BREAK SPACE
u'\u2321' # 0x9B -> BOTTOM HALF INTEGRAL
u'\xb0' # 0x9C -> DEGREE SIGN
u'\xb2' # 0x9D -> SUPERSCRIPT TWO
u'\xb7' # 0x9E -> MIDDLE DOT
u'\xf7' # 0x9F -> DIVISION SIGN
u'\u2550' # 0xA0 -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u2551' # 0xA1 -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2552' # 0xA2 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
u'\u0451' # 0xA3 -> CYRILLIC SMALL LETTER IO
u'\u2553' # 0xA4 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
u'\u2554' # 0xA5 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2555' # 0xA6 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
u'\u2556' # 0xA7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
u'\u2557' # 0xA8 -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u2558' # 0xA9 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
u'\u2559' # 0xAA -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
u'\u255a' # 0xAB -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u255b' # 0xAC -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
u'\u255c' # 0xAD -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
u'\u255d' # 0xAE -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\u255e' # 0xAF -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
u'\u255f' # 0xB0 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
u'\u2560' # 0xB1 -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2561' # 0xB2 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
u'\u0401' # 0xB3 -> CYRILLIC CAPITAL LETTER IO
u'\u2562' # 0xB4 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
u'\u2563' # 0xB5 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2564' # 0xB6 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
u'\u2565' # 0xB7 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
u'\u2566' # 0xB8 -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2567' # 0xB9 -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
u'\u2568' # 0xBA -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
u'\u2569' # 0xBB -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u256a' # 0xBC -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
u'\u256b' # 0xBD -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
u'\u256c' # 0xBE -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\xa9' # 0xBF -> COPYRIGHT SIGN
u'\u044e' # 0xC0 -> CYRILLIC SMALL LETTER YU
u'\u0430' # 0xC1 -> CYRILLIC SMALL LETTER A
u'\u0431' # 0xC2 -> CYRILLIC SMALL LETTER BE
u'\u0446' # 0xC3 -> CYRILLIC SMALL LETTER TSE
u'\u0434' # 0xC4 -> CYRILLIC SMALL LETTER DE
u'\u0435' # 0xC5 -> CYRILLIC SMALL LETTER IE
u'\u0444' # 0xC6 -> CYRILLIC SMALL LETTER EF
u'\u0433' # 0xC7 -> CYRILLIC SMALL LETTER GHE
u'\u0445' # 0xC8 -> CYRILLIC SMALL LETTER HA
u'\u0438' # 0xC9 -> CYRILLIC SMALL LETTER I
u'\u0439' # 0xCA -> CYRILLIC SMALL LETTER SHORT I
u'\u043a' # 0xCB -> CYRILLIC SMALL LETTER KA
u'\u043b' # 0xCC -> CYRILLIC SMALL LETTER EL
u'\u043c' # 0xCD -> CYRILLIC SMALL LETTER EM
u'\u043d' # 0xCE -> CYRILLIC SMALL LETTER EN
u'\u043e' # 0xCF -> CYRILLIC SMALL LETTER O
u'\u043f' # 0xD0 -> CYRILLIC SMALL LETTER PE
u'\u044f' # 0xD1 -> CYRILLIC SMALL LETTER YA
u'\u0440' # 0xD2 -> CYRILLIC SMALL LETTER ER
u'\u0441' # 0xD3 -> CYRILLIC SMALL LETTER ES
u'\u0442' # 0xD4 -> CYRILLIC SMALL LETTER TE
u'\u0443' # 0xD5 -> CYRILLIC SMALL LETTER U
u'\u0436' # 0xD6 -> CYRILLIC SMALL LETTER ZHE
u'\u0432' # 0xD7 -> CYRILLIC SMALL LETTER VE
u'\u044c' # 0xD8 -> CYRILLIC SMALL LETTER SOFT SIGN
u'\u044b' # 0xD9 -> CYRILLIC SMALL LETTER YERU
u'\u0437' # 0xDA -> CYRILLIC SMALL LETTER ZE
u'\u0448' # 0xDB -> CYRILLIC SMALL LETTER SHA
u'\u044d' # 0xDC -> CYRILLIC SMALL LETTER E
u'\u0449' # 0xDD -> CYRILLIC SMALL LETTER SHCHA
u'\u0447' # 0xDE -> CYRILLIC SMALL LETTER CHE
u'\u044a' # 0xDF -> CYRILLIC SMALL LETTER HARD SIGN
u'\u042e' # 0xE0 -> CYRILLIC CAPITAL LETTER YU
u'\u0410' # 0xE1 -> CYRILLIC CAPITAL LETTER A
u'\u0411' # 0xE2 -> CYRILLIC CAPITAL LETTER BE
u'\u0426' # 0xE3 -> CYRILLIC CAPITAL LETTER TSE
u'\u0414' # 0xE4 -> CYRILLIC CAPITAL LETTER DE
u'\u0415' # 0xE5 -> CYRILLIC CAPITAL LETTER IE
u'\u0424' # 0xE6 -> CYRILLIC CAPITAL LETTER EF
u'\u0413' # 0xE7 -> CYRILLIC CAPITAL LETTER GHE
u'\u0425' # 0xE8 -> CYRILLIC CAPITAL LETTER HA
u'\u0418' # 0xE9 -> CYRILLIC CAPITAL LETTER I
u'\u0419' # 0xEA -> CYRILLIC CAPITAL LETTER SHORT I
u'\u041a' # 0xEB -> CYRILLIC CAPITAL LETTER KA
u'\u041b' # 0xEC -> CYRILLIC CAPITAL LETTER EL
u'\u041c' # 0xED -> CYRILLIC CAPITAL LETTER EM
u'\u041d' # 0xEE -> CYRILLIC CAPITAL LETTER EN
u'\u041e' # 0xEF -> CYRILLIC CAPITAL LETTER O
u'\u041f' # 0xF0 -> CYRILLIC CAPITAL LETTER PE
u'\u042f' # 0xF1 -> CYRILLIC CAPITAL LETTER YA
u'\u0420' # 0xF2 -> CYRILLIC CAPITAL LETTER ER
u'\u0421' # 0xF3 -> CYRILLIC CAPITAL LETTER ES
u'\u0422' # 0xF4 -> CYRILLIC CAPITAL LETTER TE
u'\u0423' # 0xF5 -> CYRILLIC CAPITAL LETTER U
u'\u0416' # 0xF6 -> CYRILLIC CAPITAL LETTER ZHE
u'\u0412' # 0xF7 -> CYRILLIC CAPITAL LETTER VE
u'\u042c' # 0xF8 -> CYRILLIC CAPITAL LETTER SOFT SIGN
u'\u042b' # 0xF9 -> CYRILLIC CAPITAL LETTER YERU
u'\u0417' # 0xFA -> CYRILLIC CAPITAL LETTER ZE
u'\u0428' # 0xFB -> CYRILLIC CAPITAL LETTER SHA
u'\u042d' # 0xFC -> CYRILLIC CAPITAL LETTER E
u'\u0429' # 0xFD -> CYRILLIC CAPITAL LETTER SHCHA
u'\u0427' # 0xFE -> CYRILLIC CAPITAL LETTER CHE
u'\u042a' # 0xFF -> CYRILLIC CAPITAL LETTER HARD SIGN
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| gpl-3.0 |
openstack/oslo.db | oslo_db/sqlalchemy/migration.py | 1 | 7224 | # coding=utf-8
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Base on code in migrate/changeset/databases/sqlite.py which is under
# the following license:
#
# The MIT License
#
# Copyright (c) 2009 Evan Rosson, Jan Dittberner, Domen Kožar
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import os
from debtcollector import removals
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
import sqlalchemy
from oslo_db._i18n import _
from oslo_db import exception
_removed_msg = (
'sqlalchemy-migrate support in oslo_db is deprecated; consider '
'migrating to alembic'
)
@removals.remove(message=_removed_msg, version='8.3.0')
def db_sync(engine, abs_path, version=None, init_version=0, sanity_check=True):
    """Upgrade or downgrade a database to a target schema version.

    Runs the upgrade() or downgrade() functions in the repository's change
    scripts, optionally checking table charsets before and after.

    :param engine: SQLAlchemy engine instance for a given database
    :param abs_path: Absolute path to migrate repository.
    :param version: Database will upgrade/downgrade until this version.
                    If None - database will update to the latest
                    available version.
    :param init_version: Initial database version
    :param sanity_check: Require schema sanity checking for all tables
    :raises exception.DBMigrationError: if *version* is not an integer or
        the underlying migration fails.
    """
    # Normalize the requested version up front so the comparison below is
    # always int-vs-int.
    if version is not None:
        try:
            version = int(version)
        except ValueError:
            raise exception.DBMigrationError(_("version should be an integer"))

    current = db_version(engine, abs_path, init_version)
    repo = _find_migrate_repo(abs_path)
    if sanity_check:
        _db_schema_sanity_check(engine)

    # No explicit target (or a target above the current version) means an
    # upgrade; anything else is a downgrade.
    if version is None or version > current:
        try:
            result = versioning_api.upgrade(engine, repo, version)
        except Exception as ex:
            raise exception.DBMigrationError(ex)
    else:
        result = versioning_api.downgrade(engine, repo, version)

    if sanity_check:
        _db_schema_sanity_check(engine)
    return result
def _db_schema_sanity_check(engine):
    """Ensure all database tables were created with required parameters.

    Currently this only checks MySQL: every user table must have a utf8
    collation.  Other backends are a no-op.

    :param engine: SQLAlchemy engine instance for a given database
    :raises ValueError: if any non-excluded table has a non-utf8 collation
    """
    if engine.name == 'mysql':
        # %s placeholder is bound below; the literal %% survives Python
        # %-interpolation so the LIKE pattern reaches MySQL as '%utf8%'.
        onlyutf8_sql = ('SELECT TABLE_NAME,TABLE_COLLATION '
                        'from information_schema.TABLES '
                        'where TABLE_SCHEMA=%s and '
                        'TABLE_COLLATION NOT LIKE \'%%utf8%%\'')
        # NOTE(morganfainberg): exclude the sqlalchemy-migrate and alembic
        # versioning tables from the tables we need to verify utf8 status on.
        # Non-standard table names are not supported.
        EXCLUDED_TABLES = ['migrate_version', 'alembic_version']
        table_names = [res[0] for res in
                       engine.execute(onlyutf8_sql, engine.url.database) if
                       res[0].lower() not in EXCLUDED_TABLES]
        if len(table_names) > 0:
            raise ValueError(_('Tables "%s" have non utf8 collation, '
                               'please make sure all tables are CHARSET=utf8'
                               ) % ','.join(table_names))
@removals.remove(message=_removed_msg, version='8.3.0')
def db_version(engine, abs_path, init_version):
    """Show the current version of the repository.

    :param engine: SQLAlchemy engine instance for a given database
    :param abs_path: Absolute path to migrate repository
    :param init_version: Initial database version
    :returns: the current migrate version of the database
    :raises exception.DBMigrationError: if the database has tables but is
        not under version control
    """
    repository = _find_migrate_repo(abs_path)
    try:
        return versioning_api.db_version(engine, repository)
    except versioning_exceptions.DatabaseNotControlledError:
        # The database has no migrate_version table yet.  Inspect the real
        # schema to decide whether it is safe to stamp it automatically.
        meta = sqlalchemy.MetaData()
        meta.reflect(bind=engine)
        tables = meta.tables
        # An empty database, or one that only carries versioning tables
        # (alembic/migrate), can be put under control at init_version.
        if (len(tables) == 0 or 'alembic_version' in tables or
                'migrate_version' in tables):
            db_version_control(engine, abs_path, version=init_version)
            return versioning_api.db_version(engine, repository)
        else:
            # Real tables exist but no version info: refuse to guess.
            raise exception.DBMigrationError(
                _("The database is not under version control, but has "
                  "tables. Please stamp the current version of the schema "
                  "manually."))
@removals.remove(message=_removed_msg, version='8.3.0')
def db_version_control(engine, abs_path, version=None):
    """Mark a database as under this repository's version control.

    Once a database is under version control, schema changes should
    only be done via change scripts in this repository.

    :param engine: SQLAlchemy engine instance for a given database
    :param abs_path: Absolute path to migrate repository
    :param version: Initial database version
    :returns: the *version* argument, unchanged
    :raises exception.DBMigrationError: if the version is invalid or the
        database is already controlled
    """
    repo = _find_migrate_repo(abs_path)
    try:
        versioning_api.version_control(engine, repo, version)
    except versioning_exceptions.InvalidVersionError as ex:
        # Wrap migrate's exceptions so callers only deal with oslo.db ones.
        raise exception.DBMigrationError("Invalid version : %s" % ex)
    except versioning_exceptions.DatabaseAlreadyControlledError:
        raise exception.DBMigrationError("Database is already controlled.")
    return version
def _find_migrate_repo(abs_path):
    """Get the project's change script repository.

    :param abs_path: Absolute path to migrate repository
    :returns: a migrate Repository rooted at *abs_path*
    :raises exception.DBMigrationError: if the path does not exist
    """
    # Guard clause: fail fast with an oslo.db error when the path is missing.
    if os.path.exists(abs_path):
        return Repository(abs_path)
    raise exception.DBMigrationError("Path %s not found" % abs_path)
| apache-2.0 |
fyfcauc/android_external_chromium-org | chrome/test/functional/media/media_stat_perf.py | 56 | 3328 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""CPU, Memory, and FPS performance test for <video>.
Calculates decoded fps, dropped fps, CPU, and memory statistics while playing
HTML5 media element. The test compares results of playing a media file on
different video resolutions.
"""
import logging
import os
import psutil
import pyauto_media
import pyauto
import pyauto_utils
# HTML test path; relative to src/chrome/test/data.
_TEST_HTML_PATH = os.path.join('media', 'html', 'media_stat_perf.html')
# Path under data path for test files.
_TEST_MEDIA_PATH_CROWD = os.path.join('pyauto_private', 'media', 'crowd')
# Path under data path for test files.
_TEST_MEDIA_PATH_TULIP = os.path.join('media', 'avperf', 'tulip')
# The media files used for testing.
_TEST_VIDEOS = [os.path.join(_TEST_MEDIA_PATH_CROWD, name) for name in [
'crowd2160.webm', 'crowd1080.webm']]
_TEST_VIDEOS.extend([os.path.join(_TEST_MEDIA_PATH_TULIP, name) for name in [
'tulip2.webm', 'tulip2.wav', 'tulip2.ogv', 'tulip2.ogg', 'tulip2.mp4',
'tulip2.mp3', 'tulip2.m4a']])
class MediaStatsPerfTest(pyauto.PyUITest):
  """PyAuto test container. See file doc string for more information."""

  def _GetChromeRendererProcess(self):
    """Returns the Chrome renderer process."""
    # Tab index 1 is the tab appended by testMediaPerformance; tab 0 is the
    # browser's initial tab.
    renderer_id = self.GetBrowserInfo()['windows'][0]['tabs'][1]['renderer_pid']
    if not renderer_id:
      self.fail('Can not find the tab renderer process.')
    return psutil.Process(renderer_id)

  def testMediaPerformance(self):
    """Launches HTML test which plays each video and records statistics."""
    for file_name in _TEST_VIDEOS:
      # Append a tab and delete it at the end of the test to free its memory.
      self.AppendTab(pyauto.GURL(self.GetFileURLForDataPath(_TEST_HTML_PATH)))
      file_url = self.GetFileURLForDataPath(file_name)
      logging.debug('Running perf test for %s.', file_url)
      renderer_process = self._GetChromeRendererProcess()
      # Call to set a starting time to record CPU usage by the renderer.
      renderer_process.get_cpu_percent()
      self.assertTrue(
          self.CallJavascriptFunc('startTest', [file_url], tab_index=1))
      # Second call returns CPU usage since the call above.
      cpu_usage = renderer_process.get_cpu_percent()
      # NOTE(review): the value divided by 1024 is reported with a 'KB' unit
      # below, but the variable name says MB -- one of the two looks wrong;
      # confirm the intended unit before relying on this metric.
      mem_usage_mb = renderer_process.get_memory_info()[0] / 1024
      file_name = os.path.basename(file_name)
      pyauto_utils.PrintPerfResult('cpu', file_name, cpu_usage, '%')
      pyauto_utils.PrintPerfResult('memory', file_name, mem_usage_mb, 'KB')
      # FPS arrays are produced by the JS harness as comma-joined strings.
      decoded_fps = [
          float(value) for value in
          self.GetDOMValue("decodedFPS.join(',')", tab_index=1).split(',')]
      dropped_frames = self.GetDOMValue('droppedFrames', tab_index=1)
      dropped_fps = [
          float(value) for value in
          self.GetDOMValue("droppedFPS.join(',')", tab_index=1).split(',')]
      pyauto_utils.PrintPerfResult('fps', file_name, decoded_fps, 'fps')
      pyauto_utils.PrintPerfResult('dropped_fps', file_name, dropped_fps, 'fps')
      pyauto_utils.PrintPerfResult('dropped_frames', file_name, dropped_frames,
                                   'frames')
      self.CloseTab(tab_index=1)
if __name__ == '__main__':
pyauto_media.Main()
| bsd-3-clause |
yqm/sl4a | python/src/Lib/encodings/palmos.py | 647 | 2936 | """ Python Character Mapping Codec for PalmOS 3.5.
Written by Sjoerd Mullender (sjoerd@acm.org); based on iso8859_15.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless PalmOS 3.5 codec backed by the charmap tables below."""

    def encode(self,input,errors='strict'):
        # Delegate to the C-level charmap codec with this module's table.
        return codecs.charmap_encode(input,errors,encoding_map)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; charmap encoding is stateless, so each chunk
    is encoded independently (the *final* flag is not needed)."""

    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; single-byte charmap decoding needs no state
    carried between chunks."""

    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_map)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    """Stream writer for the PalmOS codec; encoding comes from Codec."""
    pass
class StreamReader(Codec,codecs.StreamReader):
    """Stream reader for the PalmOS codec; decoding comes from Codec."""
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry the encodings package uses to register
    this codec under the name 'palmos'."""
    return codecs.CodecInfo(
        name='palmos',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map

# Start from the 256-entry identity map and patch only the code points
# where PalmOS 3.5 differs from Latin-1.
decoding_map = codecs.make_identity_dict(range(256))

# The PalmOS character set is mostly iso-8859-1 with some differences.
decoding_map.update({
    0x0080: 0x20ac, # EURO SIGN
    0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
    0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK
    0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
    0x0085: 0x2026, # HORIZONTAL ELLIPSIS
    0x0086: 0x2020, # DAGGER
    0x0087: 0x2021, # DOUBLE DAGGER
    0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
    0x0089: 0x2030, # PER MILLE SIGN
    0x008a: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
    0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    0x008c: 0x0152, # LATIN CAPITAL LIGATURE OE
    0x008d: 0x2666, # BLACK DIAMOND SUIT
    0x008e: 0x2663, # BLACK CLUB SUIT
    0x008f: 0x2665, # BLACK HEART SUIT
    0x0090: 0x2660, # BLACK SPADE SUIT
    0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
    0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
    0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
    0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
    0x0095: 0x2022, # BULLET
    0x0096: 0x2013, # EN DASH
    0x0097: 0x2014, # EM DASH
    0x0098: 0x02dc, # SMALL TILDE
    0x0099: 0x2122, # TRADE MARK SIGN
    0x009a: 0x0161, # LATIN SMALL LETTER S WITH CARON
    0x009c: 0x0153, # LATIN SMALL LIGATURE OE
    0x009f: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS
})

### Encoding Map

# Inverse of decoding_map; consumed by Codec.encode via charmap_encode.
encoding_map = codecs.make_encoding_map(decoding_map)
| apache-2.0 |
mancoast/CPythonPyc_test | cpython/252_test_pep277.py | 34 | 4098 | # Test the Unicode versions of normal file functions
# open, os.open, os.stat. os.listdir, os.rename, os.remove, os.mkdir, os.chdir, os.rmdir
import sys, os, unittest
from test import test_support
if not os.path.supports_unicode_filenames:
raise test_support.TestSkipped, "test works only on NT+"
filenames = [
'abc',
u'ascii',
u'Gr\xfc\xdf-Gott',
u'\u0393\u03b5\u03b9\u03ac-\u03c3\u03b1\u03c2',
u'\u0417\u0434\u0440\u0430\u0432\u0441\u0442\u0432\u0443\u0439\u0442\u0435',
u'\u306b\u307d\u3093',
u'\u05d4\u05e9\u05e7\u05e6\u05e5\u05e1',
u'\u66e8\u66e9\u66eb',
u'\u66e8\u05e9\u3093\u0434\u0393\xdf',
]
# Destroy directory dirname and all files under it, to one level.
def deltree(dirname):
    """Remove *dirname* and every file directly inside it (one level only)."""
    # Don't hide legitimate errors: if one of these suckers exists, it's
    # an error if we can't remove it.
    if not os.path.exists(dirname):
        return
    # must pass unicode to os.listdir() so we get back unicode results.
    for entry in os.listdir(unicode(dirname)):
        os.unlink(os.path.join(dirname, entry))
    os.rmdir(dirname)
class UnicodeFileTests(unittest.TestCase):
    """Exercise file-system calls with Unicode filenames (Python 2, NT+)."""

    # Absolute paths of the scratch files created by setUp.
    files = [os.path.join(test_support.TESTFN, f) for f in filenames]

    def setUp(self):
        """Create the scratch directory and one UTF-8 file per test name."""
        try:
            os.mkdir(test_support.TESTFN)
        except OSError:
            pass
        for name in self.files:
            f = open(name, 'w')
            f.write((name+'\n').encode("utf-8"))
            f.close()
            # stat() verifies the file is reachable under its Unicode name.
            os.stat(name)

    def tearDown(self):
        deltree(test_support.TESTFN)

    def _apply_failure(self, fn, filename, expected_exception,
                       check_fn_in_exception = True):
        """Assert that fn(filename) raises expected_exception and that the
        exception carries the offending filename (unless told not to check)."""
        try:
            fn(filename)
            raise test_support.TestFailed("Expected to fail calling '%s(%r)'"
                                          % (fn.__name__, filename))
        except expected_exception, details:
            if check_fn_in_exception and details.filename != filename:
                raise test_support.TestFailed("Function '%s(%r) failed with "
                                              "bad filename in the exception: %r"
                                              % (fn.__name__, filename,
                                                 details.filename))

    def test_failures(self):
        # Pass non-existing Unicode filenames all over the place.
        for name in self.files:
            name = "not_" + name
            self._apply_failure(open, name, IOError)
            self._apply_failure(os.stat, name, OSError)
            self._apply_failure(os.chdir, name, OSError)
            self._apply_failure(os.rmdir, name, OSError)
            self._apply_failure(os.remove, name, OSError)
            # listdir may append a wildcard to the filename, so dont check
            self._apply_failure(os.listdir, name, OSError, False)

    def test_open(self):
        """Re-open each Unicode-named file for writing and stat it."""
        for name in self.files:
            f = open(name, 'w')
            f.write((name+'\n').encode("utf-8"))
            f.close()
            os.stat(name)

    def test_listdir(self):
        """listdir with a unicode argument must return unicode names."""
        f1 = os.listdir(test_support.TESTFN)
        # Printing f1 is not appropriate, as specific filenames
        # returned depend on the local encoding
        f2 = os.listdir(unicode(test_support.TESTFN,
                                sys.getfilesystemencoding()))
        f2.sort()
        print f2

    def test_rename(self):
        """Round-trip rename between a Unicode name and an ASCII name."""
        for name in self.files:
            os.rename(name,"tmp")
            os.rename("tmp",name)

    def test_directory(self):
        """mkdir/chdir/open/remove/rmdir with Unicode directory and file."""
        dirname = os.path.join(test_support.TESTFN,u'Gr\xfc\xdf-\u66e8\u66e9\u66eb')
        filename = u'\xdf-\u66e8\u66e9\u66eb'
        oldwd = os.getcwd()
        os.mkdir(dirname)
        os.chdir(dirname)
        f = open(filename, 'w')
        f.write((filename + '\n').encode("utf-8"))
        f.close()
        print repr(filename)
        os.access(filename,os.R_OK)
        os.remove(filename)
        os.chdir(oldwd)
        os.rmdir(dirname)
def test_main():
    """Run the suite, always removing the scratch directory afterwards."""
    try:
        test_support.run_unittest(UnicodeFileTests)
    finally:
        deltree(test_support.TESTFN)
if __name__ == "__main__":
test_main()
| gpl-3.0 |
micwypych/github-cmake-project-checker | project_checker/tests/studentprojecttest.py | 1 | 2674 | from unittest import TestCase
from unittest.mock import MagicMock
from project_checker.checker.project import StudentProject
class ReportTest(TestCase):
    """Tests for StudentProject report compilation and URL conversion.

    The report directory, partial reports and report file are replaced with
    MagicMock stand-ins, so no filesystem access happens.
    """

    def test_result_ranking_of_two_labs(self):
        """Partial lab reports are merged into score and ok/fail lines."""
        # Each partial report behaves like a mapping of exercise -> score.
        r1 = MagicMock(report={'lab1_ex1': 0, 'lab1_ex2': 2, 'lab1_ex3': 0},
                       __getitem__=lambda s, index: s.report[index])
        r2 = MagicMock(report={'lab1_ex1': 2, 'lab1_ex2': 2, 'lab1_ex3': 2},
                       __getitem__=lambda s, index: s.report[index])
        r3 = MagicMock(report={'lab1_ex1': 0, 'lab1_ex2': 0, 'lab1_ex3': 0},
                       __getitem__=lambda s, index: s.report[index])
        written = []
        read = ['lab2_ex1: 0', 'lab2_ex2: 2']
        # Renamed from `file` to avoid shadowing the builtin.
        report_file = MagicMock(write=lambda line: written.append(line),
                                __iter__=lambda *args: iter(read))
        directory = MagicMock(all_partial_reports=lambda *args: [r1, r2, r3],
                              open=lambda name, opt: report_file)
        project = StudentProject('url.com/USER/REPO.git', directory)
        project.report_dir = directory
        project.compile_final_report('report')
        # assertEqual replaces the assertEquals alias, which is deprecated
        # and removed in Python 3.12.
        self.assertEqual(
            ['lab1_ex1=0\n', 'lab1_ex2=0\n', 'lab1_ex3=0\n', 'lab1_ex1=ok\n',
             'lab1_ex2=ok\n', 'lab1_ex3=ok\n', 'lab1_ex1=0\n', 'lab1_ex2=0\n',
             'lab1_ex3=0\n', 'lab1_ex1=ok\n', 'lab1_ex2=ok\n',
             'lab1_ex3=ok\n'],
            written)

    def test_changing_protocol_to_ssh(self):
        """HTTPS GitHub URLs are rewritten to the ssh:// form."""
        self.assertEqual('ssh://git@github.com/USER/REPO.git',
                         StudentProject.to_ssh(
                             'https://github.com/USER/REPO.git'))
class StudentProjectTest(TestCase):
    """URL-parsing tests: user/repo directory names extracted from git URLs.

    Uses assertEqual throughout: the assertEquals alias is deprecated and
    removed in Python 3.12.
    """

    def test_create_simple_project_name(self):
        project = StudentProject('https://github.com/USER/REPO.git',
                                 MagicMock())
        self.assertEqual('USER', project.user_dir_name)
        self.assertEqual('REPO', project.project_dir_name)

    def test_create_project_name_with_hyphens(self):
        project = StudentProject(
            'https://github.com/USER--WITH-HYPHENS/REPO-WITH--HYPHENS--.git',
            MagicMock())
        self.assertEqual('USER--WITH-HYPHENS', project.user_dir_name)
        self.assertEqual('REPO-WITH--HYPHENS--', project.project_dir_name)

    def test_create_project_name_with_dots(self):
        # Only the final '.git' suffix should be stripped, not inner dots.
        project = StudentProject(
            'https://github.com/USER..WITH.HYPHENS/REPO.WITH.DOTS...git',
            MagicMock())
        self.assertEqual('USER..WITH.HYPHENS', project.user_dir_name)
        self.assertEqual('REPO.WITH.DOTS..', project.project_dir_name)

    def test_create_project_without_git_extension(self):
        project = StudentProject('https://github.com/USER/REPO', MagicMock())
        self.assertEqual('USER', project.user_dir_name)
        self.assertEqual('REPO', project.project_dir_name)
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.